HDFS-16352. return the real datanode numBlocks in #getDatanodeStorageReport (#3714). Contributed by liubingxing.

Signed-off-by: He Xiaoqiao <hexiaoqiao@apache.org>
liubingxing authored 2021-12-16 23:31:28 -06:00; committed by GitHub
parent a967033a9f
commit d8dea6f52a
3 changed files with 35 additions and 4 deletions
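Background: getDatanodeStorageReport built each DatanodeInfo via DatanodeInfoBuilder#setFrom(d), which copied a numBlocks value that is not populated on a DatanodeDescriptor, so every storage report came back with numBlocks = 0. The change below has getDatanodeStorageReport set the field explicitly from DatanodeDescriptor#numBlocks() and stops setFrom from copying it. A minimal, hypothetical caller-side sketch of reading the value after this commit (assumes a running MiniDFSCluster named cluster; the calls used here are existing HDFS APIs):

    NamenodeProtocols nnRpc = cluster.getNameNodeRpc();
    DatanodeStorageReport[] reports =
        nnRpc.getDatanodeStorageReport(HdfsConstants.DatanodeReportType.ALL);
    for (DatanodeStorageReport r : reports) {
      // after this commit, getNumBlocks() carries the datanode's real block count
      System.out.println(r.getDatanodeInfo().getXferAddr()
          + " numBlocks=" + r.getDatanodeInfo().getNumBlocks());
    }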


@@ -698,9 +698,10 @@ public static class DatanodeInfoBuilder {
     private long nonDfsUsed = 0L;
     private long lastBlockReportTime = 0L;
     private long lastBlockReportMonotonic = 0L;
-    private int numBlocks;
+    private int numBlocks = 0;
 
+    // Please use setNumBlocks explicitly to set numBlocks as this method doesn't have
+    // sufficient info about numBlocks
     public DatanodeInfoBuilder setFrom(DatanodeInfo from) {
       this.capacity = from.getCapacity();
       this.dfsUsed = from.getDfsUsed();
@@ -717,7 +718,6 @@ public DatanodeInfoBuilder setFrom(DatanodeInfo from) {
       this.upgradeDomain = from.getUpgradeDomain();
       this.lastBlockReportTime = from.getLastBlockReportTime();
       this.lastBlockReportMonotonic = from.getLastBlockReportMonotonic();
-      this.numBlocks = from.getNumBlocks();
       setNodeID(from);
       return this;
     }
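Since setFrom intentionally no longer carries numBlocks over, any call site that needs the count must set it explicitly. A sketch of the intended builder usage (d standing for a DatanodeDescriptor, as in the getDatanodeStorageReport hunk below):

    DatanodeInfo info = new DatanodeInfoBuilder()
        .setFrom(d)
        .setNumBlocks(d.numBlocks())
        .build();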


@@ -2182,7 +2182,8 @@ public DatanodeStorageReport[] getDatanodeStorageReport(
     for (int i = 0; i < reports.length; i++) {
       final DatanodeDescriptor d = datanodes.get(i);
       reports[i] = new DatanodeStorageReport(
-          new DatanodeInfoBuilder().setFrom(d).build(), d.getStorageReports());
+          new DatanodeInfoBuilder().setFrom(d).setNumBlocks(d.numBlocks()).build(),
+          d.getStorageReports());
     }
     return reports;
   }
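The same report is also reachable through the client API instead of the NameNode RPC proxy. A hedged sketch of that path (DistributedFileSystem#getClient and DFSClient#getDatanodeStorageReport are existing APIs; the Configuration/FileSystem setup is assumed):

    DistributedFileSystem dfs = (DistributedFileSystem) FileSystem.get(conf);
    DatanodeStorageReport[] live = dfs.getClient()
        .getDatanodeStorageReport(HdfsConstants.DatanodeReportType.LIVE);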


@@ -20,9 +20,14 @@
 import java.io.IOException;
 
 import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FSDataOutputStream;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.UnresolvedLinkException;
+import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.hdfs.protocol.HdfsConstants;
+import org.apache.hadoop.hdfs.server.protocol.DatanodeStorageReport;
 import org.apache.hadoop.hdfs.server.protocol.NamenodeProtocols;
 import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.test.GenericTestUtils;
@@ -31,6 +36,8 @@
 import org.junit.Before;
 import org.junit.Test;
 
+import static org.junit.Assert.assertEquals;
+
 public class TestNameNodeRpcServerMethods {
   private static NamenodeProtocols nnRpc;
   private static Configuration conf;
@@ -83,4 +90,27 @@ public void testDeleteSnapshotWhenSnapshotNameIsEmpty() throws Exception {
   }
 
+  @Test
+  public void testGetDatanodeStorageReportWithNumBLocksNotZero() throws Exception {
+    int buffSize = 1024;
+    long blockSize = 1024 * 1024;
+    String file = "/testFile";
+    DistributedFileSystem dfs = cluster.getFileSystem();
+    FSDataOutputStream outputStream = dfs.create(
+        new Path(file), true, buffSize, (short)1, blockSize);
+    byte[] outBuffer = new byte[buffSize];
+    for (int i = 0; i < buffSize; i++) {
+      outBuffer[i] = (byte) (i & 0x00ff);
+    }
+    outputStream.write(outBuffer);
+    outputStream.close();
+    int numBlocks = 0;
+    DatanodeStorageReport[] reports
+        = nnRpc.getDatanodeStorageReport(HdfsConstants.DatanodeReportType.ALL);
+    for (DatanodeStorageReport r : reports) {
+      numBlocks += r.getDatanodeInfo().getNumBlocks();
+    }
+    assertEquals(1, numBlocks);
+  }
+
 }
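The assertion expects exactly 1 because the test writes a single 1 KB file with replication 1, i.e. one block replica across the whole mini cluster. To run just this test from a source checkout (module path assumed to be hadoop-hdfs-project/hadoop-hdfs):

    mvn test -Dtest=TestNameNodeRpcServerMethods#testGetDatanodeStorageReportWithNumBLocksNotZero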