HDFS-11696. Fix warnings from Spotbugs in hadoop-hdfs. Contributed by Yiqun Lin.

This commit is contained in:
Yiqun Lin 2017-08-15 16:48:49 +08:00
parent 645a8f2a4d
commit 2e43c28e01
9 changed files with 103 additions and 33 deletions

View File

@ -2901,9 +2901,12 @@ private void initThreadsNumForStripedReads(int numThreads) {
}
synchronized (DFSClient.class) {
if (STRIPED_READ_THREAD_POOL == null) {
STRIPED_READ_THREAD_POOL = DFSUtilClient.getThreadPoolExecutor(1,
// Only after thread pool is fully constructed then save it to
// volatile field.
ThreadPoolExecutor threadPool = DFSUtilClient.getThreadPoolExecutor(1,
numThreads, 60, "StripedRead-", true);
STRIPED_READ_THREAD_POOL.allowCoreThreadTimeOut(true);
threadPool.allowCoreThreadTimeOut(true);
STRIPED_READ_THREAD_POOL = threadPool;
}
}
}

View File

@ -101,8 +101,9 @@ public boolean equals(Object o) {
}
boolean areEqual;
for (String disk : this.slowDisks.keySet()) {
if (!this.slowDisks.get(disk).equals(that.slowDisks.get(disk))) {
for (Map.Entry<String, Map<DiskOp, Double>> entry : this.slowDisks
.entrySet()) {
if (!entry.getValue().equals(that.slowDisks.get(entry.getKey()))) {
return false;
}
}

View File

@ -264,4 +264,30 @@
<Field name="locations" />
<Bug pattern="SE_TRANSIENT_FIELD_NOT_RESTORED" />
</Match>
<!-- NOTE(review): suppresses NP_NULL_ON_SOME_PATH_FROM_RETURN_VALUE in the
     anonymous FileVisitor of NNUpgradeUtil — presumably a SpotBugs false
     positive (visitFile is only invoked for existing paths); confirm against
     the visitor implementation. -->
<Match>
<Class name="org.apache.hadoop.hdfs.server.namenode.NNUpgradeUtil$1" />
<Method name="visitFile" />
<Bug pattern="NP_NULL_ON_SOME_PATH_FROM_RETURN_VALUE" />
</Match>
<!-- Ignore warnings for not changing the startup option parsing behavior. -->
<!-- StartupOption is an enum whose instances carry mutable flags set during
     argument parsing; ME_ENUM_FIELD_SETTER is suppressed for each setter
     rather than refactoring the long-standing parsing code. -->
<Match>
<Class name="org.apache.hadoop.hdfs.server.common.HdfsServerConstants$StartupOption" />
<Method name="setClusterId" />
<Bug pattern="ME_ENUM_FIELD_SETTER" />
</Match>
<Match>
<Class name="org.apache.hadoop.hdfs.server.common.HdfsServerConstants$StartupOption" />
<Method name="setForce" />
<Bug pattern="ME_ENUM_FIELD_SETTER" />
</Match>
<Match>
<Class name="org.apache.hadoop.hdfs.server.common.HdfsServerConstants$StartupOption" />
<Method name="setForceFormat" />
<Bug pattern="ME_ENUM_FIELD_SETTER" />
</Match>
<Match>
<Class name="org.apache.hadoop.hdfs.server.common.HdfsServerConstants$StartupOption" />
<Method name="setInteractiveFormat" />
<Bug pattern="ME_ENUM_FIELD_SETTER" />
</Match>
</FindBugsFilter>

View File

@ -299,6 +299,8 @@ public boolean accept(File file) {
return file.isDirectory();
}
});
if (journalDirs != null) {
for (File journalDir : journalDirs) {
String jid = journalDir.getName();
if (!status.containsKey(jid)) {
@ -307,6 +309,8 @@ public boolean accept(File file) {
status.put(jid, jMap);
}
}
}
return JSON.toString(status);
}

View File

@ -1336,11 +1336,15 @@ public boolean accept(File dir, String name) {
return name.startsWith(BLOCK_SUBDIR_PREFIX);
}
});
for(int i = 0; i < otherNames.length; i++)
if (otherNames != null) {
for (int i = 0; i < otherNames.length; i++) {
linkBlocksHelper(new File(from, otherNames[i]),
new File(to, otherNames[i]), oldLV, hl, upgradeToIdBasedLayout,
blockRoot, idBasedLayoutSingleLinks);
}
}
}
/**
* Get the BlockPoolSliceStorage from {@link bpStorageMap}.

View File

@ -255,12 +255,14 @@ public boolean accept(File dir, String name) {
});
// Check whether there is any work to do.
if (filesInStorage.length <= numCheckpointsToRetain) {
if (filesInStorage != null
&& filesInStorage.length <= numCheckpointsToRetain) {
return;
}
// Create a sorted list of txids from the file names.
TreeSet<Long> sortedTxIds = new TreeSet<Long>();
if (filesInStorage != null) {
for (String fName : filesInStorage) {
// Extract the transaction id from the file name.
long fTxId;
@ -274,6 +276,7 @@ public boolean accept(File dir, String name) {
}
sortedTxIds.add(Long.valueOf(fTxId));
}
}
int numFilesToDelete = sortedTxIds.size() - numCheckpointsToRetain;
Iterator<Long> iter = sortedTxIds.iterator();

View File

@ -1992,7 +1992,7 @@ public int run(String[] argv) throws Exception {
return exitCode;
}
} else if ("-report".equals(cmd)) {
if (argv.length < 1) {
if (argv.length > 6) {
printUsage(cmd);
return exitCode;
}
@ -2022,7 +2022,7 @@ public int run(String[] argv) throws Exception {
return exitCode;
}
} else if (RollingUpgradeCommand.matches(cmd)) {
if (argv.length < 1 || argv.length > 2) {
if (argv.length > 2) {
printUsage(cmd);
return exitCode;
}
@ -2097,7 +2097,7 @@ public int run(String[] argv) throws Exception {
return exitCode;
}
} else if ("-triggerBlockReport".equals(cmd)) {
if (argv.length < 1) {
if ((argv.length != 2) && (argv.length != 3)) {
printUsage(cmd);
return exitCode;
}

View File

@ -722,9 +722,13 @@ private void processINode(DataInputStream in, ImageVisitor v,
if (supportSnapshot && supportInodeId) {
dirNodeMap.put(inodeId, pathName);
}
v.visit(ImageElement.NS_QUOTA, numBlocks == -1 ? in.readLong() : -1);
if (NameNodeLayoutVersion.supports(Feature.DISKSPACE_QUOTA, imageVersion))
v.visit(ImageElement.DS_QUOTA, numBlocks == -1 ? in.readLong() : -1);
v.visit(ImageElement.NS_QUOTA, in.readLong());
if (NameNodeLayoutVersion.supports(Feature.DISKSPACE_QUOTA,
imageVersion)) {
v.visit(ImageElement.DS_QUOTA, in.readLong());
}
if (supportSnapshot) {
boolean snapshottable = in.readBoolean();
if (!snapshottable) {

View File

@ -139,4 +139,29 @@ public void testRollingUpgrade() {
}
}
@Test
public void testFormat() {
  // "-format" alone: interactive formatting, not forced.
  checkFormatParsing(new String[] {"-format"}, true, false);
  // "-nonInteractive" turns off the confirmation prompt.
  checkFormatParsing(new String[] {"-format", "-nonInteractive"}, false, false);
  // "-force" additionally enables forced formatting.
  checkFormatParsing(new String[] {"-format", "-nonInteractive", "-force"},
      false, true);
  // Error condition: "-nonInteractive" without "-format" is rejected.
  assertNull(NameNode.parseArguments(new String[] {"-nonInteractive"}));
}

/**
 * Parses the given arguments and verifies they yield the FORMAT startup
 * option with the expected interactive/force flags.
 */
private void checkFormatParsing(String[] args, boolean expectInteractive,
    boolean expectForce) {
  StartupOption parsed = NameNode.parseArguments(args);
  assertEquals(StartupOption.FORMAT, parsed);
  assertEquals(expectInteractive, parsed.getInteractiveFormat());
  assertEquals(expectForce, parsed.getForceFormat());
}
}