HDFS-11696. Fix warnings from Spotbugs in hadoop-hdfs. Contributed by Yiqun Lin.
parent 645a8f2a4d
commit 2e43c28e01

@@ -2901,9 +2901,12 @@ private void initThreadsNumForStripedReads(int numThreads) {
     }
     synchronized (DFSClient.class) {
       if (STRIPED_READ_THREAD_POOL == null) {
-        STRIPED_READ_THREAD_POOL = DFSUtilClient.getThreadPoolExecutor(1,
+        // Only after thread pool is fully constructed then save it to
+        // volatile field.
+        ThreadPoolExecutor threadPool = DFSUtilClient.getThreadPoolExecutor(1,
             numThreads, 60, "StripedRead-", true);
-        STRIPED_READ_THREAD_POOL.allowCoreThreadTimeOut(true);
+        threadPool.allowCoreThreadTimeOut(true);
+        STRIPED_READ_THREAD_POOL = threadPool;
       }
     }
   }
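
The hunk above addresses the warning about publishing a not-yet-configured executor through the shared field: the pool is now built and configured via a local variable and only assigned to the volatile STRIPED_READ_THREAD_POOL field once fully set up. A minimal sketch of the same safe-publication idiom, with hypothetical class and field names rather than the DFSClient code:

import java.util.concurrent.SynchronousQueue;
import java.util.concurrent.ThreadPoolExecutor;
import java.util.concurrent.TimeUnit;

public class LazyStripedReadPool {
  // Volatile so readers either see null or a fully configured pool.
  private static volatile ThreadPoolExecutor pool;

  static ThreadPoolExecutor getOrCreate(int maxThreads) {
    if (pool == null) {
      synchronized (LazyStripedReadPool.class) {
        if (pool == null) {
          // Configure a local reference first; assign the volatile field last,
          // so no thread can observe a partially constructed pool.
          ThreadPoolExecutor p = new ThreadPoolExecutor(1, maxThreads,
              60, TimeUnit.SECONDS, new SynchronousQueue<Runnable>());
          p.allowCoreThreadTimeOut(true);
          pool = p;
        }
      }
    }
    return pool;
  }
}
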
@@ -101,8 +101,9 @@ public boolean equals(Object o) {
     }

     boolean areEqual;
-    for (String disk : this.slowDisks.keySet()) {
-      if (!this.slowDisks.get(disk).equals(that.slowDisks.get(disk))) {
+    for (Map.Entry<String, Map<DiskOp, Double>> entry : this.slowDisks
+        .entrySet()) {
+      if (!entry.getValue().equals(that.slowDisks.get(entry.getKey()))) {
         return false;
       }
     }
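
The slowDisks equals() change replaces per-key lookups over keySet() with a single entrySet() pass, the usual fix for SpotBugs' inefficient map iterator warning (WMI_WRONG_MAP_ITERATOR), since iterating keySet() and calling get() costs an extra lookup per key. A minimal sketch of the idiom on plain maps, with hypothetical names rather than the HDFS code:

import java.util.Map;

public final class MapCompare {
  private MapCompare() {
  }

  /** True if every entry of a has an equal value under the same key in b. */
  static <K, V> boolean entriesMatch(Map<K, V> a, Map<K, V> b) {
    // One pass over the entries instead of an extra get() per key.
    for (Map.Entry<K, V> entry : a.entrySet()) {
      if (!entry.getValue().equals(b.get(entry.getKey()))) {
        return false;
      }
    }
    return true;
  }
}
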
@@ -264,4 +264,30 @@
     <Field name="locations" />
     <Bug pattern="SE_TRANSIENT_FIELD_NOT_RESTORED" />
   </Match>
+  <Match>
+    <Class name="org.apache.hadoop.hdfs.server.namenode.NNUpgradeUtil$1" />
+    <Method name="visitFile" />
+    <Bug pattern="NP_NULL_ON_SOME_PATH_FROM_RETURN_VALUE" />
+  </Match>
+  <!-- Ignore warnings for not changing the startup option parsing behavior. -->
+  <Match>
+    <Class name="org.apache.hadoop.hdfs.server.common.HdfsServerConstants$StartupOption" />
+    <Method name="setClusterId" />
+    <Bug pattern="ME_ENUM_FIELD_SETTER" />
+  </Match>
+  <Match>
+    <Class name="org.apache.hadoop.hdfs.server.common.HdfsServerConstants$StartupOption" />
+    <Method name="setForce" />
+    <Bug pattern="ME_ENUM_FIELD_SETTER" />
+  </Match>
+  <Match>
+    <Class name="org.apache.hadoop.hdfs.server.common.HdfsServerConstants$StartupOption" />
+    <Method name="setForceFormat" />
+    <Bug pattern="ME_ENUM_FIELD_SETTER" />
+  </Match>
+  <Match>
+    <Class name="org.apache.hadoop.hdfs.server.common.HdfsServerConstants$StartupOption" />
+    <Method name="setInteractiveFormat" />
+    <Bug pattern="ME_ENUM_FIELD_SETTER" />
+  </Match>
 </FindBugsFilter>
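
These exclude entries suppress, rather than rewrite, two groups of findings: the NP_NULL_ON_SOME_PATH_FROM_RETURN_VALUE report against NNUpgradeUtil$1.visitFile, and the ME_ENUM_FIELD_SETTER reports against the StartupOption setters, which are kept so the startup-option parsing behavior does not change (per the comment in the filter). As a hedged illustration of what ME_ENUM_FIELD_SETTER flags, using a toy enum rather than the HDFS one:

// SpotBugs reports ME_ENUM_FIELD_SETTER because enum constants are global
// singletons, so a public setter mutates state visible to the whole process.
public enum ToyStartupOption {
  FORMAT;

  private boolean force = false;

  public void setForce(boolean force) {  // flagged: setter on an enum field
    this.force = force;
  }

  public boolean getForce() {
    return force;
  }
}
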
@@ -299,14 +299,18 @@ public boolean accept(File file) {
         return file.isDirectory();
       }
     });
-    for (File journalDir : journalDirs) {
-      String jid = journalDir.getName();
-      if (!status.containsKey(jid)) {
-        Map<String, String> jMap = new HashMap<String, String>();
-        jMap.put("Formatted", "true");
-        status.put(jid, jMap);
-      }
-    }
+
+    if (journalDirs != null) {
+      for (File journalDir : journalDirs) {
+        String jid = journalDir.getName();
+        if (!status.containsKey(jid)) {
+          Map<String, String> jMap = new HashMap<String, String>();
+          jMap.put("Formatted", "true");
+          status.put(jid, jMap);
+        }
+      }
+    }
+
     return JSON.toString(status);
   }
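
This hunk, like the linkBlocksHelper and OIV-image retention hunks further down, wraps a directory listing in a null check: File.listFiles() and File.list() return null when an I/O error occurs, which is the path NP_NULL_ON_SOME_PATH_FROM_RETURN_VALUE points at. A minimal standalone sketch of the guard, with a hypothetical helper rather than the JournalNode code:

import java.io.File;
import java.util.ArrayList;
import java.util.List;

public final class DirNames {
  private DirNames() {
  }

  /** Collects subdirectory names; listFiles() returns null on I/O error. */
  static List<String> subDirNames(File dir) {
    List<String> names = new ArrayList<>();
    File[] children = dir.listFiles(File::isDirectory);
    if (children != null) {  // guard the possibly-null array before iterating
      for (File child : children) {
        names.add(child.getName());
      }
    }
    return names;
  }
}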
|
@@ -1336,10 +1336,14 @@ public boolean accept(File dir, String name) {
         return name.startsWith(BLOCK_SUBDIR_PREFIX);
       }
     });
-    for(int i = 0; i < otherNames.length; i++)
-      linkBlocksHelper(new File(from, otherNames[i]),
-          new File(to, otherNames[i]), oldLV, hl, upgradeToIdBasedLayout,
-          blockRoot, idBasedLayoutSingleLinks);
+
+    if (otherNames != null) {
+      for (int i = 0; i < otherNames.length; i++) {
+        linkBlocksHelper(new File(from, otherNames[i]),
+            new File(to, otherNames[i]), oldLV, hl, upgradeToIdBasedLayout,
+            blockRoot, idBasedLayoutSingleLinks);
+      }
+    }
   }

   /**
|
@@ -255,24 +255,27 @@ public boolean accept(File dir, String name) {
     });

     // Check whether there is any work to do.
-    if (filesInStorage.length <= numCheckpointsToRetain) {
+    if (filesInStorage != null
+        && filesInStorage.length <= numCheckpointsToRetain) {
       return;
     }

     // Create a sorted list of txids from the file names.
     TreeSet<Long> sortedTxIds = new TreeSet<Long>();
-    for (String fName : filesInStorage) {
-      // Extract the transaction id from the file name.
-      long fTxId;
-      try {
-        fTxId = Long.parseLong(fName.substring(oivImagePrefix.length() + 1));
-      } catch (NumberFormatException nfe) {
-        // This should not happen since we have already filtered it.
-        // Log and continue.
-        LOG.warn("Invalid file name. Skipping " + fName);
-        continue;
-      }
-      sortedTxIds.add(Long.valueOf(fTxId));
-    }
+    if (filesInStorage != null) {
+      for (String fName : filesInStorage) {
+        // Extract the transaction id from the file name.
+        long fTxId;
+        try {
+          fTxId = Long.parseLong(fName.substring(oivImagePrefix.length() + 1));
+        } catch (NumberFormatException nfe) {
+          // This should not happen since we have already filtered it.
+          // Log and continue.
+          LOG.warn("Invalid file name. Skipping " + fName);
+          continue;
+        }
+        sortedTxIds.add(Long.valueOf(fTxId));
+      }
+    }

     int numFilesToDelete = sortedTxIds.size() - numCheckpointsToRetain;
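
Besides the null guard, this hunk sits in the retention path where OIV image file names are parsed into transaction ids, collected into a TreeSet, and everything except the newest numCheckpointsToRetain entries is purged. A rough sketch of just that trimming step, with hypothetical names (the real method also deletes the corresponding files):

import java.util.SortedSet;
import java.util.TreeSet;

public final class RetainNewest {
  private RetainNewest() {
  }

  /** Returns the txids that would be purged, keeping the newest numToRetain. */
  static SortedSet<Long> txIdsToPurge(SortedSet<Long> sortedTxIds, int numToRetain) {
    int numToDelete = sortedTxIds.size() - numToRetain;
    SortedSet<Long> purge = new TreeSet<>();
    for (Long txId : sortedTxIds) {  // ascending order: oldest ids first
      if (purge.size() >= numToDelete) {
        break;  // the remaining, newest ids are retained
      }
      purge.add(txId);
    }
    return purge;
  }
}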
|
@@ -1992,7 +1992,7 @@ public int run(String[] argv) throws Exception {
         return exitCode;
       }
     } else if ("-report".equals(cmd)) {
-      if (argv.length < 1) {
+      if (argv.length > 6) {
         printUsage(cmd);
         return exitCode;
       }

@@ -2022,7 +2022,7 @@ public int run(String[] argv) throws Exception {
         return exitCode;
       }
     } else if (RollingUpgradeCommand.matches(cmd)) {
-      if (argv.length < 1 || argv.length > 2) {
+      if (argv.length > 2) {
         printUsage(cmd);
         return exitCode;
       }

@@ -2097,7 +2097,7 @@ public int run(String[] argv) throws Exception {
         return exitCode;
       }
     } else if ("-triggerBlockReport".equals(cmd)) {
-      if (argv.length < 1) {
+      if ((argv.length != 2) && (argv.length != 3)) {
         printUsage(cmd);
         return exitCode;
       }
|
@@ -722,9 +722,13 @@ private void processINode(DataInputStream in, ImageVisitor v,
       if (supportSnapshot && supportInodeId) {
         dirNodeMap.put(inodeId, pathName);
       }
-      v.visit(ImageElement.NS_QUOTA, numBlocks == -1 ? in.readLong() : -1);
-      if (NameNodeLayoutVersion.supports(Feature.DISKSPACE_QUOTA, imageVersion))
-        v.visit(ImageElement.DS_QUOTA, numBlocks == -1 ? in.readLong() : -1);
+
+      v.visit(ImageElement.NS_QUOTA, in.readLong());
+      if (NameNodeLayoutVersion.supports(Feature.DISKSPACE_QUOTA,
+          imageVersion)) {
+        v.visit(ImageElement.DS_QUOTA, in.readLong());
+      }
+
       if (supportSnapshot) {
         boolean snapshottable = in.readBoolean();
         if (!snapshottable) {
|
@@ -138,5 +138,30 @@ public void testRollingUpgrade() {
       }
     }
   }

+  @Test
+  public void testFormat() {
+    String[] args = new String[] {"-format"};
+    StartupOption opt = NameNode.parseArguments(args);
+    assertEquals(StartupOption.FORMAT, opt);
+    assertEquals(true, opt.getInteractiveFormat());
+    assertEquals(false, opt.getForceFormat());
+
+    args = new String[] {"-format", "-nonInteractive"};
+    opt = NameNode.parseArguments(args);
+    assertEquals(StartupOption.FORMAT, opt);
+    assertEquals(false, opt.getInteractiveFormat());
+    assertEquals(false, opt.getForceFormat());
+
+    args = new String[] {"-format", "-nonInteractive", "-force"};
+    opt = NameNode.parseArguments(args);
+    assertEquals(StartupOption.FORMAT, opt);
+    assertEquals(false, opt.getInteractiveFormat());
+    assertEquals(true, opt.getForceFormat());
+
+    // test error condition
+    args = new String[] {"-nonInteractive"};
+    opt = NameNode.parseArguments(args);
+    assertNull(opt);
+  }
 }
|