HDFS-5003. TestNNThroughputBenchmark failed caused by existing directories. Contributed by Xi Fang.
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1504352 13f79535-47bb-0310-9956-ffa450edef68
commit a5cd4b9bee
parent 68faa67f1b
@@ -679,6 +679,9 @@ Release 2.1.0-beta - 2013-07-02
     HDFS-4980. Incorrect logging.properties file for hadoop-httpfs.
     (Mark Grover via suresh)
 
+    HDFS-5003. TestNNThroughputBenchmark failed caused by existing directories.
+    (Xi Fang via cnauroth)
+
   BREAKDOWN OF HDFS-347 SUBTASKS AND RELATED JIRAS
 
     HDFS-4353. Encapsulate connections to peers in Peer and PeerServer classes.
@@ -121,7 +121,7 @@ public class NNThroughputBenchmark {
     File excludeFile = new File(config.get(DFSConfigKeys.DFS_HOSTS_EXCLUDE,
         "exclude"));
     if(!excludeFile.exists()) {
-      if(!excludeFile.getParentFile().mkdirs())
+      if(!excludeFile.getParentFile().exists() && !excludeFile.getParentFile().mkdirs())
         throw new IOException("NNThroughputBenchmark: cannot mkdir " + excludeFile);
     }
     new FileOutputStream(excludeFile).close();
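
Context for the fix: java.io.File#mkdirs() returns true only when it actually creates the directory, so on a rerun where the "exclude" file's parent directory was left behind by an earlier run, the old check saw false and threw an IOException even though nothing was wrong. The snippet below is a minimal standalone sketch of that behavior and of the guarded pattern the patch adopts; the class name MkdirsDemo and the "bench-data/exclude" path are illustrative assumptions, not part of NNThroughputBenchmark.

import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;

// Illustrative sketch only; names and paths are made up for the demo.
public class MkdirsDemo {
  public static void main(String[] args) throws IOException {
    File excludeFile = new File("bench-data", "exclude");
    File parent = excludeFile.getParentFile();

    if (!excludeFile.exists()) {
      // mkdirs() returns false when the directory already exists, so it must
      // only be treated as an error if the directory is genuinely missing.
      if (!parent.exists() && !parent.mkdirs()) {
        throw new IOException("cannot mkdir " + parent);
      }
      // Create an empty exclude file, mirroring what the benchmark setup does.
      new FileOutputStream(excludeFile).close();
    }
  }
}

Running the demo twice in the same working directory illustrates the difference: without the exists() guard, the second run would hit the IOException purely because the directory survived the first run.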