diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestListFiles.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestListFiles.java
index aae013fd77..df519c84e8 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestListFiles.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestListFiles.java
@@ -45,19 +45,39 @@ public class TestListFiles {
   final protected static Configuration conf = new Configuration();
 
   protected static FileSystem fs;
-  final protected static Path TEST_DIR = getTestDir();
+  protected static Path TEST_DIR;
   final private static int FILE_LEN = 10;
-  final private static Path FILE1 = new Path(TEST_DIR, "file1");
-  final private static Path DIR1 = new Path(TEST_DIR, "dir1");
-  final private static Path FILE2 = new Path(DIR1, "file2");
-  final private static Path FILE3 = new Path(DIR1, "file3");
+  private static Path FILE1;
+  private static Path DIR1;
+  private static Path FILE2;
+  private static Path FILE3;
+
+  static {
+    setTestPaths(new Path(
+      System.getProperty("test.build.data", "build/test/data/work-dir/localfs"),
+      "main_"));
+  }
 
   protected static Path getTestDir() {
-    return new Path(
-      System.getProperty("test.build.data","build/test/data/work-dir/localfs"),
-      "main_");
+    return TEST_DIR;
   }
-  
+
+  /**
+   * Sets the root testing directory and reinitializes any additional test paths
+   * that are under the root. This method is intended to be called from a
+   * subclass's @BeforeClass method if there is a need to override the testing
+   * directory.
+   *
+   * @param testDir Path root testing directory
+   */
+  protected static void setTestPaths(Path testDir) {
+    TEST_DIR = testDir;
+    FILE1 = new Path(TEST_DIR, "file1");
+    DIR1 = new Path(TEST_DIR, "dir1");
+    FILE2 = new Path(DIR1, "file2");
+    FILE3 = new Path(DIR1, "file3");
+  }
+
   @BeforeClass
   public static void testSetUp() throws Exception {
     fs = FileSystem.getLocal(conf);
diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 1e13a49e68..6b31b48262 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -310,6 +310,9 @@ Release 2.0.4-beta - UNRELEASED
   OPTIMIZATIONS
 
   BUG FIXES
+
+    HDFS-4470. Several HDFS tests attempt file operations on invalid HDFS
+    paths when running on Windows. (Chris Nauroth via suresh)
 
 Release 2.0.3-alpha - 2013-02-06
 
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileLengthOnClusterRestart.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileLengthOnClusterRestart.java
index f63ba9a53a..8bafee67f8 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileLengthOnClusterRestart.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFileLengthOnClusterRestart.java
@@ -43,7 +43,7 @@ public void testFileLengthWithHSyncAndClusterRestartWithOutDNsRegister()
         .numDataNodes(2).build();
     HdfsDataInputStream in = null;
     try {
-      Path path = new Path(MiniDFSCluster.getBaseDirectory(), "test");
+      Path path = new Path("/tmp/TestFileLengthOnClusterRestart", "test");
       DistributedFileSystem dfs = (DistributedFileSystem) cluster
           .getFileSystem();
       FSDataOutputStream out = dfs.create(path);
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestLargeBlock.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestLargeBlock.java
index 9563361094..64c5ef4c05 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestLargeBlock.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestLargeBlock.java
@@ -183,8 +183,7 @@ public void runTest(final long blockSize) throws IOException {
     try {
 
       // create a new file in test data directory
-      Path file1 = new Path(System.getProperty("test.build.data") + "/" +
-          Long.toString(blockSize) + ".dat");
+      Path file1 = new Path("/tmp/TestLargeBlock", blockSize + ".dat");
       FSDataOutputStream stm = createFile(fs, file1, 1, blockSize);
 
       LOG.info("File " + file1 + " created with file size " + fileSize +
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestListFilesInDFS.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestListFilesInDFS.java
index ec9e7e2e48..d68563dec8 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestListFilesInDFS.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestListFilesInDFS.java
@@ -38,6 +38,7 @@ public class TestListFilesInDFS extends TestListFiles {
 
   @BeforeClass
   public static void testSetUp() throws Exception {
+    setTestPaths(new Path("/tmp/TestListFilesInDFS"));
     cluster = new MiniDFSCluster.Builder(conf).build();
     fs = cluster.getFileSystem();
     fs.delete(TEST_DIR, true);
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestRBWBlockInvalidation.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestRBWBlockInvalidation.java
index c07fae4773..0b7eaeeed9 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestRBWBlockInvalidation.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/blockmanagement/TestRBWBlockInvalidation.java
@@ -67,7 +67,7 @@ public void testBlockInvalidationWhenRBWReplicaMissedInDN()
     try {
       final FSNamesystem namesystem = cluster.getNamesystem();
       FileSystem fs = cluster.getFileSystem();
-      Path testPath = new Path(MiniDFSCluster.getBaseDirectory(), "foo1");
+      Path testPath = new Path("/tmp/TestRBWBlockInvalidation", "foo1");
       out = fs.create(testPath, (short) 2);
       out.writeBytes("HDFS-3157: " + testPath);
       out.hsync();
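
Usage note (not part of the patch): a minimal sketch of how a subclass is expected to use the new setTestPaths() hook to re-root the test paths, modeled on the TestListFilesInDFS change above. The class name and "/tmp/TestListFilesInMyFS" path below are illustrative only.

    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.junit.BeforeClass;

    // Hypothetical subclass of TestListFiles; mirrors the pattern used by
    // TestListFilesInDFS in this patch.
    public class TestListFilesInMyFS extends TestListFiles {

      @BeforeClass
      public static void testSetUp() throws Exception {
        // Re-root TEST_DIR (and FILE1/DIR1/FILE2/FILE3) before any test runs.
        setTestPaths(new Path("/tmp/TestListFilesInMyFS"));
        fs = FileSystem.getLocal(conf);
        fs.delete(TEST_DIR, true);
      }
    }

Because the base class now initializes its default paths in a static block, a subclass that never calls setTestPaths() keeps the original test.build.data-based directory.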