From cc6c265171aace1e57653e777a4a73a747221086 Mon Sep 17 00:00:00 2001
From: Vinayakumar B
Date: Thu, 16 Jun 2016 16:47:06 +0530
Subject: [PATCH] HDFS-10256. Use GenericTestUtils.getTestDir method in tests
 for temporary directories (Contributed by Vinayakumar B)

---
 .../java/org/apache/hadoop/fs/FileUtil.java   | 14 ++++
 .../apache/hadoop/fs/TestGetSpaceUsed.java    |  5 +-
 .../security/token/TestDtUtilShell.java       |  3 +-
 .../shortcircuit/TestShortCircuitShm.java     |  4 +-
 .../TestHttpFSFWithSWebhdfsFileSystem.java    |  5 +-
 .../TestHttpFSFileSystemLocalFileSystem.java  |  5 +-
 .../hdfs/nfs/nfs3/TestNfs3HttpServer.java     |  5 +-
 .../apache/hadoop/cli/TestCryptoAdminCLI.java |  4 +-
 .../fs/TestEnhancedByteBufferAccess.java      | 12 ++--
 ...TestSWebHdfsFileContextMainOperations.java |  6 +-
 .../TestWebHdfsFileContextMainOperations.java |  7 +-
 .../apache/hadoop/hdfs/MiniDFSCluster.java    | 17 +++--
 .../hdfs/TestAppendSnapshotTruncate.java      |  4 +-
 .../org/apache/hadoop/hdfs/TestDFSShell.java  |  4 +-
 .../hadoop/hdfs/TestDFSUpgradeFromImage.java  | 18 ++---
 .../org/apache/hadoop/hdfs/TestDFSUtil.java   |  3 +-
 .../hdfs/TestDatanodeLayoutUpgrade.java       | 21 +++---
 ...tDatanodeStartupFixesLegacyStorageIDs.java | 12 ++--
 .../apache/hadoop/hdfs/TestFetchImage.java    |  5 +-
 .../org/apache/hadoop/hdfs/TestPread.java     |  2 +-
 .../org/apache/hadoop/hdfs/TestSeekBug.java   |  3 +-
 .../sasl/SaslDataTransferTestCase.java        |  5 +-
 .../hdfs/qjournal/TestSecureNNWithQJM.java    |  5 +-
 .../hdfs/server/balancer/TestBalancer.java    |  3 +-
 .../server/datanode/TestDataNodeUUID.java     |  3 +-
 .../hdfs/server/datanode/TestDataStorage.java |  3 +-
 .../server/namenode/TestCreateEditsLog.java   |  6 +-
 .../namenode/TestDecommissioningStatus.java   | 65 +++++--------------
 .../namenode/TestEditLogFileInputStream.java  |  3 +-
 .../namenode/TestFSImageWithSnapshot.java     |  4 +-
 .../hadoop/hdfs/server/namenode/TestFsck.java | 11 ++--
 .../namenode/TestNameNodeHttpServer.java      |  5 +-
 .../server/namenode/TestNameNodeRecovery.java |  5 +-
 .../TestNameNodeRespectsBindHostKeys.java     |  5 +-
 .../snapshot/TestRenameWithSnapshots.java     |  2 +-
 .../namenode/snapshot/TestSnapshot.java       |  4 +-
 .../apache/hadoop/hdfs/tools/TestGetConf.java | 62 ++++--------------
 .../hadoop/hdfs/util/HostsFileWriter.java     | 32 +++++++--
 .../util/TestCombinedHostsFileReader.java     |  5 +-
 .../hadoop/hdfs/web/TestHttpsFileSystem.java  |  5 +-
 .../hadoop/hdfs/web/TestWebHdfsTokens.java    |  9 +--
 .../org/apache/hadoop/test/PathUtils.java     |  7 +-
 42 files changed, 189 insertions(+), 219 deletions(-)

diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
index e2d6ecdc07..53ced77bb9 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
@@ -98,6 +98,20 @@ public static Path[] stat2Paths(FileStatus[] stats, Path path) {
     return stat2Paths(stats);
   }
 
+  /**
+   * Register all files recursively to be deleted on exit.
+   * @param file File/directory to be deleted
+   */
+  public static void fullyDeleteOnExit(final File file) {
+    file.deleteOnExit();
+    if (file.isDirectory()) {
+      File[] files = file.listFiles();
+      for (File child : files) {
+        fullyDeleteOnExit(child);
+      }
+    }
+  }
+
   /**
    * Delete a directory and all its contents.  If
    * we return false, the directory may be partially-deleted.
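The new FileUtil.fullyDeleteOnExit helper and the GenericTestUtils.getTestDir/getTempPath lookups are the whole vocabulary of this change; every test file below is the same mechanical substitution. For reference, a minimal, hypothetical before/after sketch (the test class is invented for illustration; the GenericTestUtils and FileUtil calls are the ones this patch introduces or uses):

    import java.io.File;

    import org.apache.hadoop.fs.FileUtil;
    import org.apache.hadoop.test.GenericTestUtils;
    import org.junit.Before;

    public class TestTempDirExample {
      // Before this patch a test would hard-code the system property:
      //   new File(System.getProperty("test.build.data", "/tmp"), "example");
      // After it, the helper resolves the managed base directory in one place:
      private static final File TEST_DIR =
          GenericTestUtils.getTestDir("example");

      @Before
      public void setUp() {
        // getTempPath returns a String path under the same base directory.
        File scratch = new File(GenericTestUtils.getTempPath("scratch"));
        scratch.mkdirs();
        // Register the whole tree for deletion at JVM exit, the way
        // MiniDFSCluster.shutdown now does when the dfs dir is kept.
        FileUtil.fullyDeleteOnExit(scratch);
      }
    }
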
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGetSpaceUsed.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGetSpaceUsed.java index f436713ea6..d696dbfe40 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGetSpaceUsed.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestGetSpaceUsed.java @@ -18,6 +18,7 @@ package org.apache.hadoop.fs; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.test.GenericTestUtils; import org.junit.After; import org.junit.Before; import org.junit.Test; @@ -28,8 +29,8 @@ import static org.junit.Assert.*; public class TestGetSpaceUsed { - final static private File DIR = new File( - System.getProperty("test.build.data", "/tmp"), "TestGetSpaceUsed"); + final static private File DIR = + GenericTestUtils.getTestDir("TestGetSpaceUsed"); @Before public void setUp() { diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/TestDtUtilShell.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/TestDtUtilShell.java index 5f8f8305b6..1c259127a0 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/TestDtUtilShell.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/security/token/TestDtUtilShell.java @@ -31,6 +31,7 @@ import org.apache.hadoop.security.Credentials; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.TokenIdentifier; +import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.security.token.DtFetcher; import static org.junit.Assert.assertEquals; @@ -71,7 +72,7 @@ public class TestDtUtilShell { private final ByteArrayOutputStream outContent = new ByteArrayOutputStream(); private final Path workDir = new Path( - System.getProperty("test.build.data", "/tmp"), "TestDtUtilShell"); + GenericTestUtils.getTestDir("TestDtUtilShell").getAbsolutePath()); private final Path tokenFile = new Path(workDir, "testPrintTokenFile"); private final Path tokenFile2 = new Path(workDir, "testPrintTokenFile2"); private final Path tokenLegacyFile = new Path(workDir, "testPrintTokenFile3"); diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/test/java/org/apache/hadoop/hdfs/shortcircuit/TestShortCircuitShm.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/test/java/org/apache/hadoop/hdfs/shortcircuit/TestShortCircuitShm.java index 9d48444798..0c202edac6 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-client/src/test/java/org/apache/hadoop/hdfs/shortcircuit/TestShortCircuitShm.java +++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/test/java/org/apache/hadoop/hdfs/shortcircuit/TestShortCircuitShm.java @@ -22,6 +22,7 @@ import org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm.ShmId; import org.apache.hadoop.hdfs.shortcircuit.ShortCircuitShm.Slot; import org.apache.hadoop.io.nativeio.SharedFileDescriptorFactory; +import org.apache.hadoop.test.GenericTestUtils; import org.junit.Assert; import org.junit.Assume; import org.junit.Before; @@ -38,8 +39,7 @@ public class TestShortCircuitShm { public static final Logger LOG = LoggerFactory.getLogger( TestShortCircuitShm.class); - private static final File TEST_BASE = - new File(System.getProperty("test.build.data", "/tmp")); + private static final File TEST_BASE = GenericTestUtils.getTestDir(); @Before public void before() { diff --git 
a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestHttpFSFWithSWebhdfsFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestHttpFSFWithSWebhdfsFileSystem.java index 9c3d700103..d8f860a94d 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestHttpFSFWithSWebhdfsFileSystem.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestHttpFSFWithSWebhdfsFileSystem.java @@ -23,6 +23,7 @@ import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.hdfs.web.SWebHdfsFileSystem; import org.apache.hadoop.security.ssl.KeyStoreTestUtil; +import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.test.TestJettyHelper; import org.junit.AfterClass; import org.junit.runner.RunWith; @@ -37,8 +38,8 @@ public class TestHttpFSFWithSWebhdfsFileSystem extends TestHttpFSWithHttpFSFileSystem { private static String classpathDir; - private static final String BASEDIR = System.getProperty("test.build.dir", - "target/test-dir") + "/" + UUID.randomUUID(); + private static final String BASEDIR = + GenericTestUtils.getTempPath(UUID.randomUUID().toString()); private static Configuration sslConf; diff --git a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestHttpFSFileSystemLocalFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestHttpFSFileSystemLocalFileSystem.java index e9a80e3465..83bcb2efac 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestHttpFSFileSystemLocalFileSystem.java +++ b/hadoop-hdfs-project/hadoop-hdfs-httpfs/src/test/java/org/apache/hadoop/fs/http/client/TestHttpFSFileSystemLocalFileSystem.java @@ -25,6 +25,7 @@ import org.apache.hadoop.fs.permission.FsAction; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.fs.Path; +import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.test.TestDirHelper; import org.junit.Assert; import org.junit.runner.RunWith; @@ -41,9 +42,7 @@ public class TestHttpFSFileSystemLocalFileSystem extends BaseTestHttpFSWith { static { new TestDirHelper(); - String prefix = - System.getProperty("test.build.dir", "target/test-dir") + "/local"; - File file = new File(prefix); + File file = GenericTestUtils.getTestDir("local"); file.mkdirs(); PATH_PREFIX = file.getAbsolutePath(); } diff --git a/hadoop-hdfs-project/hadoop-hdfs-nfs/src/test/java/org/apache/hadoop/hdfs/nfs/nfs3/TestNfs3HttpServer.java b/hadoop-hdfs-project/hadoop-hdfs-nfs/src/test/java/org/apache/hadoop/hdfs/nfs/nfs3/TestNfs3HttpServer.java index 46dbd42f4c..952aae2b58 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-nfs/src/test/java/org/apache/hadoop/hdfs/nfs/nfs3/TestNfs3HttpServer.java +++ b/hadoop-hdfs-project/hadoop-hdfs-nfs/src/test/java/org/apache/hadoop/hdfs/nfs/nfs3/TestNfs3HttpServer.java @@ -30,13 +30,14 @@ import org.apache.hadoop.hdfs.nfs.conf.NfsConfiguration; import org.apache.hadoop.http.HttpConfig; import org.apache.hadoop.security.ssl.KeyStoreTestUtil; +import org.apache.hadoop.test.GenericTestUtils; import org.junit.AfterClass; import org.junit.BeforeClass; import org.junit.Test; public class TestNfs3HttpServer { - private static final String BASEDIR = System.getProperty("test.build.dir", - "target/test-dir") + "/" + TestNfs3HttpServer.class.getSimpleName(); + private static final String BASEDIR = + 
GenericTestUtils.getTempPath(TestNfs3HttpServer.class.getSimpleName()); private static NfsConfiguration conf = new NfsConfiguration(); private static MiniDFSCluster cluster; private static String keystoresDir; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestCryptoAdminCLI.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestCryptoAdminCLI.java index bd5349d82d..7c6aaeb9ed 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestCryptoAdminCLI.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/cli/TestCryptoAdminCLI.java @@ -44,6 +44,7 @@ import org.apache.hadoop.hdfs.MiniDFSCluster; import org.apache.hadoop.hdfs.tools.CryptoAdmin; import org.apache.hadoop.security.authorize.PolicyProvider; +import org.apache.hadoop.test.GenericTestUtils; import org.junit.After; import org.junit.Before; import org.junit.Test; @@ -63,8 +64,7 @@ public void setUp() throws Exception { HDFSPolicyProvider.class, PolicyProvider.class); conf.setInt(DFSConfigKeys.DFS_REPLICATION_KEY, 1); - tmpDir = new File(System.getProperty("test.build.data", "target"), - UUID.randomUUID().toString()).getAbsoluteFile(); + tmpDir = GenericTestUtils.getTestDir(UUID.randomUUID().toString()); final Path jksPath = new Path(tmpDir.toString(), "test.jks"); conf.set(DFSConfigKeys.DFS_ENCRYPTION_KEY_PROVIDER_URI, JavaKeyStoreProvider.SCHEME_NAME + "://file" + jksPath.toUri()); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestEnhancedByteBufferAccess.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestEnhancedByteBufferAccess.java index 32a34e8a68..0ccc07a833 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestEnhancedByteBufferAccess.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestEnhancedByteBufferAccess.java @@ -557,27 +557,25 @@ public void testFallbackRead() throws Exception { */ @Test public void testIndirectFallbackReads() throws Exception { - final File TEST_DIR = new File( - System.getProperty("test.build.data","build/test/data")); - final String TEST_PATH = TEST_DIR + File.separator + - "indirectFallbackTestFile"; + final String testPath = GenericTestUtils + .getTestDir("indirectFallbackTestFile").getAbsolutePath(); final int TEST_FILE_LENGTH = 16385; final int RANDOM_SEED = 23453; FileOutputStream fos = null; FileInputStream fis = null; try { - fos = new FileOutputStream(TEST_PATH); + fos = new FileOutputStream(testPath); Random random = new Random(RANDOM_SEED); byte original[] = new byte[TEST_FILE_LENGTH]; random.nextBytes(original); fos.write(original); fos.close(); fos = null; - fis = new FileInputStream(TEST_PATH); + fis = new FileInputStream(testPath); testFallbackImpl(fis, original); } finally { IOUtils.cleanup(LOG, fos, fis); - new File(TEST_PATH).delete(); + new File(testPath).delete(); } } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestSWebHdfsFileContextMainOperations.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestSWebHdfsFileContextMainOperations.java index 02110ac703..84bd98bf67 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestSWebHdfsFileContextMainOperations.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestSWebHdfsFileContextMainOperations.java @@ -24,6 +24,7 @@ import org.apache.hadoop.security.UserGroupInformation; import 
org.apache.hadoop.security.ssl.KeyStoreTestUtil; import org.apache.hadoop.security.ssl.SSLFactory; +import org.apache.hadoop.test.GenericTestUtils; import org.junit.AfterClass; import org.junit.BeforeClass; @@ -50,9 +51,8 @@ public class TestSWebHdfsFileContextMainOperations private static final HdfsConfiguration CONF = new HdfsConfiguration(); - private static final String BASEDIR = - System.getProperty("test.build.dir", "target/test-dir") + "/" - + TestSWebHdfsFileContextMainOperations.class.getSimpleName(); + private static final String BASEDIR = GenericTestUtils + .getTempPath(TestSWebHdfsFileContextMainOperations.class.getSimpleName()); protected static int numBlocks = 2; protected static final byte[] data = getFileData(numBlocks, getDefaultBlockSize()); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestWebHdfsFileContextMainOperations.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestWebHdfsFileContextMainOperations.java index ec91cd1667..72fc6e6274 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestWebHdfsFileContextMainOperations.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/fs/TestWebHdfsFileContextMainOperations.java @@ -23,6 +23,7 @@ import org.apache.hadoop.hdfs.HdfsConfiguration; import org.apache.hadoop.hdfs.MiniDFSCluster; import org.apache.hadoop.security.UserGroupInformation; +import org.apache.hadoop.test.GenericTestUtils; import org.junit.AfterClass; import org.junit.Before; import org.junit.BeforeClass; @@ -63,7 +64,7 @@ public Path getDefaultWorkingDirectory() { @Override protected FileContextTestHelper createFileContextHelper() { - return new FileContextTestHelper("/tmp/TestWebHdfsFileContextMainOperations"); + return new FileContextTestHelper(); } public URI getWebhdfsUrl() { @@ -88,8 +89,8 @@ public static void clusterSetupAtBeginning() public void setUp() throws Exception { URI webhdfsUrlReal = getWebhdfsUrl(); Path testBuildData = new Path( - webhdfsUrlReal + "/build/test/data/" + RandomStringUtils - .randomAlphanumeric(10)); + webhdfsUrlReal + "/" + GenericTestUtils.DEFAULT_TEST_DATA_PATH + + RandomStringUtils.randomAlphanumeric(10)); Path rootPath = new Path(testBuildData, "root-uri"); localFsRootPath = rootPath.makeQualified(webhdfsUrlReal, null); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/MiniDFSCluster.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/MiniDFSCluster.java index ae6a133bbb..3b3a88b391 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/MiniDFSCluster.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/MiniDFSCluster.java @@ -103,7 +103,6 @@ import org.apache.hadoop.hdfs.server.datanode.SimulatedFSDataset; import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsDatasetSpi; import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsVolumeSpi; -import org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsDatasetUtil; import org.apache.hadoop.hdfs.server.datanode.fsdataset.impl.FsVolumeImpl; import org.apache.hadoop.hdfs.server.namenode.EditLogFileOutputStream; import org.apache.hadoop.hdfs.server.namenode.FSNamesystem; @@ -142,7 +141,8 @@ public class MiniDFSCluster { private static final String NAMESERVICE_ID_PREFIX = "nameserviceId"; private static final Log LOG = LogFactory.getLog(MiniDFSCluster.class); /** System property to set the data dir: {@value} */ - public static final String 
PROP_TEST_BUILD_DATA = "test.build.data"; + public static final String PROP_TEST_BUILD_DATA = + GenericTestUtils.SYSPROP_TEST_DATA_DIR; /** Configuration option to set the data dir: {@value} */ public static final String HDFS_MINIDFS_BASEDIR = "hdfs.minidfs.basedir"; public static final String DFS_NAMENODE_SAFEMODE_EXTENSION_TESTING_KEY @@ -1931,12 +1931,11 @@ public void shutdown(boolean deleteDfsDir, boolean closeFileSystem) { ShutdownHookManager.get().clearShutdownHooks(); if (base_dir != null) { if (deleteDfsDir) { - base_dir.delete(); + FileUtil.fullyDelete(base_dir); } else { - base_dir.deleteOnExit(); + FileUtil.fullyDeleteOnExit(base_dir); } } - } /** @@ -2738,13 +2737,13 @@ protected String determineDfsBaseDir() { /** * Get the base directory for any DFS cluster whose configuration does - * not explicitly set it. This is done by retrieving the system property - * {@link #PROP_TEST_BUILD_DATA} (defaulting to "build/test/data" ), - * and returning that directory with a subdir of /dfs. + * not explicitly set it. This is done via + * {@link GenericTestUtils#getTestDir()}. * @return a directory for use as a miniDFS filesystem. */ public static String getBaseDirectory() { - return System.getProperty(PROP_TEST_BUILD_DATA, "build/test/data") + "/dfs/"; + return GenericTestUtils.getTestDir("dfs").getAbsolutePath() + + File.separator; } /** diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestAppendSnapshotTruncate.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestAppendSnapshotTruncate.java index f41f80946a..0280f8c866 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestAppendSnapshotTruncate.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestAppendSnapshotTruncate.java @@ -115,9 +115,7 @@ public void testAST() throws Exception { dfs.mkdirs(dir); dfs.allowSnapshot(dir); - final File localDir = new File( - System.getProperty("test.build.data", "target/test/data") - + dirPathString); + final File localDir = GenericTestUtils.getTestDir(dirPathString); if (localDir.exists()) { FileUtil.fullyDelete(localDir); } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java index e31de13760..24d8b90f16 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSShell.java @@ -3274,8 +3274,8 @@ public void testCopyReserved() throws IOException { assertTrue(e.getMessage().contains("Invalid path name /.reserved")); } - final String testdir = System.getProperty("test.build.data") - + "/TestDFSShell-testCopyReserved"; + final String testdir = GenericTestUtils.getTempPath( + "TestDFSShell-testCopyReserved"); final Path hdfsTestDir = new Path(testdir); writeFile(fs, new Path(testdir, "testFileForPut")); final Path src = new Path(hdfsTestDir, "srcfile"); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUpgradeFromImage.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUpgradeFromImage.java index 60bea7ab59..5dee6e0f08 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUpgradeFromImage.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUpgradeFromImage.java @@ -89,8 +89,10 @@ private 
static class ReferenceFileInfo { static { upgradeConf = new HdfsConfiguration(); upgradeConf.setInt(DFSConfigKeys.DFS_DATANODE_SCAN_PERIOD_HOURS_KEY, -1); // block scanning off - if (System.getProperty("test.build.data") == null) { // to allow test to be run outside of Maven - System.setProperty("test.build.data", "build/test/data"); + if (System.getProperty(GenericTestUtils.SYSPROP_TEST_DATA_DIR) == null) { + // to allow test to be run outside of Maven + System.setProperty(GenericTestUtils.SYSPROP_TEST_DATA_DIR, + GenericTestUtils.DEFAULT_TEST_DATA_DIR); } } @@ -105,19 +107,19 @@ public interface ClusterVerifier { void unpackStorage(String tarFileName, String referenceName) throws IOException { - String tarFile = System.getProperty("test.cache.data", "build/test/cache") + String tarFile = System.getProperty("test.cache.data", "target/test/cache") + "/" + tarFileName; - String dataDir = System.getProperty("test.build.data", "build/test/data"); + File dataDir = GenericTestUtils.getTestDir(); File dfsDir = new File(dataDir, "dfs"); if ( dfsDir.exists() && !FileUtil.fullyDelete(dfsDir) ) { throw new IOException("Could not delete dfs directory '" + dfsDir + "'"); } LOG.info("Unpacking " + tarFile); - FileUtil.unTar(new File(tarFile), new File(dataDir)); + FileUtil.unTar(new File(tarFile), dataDir); //Now read the reference info BufferedReader reader = new BufferedReader(new FileReader( - System.getProperty("test.cache.data", "build/test/cache") + System.getProperty("test.cache.data", "target/test/cache") + "/" + referenceName)); String line; while ( (line = reader.readLine()) != null ) { @@ -631,10 +633,10 @@ public void testUpgradeFromRel1BBWImage() throws IOException { unpackStorage(HADOOP1_BBW_IMAGE, HADOOP_DFS_DIR_TXT); Configuration conf = new Configuration(upgradeConf); conf.set(DFSConfigKeys.DFS_DATANODE_DATA_DIR_KEY, - System.getProperty("test.build.data") + File.separator + + GenericTestUtils.getTempPath( "dfs" + File.separator + "data" + File.separator + - "data1"); + "data1")); upgradeAndVerify(new MiniDFSCluster.Builder(conf). numDataNodes(1).enableManagedDfsDirsRedundancy(false). 
manageDataDfsDirs(false), null); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUtil.java index 845f5c86c5..448ef6a627 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUtil.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDFSUtil.java @@ -874,8 +874,7 @@ private static void checkAllResults(Long[] toCheck, boolean shouldSucceed) { @Test public void testGetPassword() throws Exception { - File testDir = new File(System.getProperty("test.build.data", - "target/test-dir")); + File testDir = GenericTestUtils.getTestDir(); Configuration conf = new Configuration(); final Path jksPath = new Path(testDir.toString(), "test.jks"); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDatanodeLayoutUpgrade.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDatanodeLayoutUpgrade.java index 6b60b2bcb9..0e2f4e4ee0 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDatanodeLayoutUpgrade.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDatanodeLayoutUpgrade.java @@ -19,6 +19,7 @@ package org.apache.hadoop.hdfs; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.test.GenericTestUtils; import org.junit.Test; import java.io.File; @@ -42,11 +43,11 @@ public void testUpgradeToIdBasedLayout() throws IOException { TestDFSUpgradeFromImage upgrade = new TestDFSUpgradeFromImage(); upgrade.unpackStorage(HADOOP24_DATANODE, HADOOP_DATANODE_DIR_TXT); Configuration conf = new Configuration(TestDFSUpgradeFromImage.upgradeConf); - conf.set(DFSConfigKeys.DFS_DATANODE_DATA_DIR_KEY, - new File(System.getProperty("test.build.data"), + conf.set( + DFSConfigKeys.DFS_DATANODE_DATA_DIR_KEY, GenericTestUtils.getTestDir( "dfs" + File.separator + "data").toURI().toString()); - conf.set(DFSConfigKeys.DFS_NAMENODE_NAME_DIR_KEY, - new File(System.getProperty("test.build.data"), + conf.set( + DFSConfigKeys.DFS_NAMENODE_NAME_DIR_KEY, GenericTestUtils.getTestDir( "dfs" + File.separator + "name").toURI().toString()); upgrade.upgradeAndVerify(new MiniDFSCluster.Builder(conf).numDataNodes(1) .manageDataDfsDirs(false).manageNameDfsDirs(false), null); @@ -61,12 +62,12 @@ public void testUpgradeFrom256To32Layout() throws IOException { TestDFSUpgradeFromImage upgrade = new TestDFSUpgradeFromImage(); upgrade.unpackStorage(HADOOP_56_DN_LAYOUT, HADOOP_56_DN_LAYOUT_TXT); Configuration conf = new Configuration(TestDFSUpgradeFromImage.upgradeConf); - conf.set(DFSConfigKeys.DFS_DATANODE_DATA_DIR_KEY, - new File(System.getProperty("test.build.data"), "dfs" + File.separator - + "data").toURI().toString()); - conf.set(DFSConfigKeys.DFS_NAMENODE_NAME_DIR_KEY, - new File(System.getProperty("test.build.data"), "dfs" + File.separator - + "name").toURI().toString()); + conf.set( + DFSConfigKeys.DFS_DATANODE_DATA_DIR_KEY, GenericTestUtils.getTestDir( + "dfs" + File.separator + "data").toURI().toString()); + conf.set( + DFSConfigKeys.DFS_NAMENODE_NAME_DIR_KEY, GenericTestUtils.getTestDir( + "dfs" + File.separator + "name").toURI().toString()); upgrade.upgradeAndVerify(new MiniDFSCluster.Builder(conf).numDataNodes(1) .manageDataDfsDirs(false).manageNameDfsDirs(false), null); } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDatanodeStartupFixesLegacyStorageIDs.java 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDatanodeStartupFixesLegacyStorageIDs.java index e262abcb6a..659a8c162b 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDatanodeStartupFixesLegacyStorageIDs.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDatanodeStartupFixesLegacyStorageIDs.java @@ -87,12 +87,12 @@ public void verifyClusterPostUpgrade(MiniDFSCluster cluster) throws IOException private static void initStorageDirs(final Configuration conf, final String testName) { - conf.set(DFSConfigKeys.DFS_DATANODE_DATA_DIR_KEY, - System.getProperty("test.build.data") + File.separator + - testName + File.separator + "dfs" + File.separator + "data"); - conf.set(DFSConfigKeys.DFS_NAMENODE_NAME_DIR_KEY, - System.getProperty("test.build.data") + File.separator + - testName + File.separator + "dfs" + File.separator + "name"); + conf.set( + DFSConfigKeys.DFS_DATANODE_DATA_DIR_KEY, GenericTestUtils.getTempPath( + testName + File.separator + "dfs" + File.separator + "data")); + conf.set( + DFSConfigKeys.DFS_NAMENODE_NAME_DIR_KEY, GenericTestUtils.getTempPath( + testName + File.separator + "dfs" + File.separator + "name")); } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFetchImage.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFetchImage.java index 6f6100302a..d8218b642d 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFetchImage.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestFetchImage.java @@ -32,13 +32,14 @@ import org.apache.hadoop.hdfs.tools.DFSAdmin; import org.apache.hadoop.hdfs.util.MD5FileUtils; import org.apache.hadoop.io.MD5Hash; +import org.apache.hadoop.test.GenericTestUtils; import org.junit.AfterClass; import org.junit.Test; public class TestFetchImage { - private static final File FETCHED_IMAGE_FILE = new File( - System.getProperty("test.build.dir"), "target/fetched-image-dir"); + private static final File FETCHED_IMAGE_FILE = + GenericTestUtils.getTestDir("target/fetched-image-dir"); // Shamelessly stolen from NNStorage. 
private static final Pattern IMAGE_REGEX = Pattern.compile("fsimage_(\\d+)"); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestPread.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestPread.java index 2789e33e65..c761225465 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestPread.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestPread.java @@ -482,7 +482,7 @@ public void testPreadLocalFS() throws IOException { Configuration conf = new HdfsConfiguration(); FileSystem fileSys = FileSystem.getLocal(conf); try { - Path file1 = new Path("build/test/data", "preadtest.dat"); + Path file1 = new Path(GenericTestUtils.getTempPath("preadtest.dat")); writeFile(fileSys, file1); pReadFile(fileSys, file1); cleanupFile(fileSys, file1); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestSeekBug.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestSeekBug.java index c9f5293e1d..9dd2987f55 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestSeekBug.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestSeekBug.java @@ -30,6 +30,7 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.io.IOUtils; +import org.apache.hadoop.test.GenericTestUtils; import org.junit.Test; /** @@ -204,7 +205,7 @@ public void testSeekBugLocalFS() throws IOException { Configuration conf = new HdfsConfiguration(); FileSystem fileSys = FileSystem.getLocal(conf); try { - Path file1 = new Path("build/test/data", "seektest.dat"); + Path file1 = new Path(GenericTestUtils.getTempPath("seektest.dat")); DFSTestUtil.createFile(fileSys, file1, ONEMB, ONEMB, fileSys.getDefaultBlockSize(file1), fileSys.getDefaultReplication(file1), seed); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/SaslDataTransferTestCase.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/SaslDataTransferTestCase.java index 37b2a2c060..4eade6ad5e 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/SaslDataTransferTestCase.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocol/datatransfer/sasl/SaslDataTransferTestCase.java @@ -43,6 +43,7 @@ import org.apache.hadoop.security.SecurityUtil; import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod; import org.apache.hadoop.security.ssl.KeyStoreTestUtil; +import org.apache.hadoop.test.GenericTestUtils; import org.junit.AfterClass; import org.junit.BeforeClass; @@ -74,8 +75,8 @@ public static String getHdfsKeytab() { @BeforeClass public static void initKdc() throws Exception { - baseDir = new File(System.getProperty("test.build.dir", "target/test-dir"), - SaslDataTransferTestCase.class.getSimpleName()); + baseDir = GenericTestUtils + .getTestDir(SaslDataTransferTestCase.class.getSimpleName()); FileUtil.fullyDelete(baseDir); assertTrue(baseDir.mkdirs()); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/TestSecureNNWithQJM.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/TestSecureNNWithQJM.java index 197759edf5..9abfb9cfba 100644 --- 
a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/TestSecureNNWithQJM.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/qjournal/TestSecureNNWithQJM.java @@ -55,6 +55,7 @@ import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.security.UserGroupInformation.AuthenticationMethod; import org.apache.hadoop.security.ssl.KeyStoreTestUtil; +import org.apache.hadoop.test.GenericTestUtils; import org.junit.After; import org.junit.AfterClass; import org.junit.Before; @@ -82,8 +83,8 @@ public class TestSecureNNWithQJM { @BeforeClass public static void init() throws Exception { - baseDir = new File(System.getProperty("test.build.dir", "target/test-dir"), - TestSecureNNWithQJM.class.getSimpleName()); + baseDir = + GenericTestUtils.getTestDir(TestSecureNNWithQJM.class.getSimpleName()); FileUtil.fullyDelete(baseDir); assertTrue(baseDir.mkdirs()); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/balancer/TestBalancer.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/balancer/TestBalancer.java index 2ad8edd957..738cfe6146 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/balancer/TestBalancer.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/balancer/TestBalancer.java @@ -211,8 +211,7 @@ static void initConfWithStripe(Configuration conf) { } static void initSecureConf(Configuration conf) throws Exception { - baseDir = new File(System.getProperty("test.build.dir", "target/test-dir"), - TestBalancer.class.getSimpleName()); + baseDir = GenericTestUtils.getTestDir(TestBalancer.class.getSimpleName()); FileUtil.fullyDelete(baseDir); assertTrue(baseDir.mkdirs()); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeUUID.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeUUID.java index ebf7c35841..0d40fc72af 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeUUID.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataNodeUUID.java @@ -25,6 +25,7 @@ import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.hdfs.HdfsConfiguration; import org.apache.hadoop.hdfs.MiniDFSCluster; +import org.apache.hadoop.test.GenericTestUtils; import org.junit.Test; import java.io.File; @@ -69,7 +70,7 @@ public void testDatanodeUuid() throws Exception { @Test(timeout = 10000) public void testUUIDRegeneration() throws Exception { - File baseDir = new File(System.getProperty("test.build.data")); + File baseDir = GenericTestUtils.getTestDir(); File disk1 = new File(baseDir, "disk1"); File disk2 = new File(baseDir, "disk2"); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataStorage.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataStorage.java index 405d2e967f..446a77b255 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataStorage.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataStorage.java @@ -47,8 +47,7 @@ public class TestDataStorage { private final static String BUILD_VERSION = "2.0"; private final static String SOFTWARE_VERSION = "2.0"; private final static long 
CTIME = 1; - private final static File TEST_DIR = - new File(System.getProperty("test.build.data") + "/dstest"); + private final static File TEST_DIR = GenericTestUtils.getTestDir("dstest"); private final static StartupOption START_OPT = StartupOption.REGULAR; private DataNode mockDN = Mockito.mock(DataNode.class); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestCreateEditsLog.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestCreateEditsLog.java index 67e5e330da..d3527f5ce3 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestCreateEditsLog.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestCreateEditsLog.java @@ -22,6 +22,7 @@ import java.io.File; +import org.apache.hadoop.test.GenericTestUtils; import org.junit.After; import org.junit.Before; import org.junit.Test; @@ -42,9 +43,8 @@ public class TestCreateEditsLog { private static final File HDFS_DIR = new File( MiniDFSCluster.getBaseDirectory()).getAbsoluteFile(); - private static final File TEST_DIR = new File( - System.getProperty("test.build.data", "build/test/data"), - "TestCreateEditsLog").getAbsoluteFile(); + private static final File TEST_DIR = + GenericTestUtils.getTestDir("TestCreateEditsLog"); private MiniDFSCluster cluster; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestDecommissioningStatus.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestDecommissioningStatus.java index 341933ed91..7c39bf884f 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestDecommissioningStatus.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestDecommissioningStatus.java @@ -25,7 +25,6 @@ import java.net.InetSocketAddress; import java.util.ArrayList; import java.util.Arrays; -import java.util.Iterator; import java.util.List; import java.util.Random; @@ -55,6 +54,7 @@ import org.apache.hadoop.hdfs.server.datanode.DataNode; import org.apache.hadoop.hdfs.server.datanode.DataNodeTestUtils; import org.apache.hadoop.hdfs.tools.DFSAdmin; +import org.apache.hadoop.hdfs.util.HostsFileWriter; import org.apache.log4j.Level; import org.apache.log4j.Logger; import org.junit.After; @@ -71,10 +71,8 @@ public class TestDecommissioningStatus { private static final int numDatanodes = 2; private static MiniDFSCluster cluster; private static FileSystem fileSys; - private static Path excludeFile; - private static FileSystem localFileSys; + private static HostsFileWriter hostsFileWriter; private static Configuration conf; - private static Path dir; final ArrayList decommissionedNodes = new ArrayList(numDatanodes); @@ -85,14 +83,8 @@ public void setUp() throws Exception { false); // Set up the hosts/exclude files. 
-    localFileSys = FileSystem.getLocal(conf);
-    Path workingDir = localFileSys.getWorkingDirectory();
-    dir = new Path(workingDir, "build/test/data/work-dir/decommission");
-    assertTrue(localFileSys.mkdirs(dir));
-    excludeFile = new Path(dir, "exclude");
-    conf.set(DFSConfigKeys.DFS_HOSTS_EXCLUDE, excludeFile.toUri().getPath());
-    Path includeFile = new Path(dir, "include");
-    conf.set(DFSConfigKeys.DFS_HOSTS, includeFile.toUri().getPath());
+    hostsFileWriter = new HostsFileWriter();
+    hostsFileWriter.initialize(conf, "work-dir/decommission");
 
     conf.setInt(DFSConfigKeys.DFS_NAMENODE_HEARTBEAT_RECHECK_INTERVAL_KEY, 1000);
     conf.setInt(DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY, 1);
@@ -102,9 +94,6 @@ public void setUp() throws Exception {
     conf.setInt(DFSConfigKeys.DFS_NAMENODE_DECOMMISSION_INTERVAL_KEY, 1);
     conf.setLong(DFSConfigKeys.DFS_DATANODE_BALANCE_BANDWIDTHPERSEC_KEY, 1);
 
-    writeConfigFile(localFileSys, excludeFile, null);
-    writeConfigFile(localFileSys, includeFile, null);
-
     cluster = new MiniDFSCluster.Builder(conf).numDataNodes(numDatanodes).build();
     cluster.waitActive();
     fileSys = cluster.getFileSystem();
@@ -115,31 +104,13 @@ public void setUp() throws Exception {
 
   @After
   public void tearDown() throws Exception {
-    if (localFileSys != null ) cleanupFile(localFileSys, dir);
+    if (hostsFileWriter != null) {
+      hostsFileWriter.cleanup();
+    }
     if(fileSys != null) fileSys.close();
     if(cluster != null) cluster.shutdown();
   }
 
-  private static void writeConfigFile(FileSystem fs, Path name,
-      ArrayList<String> nodes) throws IOException {
-
-    // delete if it already exists
-    if (fs.exists(name)) {
-      fs.delete(name, true);
-    }
-
-    FSDataOutputStream stm = fs.create(name);
-
-    if (nodes != null) {
-      for (Iterator<String> it = nodes.iterator(); it.hasNext();) {
-        String node = it.next();
-        stm.writeBytes(node);
-        stm.writeBytes("\n");
-      }
-    }
-    stm.close();
-  }
-
   private FSDataOutputStream writeIncompleteFile(FileSystem fileSys, Path name,
       short repl) throws IOException {
     // create and write a file that contains three blocks of data
@@ -169,25 +140,25 @@ static private void cleanupFile(FileSystem fileSys, Path name)
    * Decommissions the node at the given index
    */
   private String decommissionNode(FSNamesystem namesystem, DFSClient client,
-      FileSystem localFileSys, int nodeIndex) throws IOException {
+      int nodeIndex) throws IOException {
     DatanodeInfo[] info = client.datanodeReport(DatanodeReportType.LIVE);
     String nodename = info[nodeIndex].getXferAddr();
-    decommissionNode(namesystem, localFileSys, nodename);
+    decommissionNode(namesystem, nodename);
     return nodename;
   }
 
   /*
    * Decommissions the node by name
    */
-  private void decommissionNode(FSNamesystem namesystem,
-      FileSystem localFileSys, String dnName) throws IOException {
+  private void decommissionNode(FSNamesystem namesystem, String dnName)
+      throws IOException {
     System.out.println("Decommissioning node: " + dnName);
 
     // write nodename into the exclude file.
     ArrayList<String> nodes = new ArrayList<String>(decommissionedNodes);
     nodes.add(dnName);
-    writeConfigFile(localFileSys, excludeFile, nodes);
+    hostsFileWriter.initExcludeHosts(nodes.toArray(new String[0]));
   }
 
   private void checkDecommissionStatus(DatanodeDescriptor decommNode,
@@ -280,7 +251,7 @@ public void testDecommissionStatus() throws Exception {
     FSNamesystem fsn = cluster.getNamesystem();
     final DatanodeManager dm = fsn.getBlockManager().getDatanodeManager();
     for (int iteration = 0; iteration < numDatanodes; iteration++) {
-      String downnode = decommissionNode(fsn, client, localFileSys, iteration);
+      String downnode = decommissionNode(fsn, client, iteration);
       dm.refreshNodes(conf);
       decommissionedNodes.add(downnode);
       BlockManagerTestUtil.recheckDecommissionState(dm);
@@ -307,7 +278,7 @@ public void testDecommissionStatus() throws Exception {
     // Call refreshNodes on FSNamesystem with empty exclude file.
     // This will remove the datanodes from decommissioning list and
     // make them available again.
-    writeConfigFile(localFileSys, excludeFile, null);
+    hostsFileWriter.initExcludeHost("");
     dm.refreshNodes(conf);
     st1.close();
     cleanupFile(fileSys, file1);
@@ -337,7 +308,7 @@ public void testDecommissionStatusAfterDNRestart() throws Exception {
     // Decommission the DN.
     FSNamesystem fsn = cluster.getNamesystem();
     final DatanodeManager dm = fsn.getBlockManager().getDatanodeManager();
-    decommissionNode(fsn, localFileSys, dnName);
+    decommissionNode(fsn, dnName);
     dm.refreshNodes(conf);
 
     // Stop the DN when decommission is in progress.
@@ -384,7 +355,7 @@ public void testDecommissionStatusAfterDNRestart() throws Exception {
     // Call refreshNodes on FSNamesystem with empty exclude file.
     // This will remove the datanodes from decommissioning list and
     // make them available again.
-    writeConfigFile(localFileSys, excludeFile, null);
+    hostsFileWriter.initExcludeHost("");
     dm.refreshNodes(conf);
   }
 
@@ -405,7 +376,7 @@ public void testDecommissionDeadDN() throws Exception {
     FSNamesystem fsn = cluster.getNamesystem();
     final DatanodeManager dm = fsn.getBlockManager().getDatanodeManager();
     DatanodeDescriptor dnDescriptor = dm.getDatanode(dnID);
-    decommissionNode(fsn, localFileSys, dnName);
+    decommissionNode(fsn, dnName);
     dm.refreshNodes(conf);
     BlockManagerTestUtil.recheckDecommissionState(dm);
     assertTrue(dnDescriptor.isDecommissioned());
@@ -416,7 +387,7 @@ public void testDecommissionDeadDN() throws Exception {
 
     // Call refreshNodes on FSNamesystem with empty exclude file to remove the
     // datanode from decommissioning list and make it available again.
- writeConfigFile(localFileSys, excludeFile, null); + hostsFileWriter.initExcludeHost(""); dm.refreshNodes(conf); } } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestEditLogFileInputStream.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestEditLogFileInputStream.java index aecdc789c4..cd329a617e 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestEditLogFileInputStream.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestEditLogFileInputStream.java @@ -87,8 +87,7 @@ public void testReadURL() throws Exception { @Test(timeout=60000) public void testScanCorruptEditLog() throws Exception { Configuration conf = new Configuration(); - File editLog = new File(System.getProperty( - "test.build.data", "/tmp"), "testCorruptEditLog"); + File editLog = new File(GenericTestUtils.getTempPath("testCorruptEditLog")); LOG.debug("Creating test edit log file: " + editLog); EditLogFileOutputStream elos = new EditLogFileOutputStream(conf, diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFSImageWithSnapshot.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFSImageWithSnapshot.java index 6be39509c9..82f5cfb169 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFSImageWithSnapshot.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFSImageWithSnapshot.java @@ -69,8 +69,8 @@ public class TestFSImageWithSnapshot { private final Path dir = new Path("/TestSnapshot"); private static final String testDir = - System.getProperty("test.build.data", "build/test/data"); - + GenericTestUtils.getTestDir().getAbsolutePath(); + Configuration conf; MiniDFSCluster cluster; FSNamesystem fsn; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFsck.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFsck.java index 73ad885f66..859f72c29e 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFsck.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFsck.java @@ -119,8 +119,8 @@ * A JUnit test for doing fsck */ public class TestFsck { - static final String auditLogFile = System.getProperty("test.build.dir", - "build/test") + "/TestFsck-audit.log"; + static final String AUDITLOG_FILE = + GenericTestUtils.getTempPath("TestFsck-audit.log"); // Pattern for: // allowed=true ugi=name ip=/address cmd=FSCK src=/ dst=null perm=null @@ -212,14 +212,15 @@ public void testFsck() throws Exception { /** Sets up log4j logger for auditlogs */ private void setupAuditLogs() throws IOException { - File file = new File(auditLogFile); + File file = new File(AUDITLOG_FILE); if (file.exists()) { file.delete(); } Logger logger = ((Log4JLogger) FSNamesystem.auditLog).getLogger(); logger.setLevel(Level.INFO); PatternLayout layout = new PatternLayout("%m%n"); - RollingFileAppender appender = new RollingFileAppender(layout, auditLogFile); + RollingFileAppender appender = + new RollingFileAppender(layout, AUDITLOG_FILE); logger.addAppender(appender); } @@ -231,7 +232,7 @@ private void verifyAuditLogs() throws IOException { BufferedReader reader = null; try { // Audit log should contain one 
getfileinfo and one fsck - reader = new BufferedReader(new FileReader(auditLogFile)); + reader = new BufferedReader(new FileReader(AUDITLOG_FILE)); String line; // one extra getfileinfo stems from resolving the path diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeHttpServer.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeHttpServer.java index 7dc719e564..27efea6d8b 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeHttpServer.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeHttpServer.java @@ -32,6 +32,7 @@ import org.apache.hadoop.http.HttpConfig.Policy; import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.security.ssl.KeyStoreTestUtil; +import org.apache.hadoop.test.GenericTestUtils; import org.junit.AfterClass; import org.junit.Assert; import org.junit.BeforeClass; @@ -42,8 +43,8 @@ @RunWith(value = Parameterized.class) public class TestNameNodeHttpServer { - private static final String BASEDIR = System.getProperty("test.build.dir", - "target/test-dir") + "/" + TestNameNodeHttpServer.class.getSimpleName(); + private static final String BASEDIR = GenericTestUtils + .getTempPath(TestNameNodeHttpServer.class.getSimpleName()); private static String keystoresDir; private static String sslConfDir; private static Configuration conf; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeRecovery.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeRecovery.java index 87e2523cd9..877f43cde3 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeRecovery.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeRecovery.java @@ -49,6 +49,7 @@ import org.apache.hadoop.hdfs.server.namenode.FSEditLogOp.OpInstanceCache; import org.apache.hadoop.hdfs.server.namenode.NNStorage.NameNodeDirType; import org.apache.hadoop.io.IOUtils; +import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.test.PathUtils; import org.apache.hadoop.util.StringUtils; import org.junit.Test; @@ -521,8 +522,8 @@ static void setupRecoveryTestConf(Configuration conf) throws IOException { conf.set(DFSConfigKeys.DFS_HA_NAMENODE_ID_KEY, "nn1"); conf.set(DFSUtil.addKeySuffixes(DFSConfigKeys.DFS_HA_NAMENODES_KEY_PREFIX, "ns1"), "nn1,nn2"); - String baseDir = System.getProperty( - MiniDFSCluster.PROP_TEST_BUILD_DATA, "build/test/data") + "/dfs/"; + String baseDir = GenericTestUtils.getTestDir("setupRecoveryTestConf") + .getAbsolutePath(); File nameDir = new File(baseDir, "nameR"); File secondaryDir = new File(baseDir, "namesecondaryR"); conf.set(DFSUtil.addKeySuffixes(DFSConfigKeys. 
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeRespectsBindHostKeys.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeRespectsBindHostKeys.java index 0c65360499..21d99a3fa6 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeRespectsBindHostKeys.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestNameNodeRespectsBindHostKeys.java @@ -28,6 +28,7 @@ import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.http.HttpConfig; import org.apache.hadoop.security.ssl.KeyStoreTestUtil; +import org.apache.hadoop.test.GenericTestUtils; import java.io.File; import java.io.IOException; @@ -237,8 +238,8 @@ public void testHttpBindHostKey() throws IOException { } } - private static final String BASEDIR = System.getProperty("test.build.dir", - "target/test-dir") + "/" + TestNameNodeRespectsBindHostKeys.class.getSimpleName(); + private static final String BASEDIR = GenericTestUtils + .getTempPath(TestNameNodeRespectsBindHostKeys.class.getSimpleName()); private static void setupSsl() throws Exception { Configuration conf = new Configuration(); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestRenameWithSnapshots.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestRenameWithSnapshots.java index 827feb6a2a..ad3a5a1967 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestRenameWithSnapshots.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestRenameWithSnapshots.java @@ -95,7 +95,7 @@ public class TestRenameWithSnapshots { private static FSDirectory fsdir; private static DistributedFileSystem hdfs; private static final String testDir = - System.getProperty("test.build.data", "build/test/data"); + GenericTestUtils.getTestDir().getAbsolutePath(); static private final Path dir = new Path("/testRenameWithSnapshots"); static private final Path sub1 = new Path(dir, "sub1"); static private final Path file1 = new Path(sub1, "file1"); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshot.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshot.java index 6313ce1aa4..4a8fdc828a 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshot.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/snapshot/TestSnapshot.java @@ -98,8 +98,8 @@ public class TestSnapshot { protected DistributedFileSystem hdfs; private static final String testDir = - System.getProperty("test.build.data", "build/test/data"); - + GenericTestUtils.getTestDir().getAbsolutePath(); + @Rule public ExpectedException exception = ExpectedException.none(); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestGetConf.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestGetConf.java index 94ce6b22fc..942719e1fc 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestGetConf.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/tools/TestGetConf.java @@ 
-35,20 +35,18 @@ import java.net.InetSocketAddress; import java.util.ArrayList; import java.util.Arrays; -import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.StringTokenizer; -import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; -import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.hdfs.DFSUtil; import org.apache.hadoop.hdfs.DFSUtil.ConfiguredNNAddress; import org.apache.hadoop.hdfs.HdfsConfiguration; import org.apache.hadoop.hdfs.tools.GetConf.Command; import org.apache.hadoop.hdfs.tools.GetConf.CommandHandler; +import org.apache.hadoop.hdfs.util.HostsFileWriter; import org.apache.hadoop.net.NetUtils; import org.apache.hadoop.util.ToolRunner; import org.junit.Test; @@ -390,42 +388,29 @@ public void testTool() throws Exception { public void TestGetConfExcludeCommand() throws Exception{ HdfsConfiguration conf = new HdfsConfiguration(); // Set up the hosts/exclude files. - localFileSys = FileSystem.getLocal(conf); - Path workingDir = localFileSys.getWorkingDirectory(); - Path dir = new Path(workingDir, System.getProperty("test.build.data", "target/test/data") + "/Getconf/"); - Path hostsFile = new Path(dir, "hosts"); - Path excludeFile = new Path(dir, "exclude"); - - // Setup conf - conf.set(DFSConfigKeys.DFS_HOSTS, hostsFile.toUri().getPath()); - conf.set(DFSConfigKeys.DFS_HOSTS_EXCLUDE, excludeFile.toUri().getPath()); - writeConfigFile(hostsFile, null); - writeConfigFile(excludeFile, null); + HostsFileWriter hostsFileWriter = new HostsFileWriter(); + hostsFileWriter.initialize(conf, "GetConf"); + Path excludeFile = hostsFileWriter.getExcludeFile(); + String[] args = {"-excludeFile"}; String ret = runTool(conf, args, true); assertEquals(excludeFile.toUri().getPath(),ret.trim()); - cleanupFile(localFileSys, excludeFile.getParent()); + hostsFileWriter.cleanup(); } @Test public void TestGetConfIncludeCommand() throws Exception{ HdfsConfiguration conf = new HdfsConfiguration(); // Set up the hosts/exclude files. 
-    localFileSys = FileSystem.getLocal(conf);
-    Path workingDir = localFileSys.getWorkingDirectory();
-    Path dir = new Path(workingDir, System.getProperty("test.build.data", "target/test/data") + "/Getconf/");
-    Path hostsFile = new Path(dir, "hosts");
-    Path excludeFile = new Path(dir, "exclude");
-
+    HostsFileWriter hostsFileWriter = new HostsFileWriter();
+    hostsFileWriter.initialize(conf, "GetConf");
+    Path hostsFile = hostsFileWriter.getIncludeFile();
+
     // Setup conf
-    conf.set(DFSConfigKeys.DFS_HOSTS, hostsFile.toUri().getPath());
-    conf.set(DFSConfigKeys.DFS_HOSTS_EXCLUDE, excludeFile.toUri().getPath());
-    writeConfigFile(hostsFile, null);
-    writeConfigFile(excludeFile, null);
     String[] args = {"-includeFile"};
     String ret = runTool(conf, args, true);
     assertEquals(hostsFile.toUri().getPath(),ret.trim());
-    cleanupFile(localFileSys, excludeFile.getParent());
+    hostsFileWriter.cleanup();
   }
 
   @Test
@@ -443,29 +428,4 @@ public void testIncludeInternalNameServices() throws Exception {
     verifyAddresses(conf, TestType.NAMENODE, false, includedNN);
     verifyAddresses(conf, TestType.NNRPCADDRESSES, true, includedNN);
   }
-
-  private void writeConfigFile(Path name, ArrayList<String> nodes)
-      throws IOException {
-    // delete if it already exists
-    if (localFileSys.exists(name)) {
-      localFileSys.delete(name, true);
-    }
-
-    FSDataOutputStream stm = localFileSys.create(name);
-
-    if (nodes != null) {
-      for (Iterator<String> it = nodes.iterator(); it.hasNext();) {
-        String node = it.next();
-        stm.writeBytes(node);
-        stm.writeBytes("\n");
-      }
-    }
-    stm.close();
-  }
-
-  private void cleanupFile(FileSystem fileSys, Path name) throws IOException {
-    assertTrue(fileSys.exists(name));
-    fileSys.delete(name, true);
-    assertTrue(!fileSys.exists(name));
-  }
 }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/util/HostsFileWriter.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/util/HostsFileWriter.java
index cd5ae95497..2ef0b8f2bc 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/util/HostsFileWriter.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/util/HostsFileWriter.java
@@ -73,16 +73,26 @@ public void initialize(Configuration conf, String dir) throws IOException {
   }
 
   public void initExcludeHost(String hostNameAndPort) throws IOException {
+    initExcludeHosts(hostNameAndPort);
+  }
+
+  public void initExcludeHosts(String... hostNameAndPorts) throws IOException {
+    StringBuilder excludeHosts = new StringBuilder();
     if (isLegacyHostsFile) {
-      DFSTestUtil.writeFile(localFileSys, excludeFile, hostNameAndPort);
+      for (String hostNameAndPort : hostNameAndPorts) {
+        excludeHosts.append(hostNameAndPort).append("\n");
+      }
+      DFSTestUtil.writeFile(localFileSys, excludeFile, excludeHosts.toString());
     } else {
-      DatanodeAdminProperties dn = new DatanodeAdminProperties();
-      String [] hostAndPort = hostNameAndPort.split(":");
-      dn.setHostName(hostAndPort[0]);
-      dn.setPort(Integer.parseInt(hostAndPort[1]));
-      dn.setAdminState(AdminStates.DECOMMISSIONED);
       HashSet<DatanodeAdminProperties> allDNs = new HashSet<>();
-      allDNs.add(dn);
+      for (String hostNameAndPort : hostNameAndPorts) {
+        DatanodeAdminProperties dn = new DatanodeAdminProperties();
+        String[] hostAndPort = hostNameAndPort.split(":");
+        dn.setHostName(hostAndPort[0]);
+        dn.setPort(Integer.parseInt(hostAndPort[1]));
+        dn.setAdminState(AdminStates.DECOMMISSIONED);
+        allDNs.add(dn);
+      }
       CombinedHostsFileWriter.writeFile(combinedFile.toString(), allDNs);
     }
   }
@@ -119,4 +129,12 @@ public void cleanup() throws IOException {
       FileUtils.deleteQuietly(new File(fullDir.toUri().getPath()));
     }
   }
+
+  public Path getIncludeFile() {
+    return includeFile;
+  }
+
+  public Path getExcludeFile() {
+    return excludeFile;
+  }
 }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/util/TestCombinedHostsFileReader.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/util/TestCombinedHostsFileReader.java
index c3946e412b..923cf66fea 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/util/TestCombinedHostsFileReader.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/util/TestCombinedHostsFileReader.java
@@ -23,6 +23,7 @@
 import java.util.Set;
 import org.apache.hadoop.hdfs.protocol.DatanodeAdminProperties;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.junit.Before;
 import org.junit.After;
 import org.junit.Test;
@@ -35,8 +36,8 @@ public class TestCombinedHostsFileReader {
 
   // Using /test/build/data/tmp directory to store temporary files
-  static final String HOSTS_TEST_DIR = new File(System.getProperty(
-      "test.build.data", "/tmp")).getAbsolutePath();
+  static final String HOSTS_TEST_DIR = GenericTestUtils.getTestDir()
+      .getAbsolutePath();
   File NEW_FILE = new File(HOSTS_TEST_DIR, "dfs.hosts.new.json");
 
   static final String TEST_CACHE_DATA_DIR =
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestHttpsFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestHttpsFileSystem.java
index 0f0ac3bf10..34bb336693 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestHttpsFileSystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestHttpsFileSystem.java
@@ -32,14 +32,15 @@
 import org.apache.hadoop.http.HttpConfig;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.ssl.KeyStoreTestUtil;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.junit.AfterClass;
 import org.junit.Assert;
 import org.junit.BeforeClass;
 import org.junit.Test;
 
 public class TestHttpsFileSystem {
-  private static final String BASEDIR = System.getProperty("test.build.dir",
-      "target/test-dir") + "/" + TestHttpsFileSystem.class.getSimpleName();
+  private static final String BASEDIR =
+      GenericTestUtils.getTempPath(TestHttpsFileSystem.class.getSimpleName());
   private
static MiniDFSCluster cluster; private static Configuration conf; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsTokens.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsTokens.java index cb1efaeee0..6192ad927f 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsTokens.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsTokens.java @@ -55,6 +55,7 @@ import org.apache.hadoop.security.authentication.client.ConnectionConfigurator; import org.apache.hadoop.security.ssl.KeyStoreTestUtil; import org.apache.hadoop.security.token.SecretManager.InvalidToken; +import org.apache.hadoop.test.GenericTestUtils; import org.apache.hadoop.security.token.Token; import org.junit.Assert; import org.junit.BeforeClass; @@ -199,8 +200,8 @@ public void testLazyTokenFetchForSWebhdfs() throws Exception { SecurityUtil.setAuthenticationMethod(SIMPLE, clusterConf); clusterConf.setBoolean(DFSConfigKeys .DFS_NAMENODE_DELEGATION_TOKEN_ALWAYS_USE_KEY, true); - String BASEDIR = System.getProperty("test.build.dir", - "target/test-dir") + "/" + TestWebHdfsTokens.class.getSimpleName(); + String baseDir = + GenericTestUtils.getTempPath(TestWebHdfsTokens.class.getSimpleName()); String keystoresDir; String sslConfDir; @@ -208,10 +209,10 @@ public void testLazyTokenFetchForSWebhdfs() throws Exception { clusterConf.set(DFSConfigKeys.DFS_NAMENODE_HTTPS_ADDRESS_KEY, "localhost:0"); clusterConf.set(DFSConfigKeys.DFS_DATANODE_HTTPS_ADDRESS_KEY, "localhost:0"); - File base = new File(BASEDIR); + File base = new File(baseDir); FileUtil.fullyDelete(base); base.mkdirs(); - keystoresDir = new File(BASEDIR).getAbsolutePath(); + keystoresDir = new File(baseDir).getAbsolutePath(); sslConfDir = KeyStoreTestUtil.getClasspathDir(TestWebHdfsTokens.class); KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfDir, clusterConf, false); clusterConf.set(DFSConfigKeys.DFS_CLIENT_HTTPS_KEYSTORE_RESOURCE_KEY, diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/test/PathUtils.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/test/PathUtils.java index ac0f632145..d54ea651ef 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/test/PathUtils.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/test/PathUtils.java @@ -19,7 +19,6 @@ import java.io.File; -import org.apache.commons.lang.RandomStringUtils; import org.apache.hadoop.fs.Path; public class PathUtils { @@ -37,10 +36,8 @@ public static File getTestDir(Class caller) { } public static File getTestDir(Class caller, boolean create) { - File dir = - new File(System.getProperty("test.build.data", "target/test/data") - + "/" + RandomStringUtils.randomAlphanumeric(10), - caller.getSimpleName()); + File dir = new File(GenericTestUtils.getRandomizedTestDir(), + caller.getSimpleName()); if (create) { dir.mkdirs(); }
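
Reviewer note (not part of the patch): the conversions above all funnel through a small set of GenericTestUtils helpers. A minimal, self-contained sketch of the resulting pattern, assuming the helpers behave as their call sites in this patch suggest (the class name and directory names below are illustrative only):

```java
import java.io.File;

import org.apache.hadoop.test.GenericTestUtils;

public class TestDirPatternExample {
  public static void main(String[] args) {
    // Shared root test directory, replacing ad-hoc
    // System.getProperty("test.build.data", "/tmp") lookups.
    File root = GenericTestUtils.getTestDir();

    // Named per-test subdirectory, as in the converted constants above.
    File named = GenericTestUtils.getTestDir("TestDirPatternExample");

    // Path string under the temp dir (no File is created), for places
    // that configure a String such as the BASEDIR constants above.
    String temp = GenericTestUtils.getTempPath("TestDirPatternExample");

    // Randomized directory, as now used by PathUtils.getTestDir().
    File randomized = GenericTestUtils.getRandomizedTestDir();

    System.out.println(root + "\n" + named + "\n" + temp + "\n" + randomized);
  }
}
```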
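Similarly, the TestGetConf changes lean on the HostsFileWriter lifecycle plus the accessors and varargs overload added above. A hedged usage sketch (the class name, directory name, and host:port values are illustrative only):

```java
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.hdfs.util.HostsFileWriter;

public class HostsFileWriterExample {
  public static void main(String[] args) throws Exception {
    HdfsConfiguration conf = new HdfsConfiguration();
    HostsFileWriter hostsFileWriter = new HostsFileWriter();
    // Creates the include/exclude (or combined JSON) files under the
    // managed test directory and registers them in conf.
    hostsFileWriter.initialize(conf, "HostsFileWriterExample");
    try {
      Path include = hostsFileWriter.getIncludeFile();
      Path exclude = hostsFileWriter.getExcludeFile();
      // New varargs overload: one call excludes several datanodes, writing
      // either the legacy newline-separated file or the combined JSON form.
      hostsFileWriter.initExcludeHosts("host1:1234", "host2:1234");
      System.out.println("include=" + include + " exclude=" + exclude);
      // ... exercise the code under test against conf ...
    } finally {
      hostsFileWriter.cleanup();
    }
  }
}
```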