From e04bcb3a061f7d89fb1353cd5ef6f550c049f36c Mon Sep 17 00:00:00 2001
From: Ahmed Hussein <50450311+amahussein@users.noreply.github.com>
Date: Fri, 26 Feb 2021 13:42:33 -0600
Subject: [PATCH] MAPREDUCE-7320. organize test directories for
 ClusterMapReduceTestCase (#2722). Contributed by Ahmed Hussein

---
 .../apache/hadoop/test/GenericTestUtils.java  | 16 +++
 .../org/apache/hadoop/util/JarFinder.java     |  8 ++-
 .../mapred/ClusterMapReduceTestCase.java      | 62 +++++++------
 .../mapred/MiniMRClientClusterFactory.java    |  5 +-
 .../apache/hadoop/mapred/TestBadRecords.java  |  9 ++-
 .../mapred/TestClusterMapReduceTestCase.java  |  9 ++-
 .../org/apache/hadoop/mapred/TestJobName.java |  7 +++
 .../hadoop/mapred/TestMRCJCJobClient.java     |  9 ++-
 .../hadoop/mapreduce/TestMRJobClient.java     |  6 ++
 .../security/ssl/TestEncryptedShuffle.java    | 30 ++++-----
 .../mapreduce/v2/MiniMRYarnCluster.java       |  5 ++
 .../streaming/TestStreamingBadRecords.java    |  9 ++-
 .../hadoop/yarn/server/MiniYARNCluster.java   | 10 ++-
 13 files changed, 118 insertions(+), 67 deletions(-)

diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/GenericTestUtils.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/GenericTestUtils.java
index ade6cb4c7b..e266f28568 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/GenericTestUtils.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/test/GenericTestUtils.java
@@ -229,6 +229,22 @@ public static int uniqueSequenceId() {
     return sequence.incrementAndGet();
   }
 
+  /**
+   * Creates a directory for the data/logs of the unit test.
+   * It first deletes the directory if it exists.
+   *
+   * @param testClass the unit test class.
+   * @return the Path of the root directory.
+   */
+  public static File setupTestRootDir(Class testClass) {
+    File testRootDir = getTestDir(testClass.getSimpleName());
+    if (testRootDir.exists()) {
+      FileUtil.fullyDelete(testRootDir);
+    }
+    testRootDir.mkdirs();
+    return testRootDir;
+  }
+
   /**
    * Get the (created) base directory for tests.
    * @return the absolute directory
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/JarFinder.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/JarFinder.java
index 5e0bfc2399..3f1bb2d61d 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/JarFinder.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/JarFinder.java
@@ -132,6 +132,10 @@ private static void createJar(File dir, File jarFile) throws IOException {
    * @return path to the Jar containing the class.
    */
   public static String getJar(Class klass) {
+    return getJar(klass, null);
+  }
+
+  public static String getJar(Class klass, String testSubDir) {
     Preconditions.checkNotNull(klass, "klass");
     ClassLoader loader = klass.getClassLoader();
     if (loader != null) {
@@ -154,7 +158,9 @@ else if ("file".equals(url.getProtocol())) {
           klassName = klassName.replace(".", "/") + ".class";
           path = path.substring(0, path.length() - klassName.length());
           File baseDir = new File(path);
-          File testDir = GenericTestUtils.getTestDir();
+          File testDir =
+              testSubDir == null ? GenericTestUtils.getTestDir()
+                  : GenericTestUtils.getTestDir(testSubDir);
           testDir = testDir.getAbsoluteFile();
           if (!testDir.exists()) {
             testDir.mkdirs();
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/ClusterMapReduceTestCase.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/ClusterMapReduceTestCase.java
index 8d33b1580a..f16b8a0f18 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/ClusterMapReduceTestCase.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/ClusterMapReduceTestCase.java
@@ -20,9 +20,12 @@
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.test.GenericTestUtils;
+
 import org.junit.After;
 import org.junit.Before;
 
+import java.io.File;
 import java.io.IOException;
 import java.util.Map;
 import java.util.Properties;
@@ -43,8 +46,18 @@
  * The DFS filesystem is formated before the testcase starts and after it ends.
  */
 public abstract class ClusterMapReduceTestCase {
+  private static File testRootDir;
+  private static File dfsFolder;
+
   private MiniDFSCluster dfsCluster = null;
-  private MiniMRCluster mrCluster = null;
+  private MiniMRClientCluster mrCluster = null;
+
+  protected static void setupClassBase(Class testClass) throws Exception {
+    // setup the test root directory
+    testRootDir = GenericTestUtils.setupTestRootDir(testClass);
+    dfsFolder = new File(testRootDir, "dfs");
+  }
+
   /**
    * Creates Hadoop Cluster and DFS before a test case is run.
@@ -78,37 +91,10 @@ protected synchronized void startCluster(boolean reformatDFS, Properties props)
           conf.set((String) entry.getKey(), (String) entry.getValue());
         }
       }
-      dfsCluster = new MiniDFSCluster.Builder(conf).numDataNodes(2)
-          .format(reformatDFS).racks(null).build();
-
-      ConfigurableMiniMRCluster.setConfiguration(props);
-      //noinspection deprecation
-      mrCluster = new ConfigurableMiniMRCluster(2,
-          getFileSystem().getUri().toString(), 1, conf);
-    }
-  }
-
-  private static class ConfigurableMiniMRCluster extends MiniMRCluster {
-    private static Properties config;
-
-    public static void setConfiguration(Properties props) {
-      config = props;
-    }
-
-    public ConfigurableMiniMRCluster(int numTaskTrackers, String namenode,
-        int numDir, JobConf conf)
-        throws Exception {
-      super(0,0, numTaskTrackers, namenode, numDir, null, null, null, conf);
-    }
-
-    public JobConf createJobConf() {
-      JobConf conf = super.createJobConf();
-      if (config != null) {
-        for (Map.Entry entry : config.entrySet()) {
-          conf.set((String) entry.getKey(), (String) entry.getValue());
-        }
-      }
-      return conf;
+      dfsCluster =
+          new MiniDFSCluster.Builder(conf, dfsFolder)
+              .numDataNodes(2).format(reformatDFS).racks(null).build();
+      mrCluster = MiniMRClientClusterFactory.create(this.getClass(), 2, conf);
     }
   }
 
@@ -125,7 +111,7 @@ public JobConf createJobConf() {
    */
   protected void stopCluster() throws Exception {
     if (mrCluster != null) {
-      mrCluster.shutdown();
+      mrCluster.stop();
       mrCluster = null;
     }
     if (dfsCluster != null) {
@@ -157,17 +143,13 @@ protected FileSystem getFileSystem() throws IOException {
     return dfsCluster.getFileSystem();
   }
 
-  protected MiniMRCluster getMRCluster() {
-    return mrCluster;
-  }
-
   /**
    * Returns the path to the root directory for the testcase.
    *
    * @return path to the root directory for the testcase.
   */
   protected Path getTestRootDir() {
-    return new Path("x").getParent();
+    return new Path(testRootDir.getPath());
   }
 
   /**
@@ -194,8 +176,8 @@ protected Path getOutputDir() {
    *
    * @return configuration that works on the testcase Hadoop instance
    */
-  protected JobConf createJobConf() {
-    return mrCluster.createJobConf();
+  protected JobConf createJobConf() throws IOException {
+    return new JobConf(mrCluster.getConfig());
   }
 
 }
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/MiniMRClientClusterFactory.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/MiniMRClientClusterFactory.java
index 85c534bfb8..33b85b9204 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/MiniMRClientClusterFactory.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/MiniMRClientClusterFactory.java
@@ -55,7 +55,8 @@ public static MiniMRClientCluster create(Class caller, String identifier,
     Path appJar = new Path(testRootDir, "MRAppJar.jar");
 
     // Copy MRAppJar and make it private.
-    Path appMasterJar = new Path(MiniMRYarnCluster.APPJAR);
+    Path appMasterJar =
+        new Path(MiniMRYarnCluster.copyAppJarIntoTestDir(identifier));
 
     fs.copyFromLocalFile(appMasterJar, appJar);
     fs.setPermission(appJar, new FsPermission("744"));
@@ -64,7 +65,7 @@ public static MiniMRClientCluster create(Class caller, String identifier,
 
     job.addFileToClassPath(appJar);
 
-    Path callerJar = new Path(JarFinder.getJar(caller));
+    Path callerJar = new Path(JarFinder.getJar(caller, identifier));
     Path remoteCallerJar = new Path(testRootDir, callerJar.getName());
     fs.copyFromLocalFile(callerJar, remoteCallerJar);
     fs.setPermission(remoteCallerJar, new FsPermission("744"));
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestBadRecords.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestBadRecords.java
index b45a2a6f8c..1b39583bd7 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestBadRecords.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestBadRecords.java
@@ -37,6 +37,8 @@
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.TaskCounter;
 import org.apache.hadoop.util.ReflectionUtils;
+
+import org.junit.BeforeClass;
 import org.junit.Ignore;
 import org.junit.Test;
 import org.slf4j.Logger;
@@ -58,7 +60,12 @@ public class TestBadRecords extends ClusterMapReduceTestCase {
     Arrays.asList("hello08","hello10");
 
   private List input;
-
+
+  @BeforeClass
+  public static void setupClass() throws Exception {
+    setupClassBase(TestBadRecords.class);
+  }
+
   public TestBadRecords() {
     input = new ArrayList();
     for(int i=1;i<=10;i++) {
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClusterMapReduceTestCase.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClusterMapReduceTestCase.java
index f04fbd7a29..b4e8de2723 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClusterMapReduceTestCase.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestClusterMapReduceTestCase.java
@@ -29,6 +29,8 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
+
+import org.junit.BeforeClass;
 import org.junit.Test;
 
 import static org.junit.Assert.assertTrue;
@@ -36,6 +38,12 @@ import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertFalse;
 
 public class TestClusterMapReduceTestCase extends ClusterMapReduceTestCase {
+
+  @BeforeClass
+  public static void setupClass() throws Exception {
+    setupClassBase(TestClusterMapReduceTestCase.class);
+  }
+
   public void _testMapReduce(boolean restart) throws Exception {
     OutputStream os = getFileSystem().create(new Path(getInputDir(), "text.txt"));
     Writer wr = new OutputStreamWriter(os);
@@ -88,7 +96,6 @@ public void _testMapReduce(boolean restart) throws Exception {
       reader.close();
       assertEquals(4, counter);
     }
-
   }
 
   @Test
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobName.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobName.java
index 2659a14a70..f50089af4a 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobName.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestJobName.java
@@ -29,12 +29,19 @@
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.lib.IdentityMapper;
+
+import org.junit.BeforeClass;
 import org.junit.Test;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNull;
 
 public class TestJobName extends ClusterMapReduceTestCase {
+  @BeforeClass
+  public static void setupClass() throws Exception {
+    setupClassBase(TestJobName.class);
+  }
+
   @Test
   public void testComplexName() throws Exception {
     OutputStream os = getFileSystem().create(new Path(getInputDir(),
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCJobClient.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCJobClient.java
index 9a2af0c7be..9a2c744d8c 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCJobClient.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapred/TestMRCJCJobClient.java
@@ -29,10 +29,17 @@
 import org.apache.hadoop.mapreduce.TestMRJobClient;
 import org.apache.hadoop.mapreduce.tools.CLI;
 import org.apache.hadoop.util.Tool;
+
+import org.junit.BeforeClass;
 import org.junit.Ignore;
 
 @Ignore
 public class TestMRCJCJobClient extends TestMRJobClient {
-
+
+  @BeforeClass
+  public static void setupClass() throws Exception {
+    setupClassBase(TestMRCJCJobClient.class);
+  }
+
   private String runJob() throws Exception {
     OutputStream os = getFileSystem().create(new Path(getInputDir(),
       "text.txt"));
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestMRJobClient.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestMRJobClient.java
index f4ccc569af..31b90aa0e5 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestMRJobClient.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/TestMRJobClient.java
@@ -30,6 +30,7 @@
 import org.apache.hadoop.util.ToolRunner;
 import org.codehaus.jettison.json.JSONException;
 import org.codehaus.jettison.json.JSONObject;
+import org.junit.BeforeClass;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -63,6 +64,11 @@ public class TestMRJobClient extends ClusterMapReduceTestCase {
   private static final Logger LOG =
      LoggerFactory.getLogger(TestMRJobClient.class);
 
+  @BeforeClass
+  public static void setupClass() throws Exception {
+    setupClassBase(TestMRJobClient.class);
+  }
+
   private Job runJob(Configuration conf) throws Exception {
     String input = "hello1\nhello2\nhello3\n";
 
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/security/ssl/TestEncryptedShuffle.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/security/ssl/TestEncryptedShuffle.java
index d870d25b9a..ed80f658f6 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/security/ssl/TestEncryptedShuffle.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/security/ssl/TestEncryptedShuffle.java
@@ -19,7 +19,6 @@
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
@@ -31,58 +30,55 @@
 import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.security.ssl.KeyStoreTestUtil;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.util.Time;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.junit.After;
-import org.junit.AfterClass;
 import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.Assert;
 
-import java.io.BufferedReader;
 import java.io.File;
-import java.io.FileReader;
 import java.io.FileWriter;
 import java.io.IOException;
 import java.io.OutputStreamWriter;
 import java.io.Writer;
-import java.net.URL;
 
 public class TestEncryptedShuffle {
 
-  private static final String BASEDIR =
-      System.getProperty("test.build.dir", "target/test-dir") + "/" +
-          TestEncryptedShuffle.class.getSimpleName();
-
-  private String classpathDir;
+  private static File testRootDir;
 
   @BeforeClass
   public static void setUp() throws Exception {
-    File base = new File(BASEDIR);
-    FileUtil.fullyDelete(base);
-    base.mkdirs();
+    testRootDir =
+        GenericTestUtils.setupTestRootDir(TestEncryptedShuffle.class);
   }
 
   @Before
   public void createCustomYarnClasspath() throws Exception {
     classpathDir = KeyStoreTestUtil.getClasspathDir(TestEncryptedShuffle.class);
     new File(classpathDir, "core-site.xml").delete();
+    dfsFolder = new File(testRootDir, String.format("dfs-%d",
+        Time.monotonicNow()));
   }
 
   @After
   public void cleanUpMiniClusterSpecialConfig() throws Exception {
     new File(classpathDir, "core-site.xml").delete();
-    String keystoresDir = new File(BASEDIR).getAbsolutePath();
+    String keystoresDir = testRootDir.getAbsolutePath();
     KeyStoreTestUtil.cleanupSSLConfig(keystoresDir, classpathDir);
   }
 
+  private String classpathDir;
   private MiniDFSCluster dfsCluster = null;
   private MiniMRClientCluster mrCluster = null;
+  private File dfsFolder;
 
   private void startCluster(Configuration conf) throws Exception {
     if (System.getProperty("hadoop.log.dir") == null) {
-      System.setProperty("hadoop.log.dir", "target/test-dir");
+      System.setProperty("hadoop.log.dir", testRootDir.getAbsolutePath());
     }
     conf.set("dfs.block.access.token.enable", "false");
     conf.set("dfs.permissions", "true");
@@ -92,7 +88,7 @@ private void startCluster(Configuration conf) throws Exception {
       YarnConfiguration.DEFAULT_YARN_CROSS_PLATFORM_APPLICATION_CLASSPATH))
         + File.pathSeparator + classpathDir;
     conf.set(YarnConfiguration.YARN_APPLICATION_CLASSPATH, cp);
-    dfsCluster = new MiniDFSCluster.Builder(conf).build();
+    dfsCluster = new MiniDFSCluster.Builder(conf, dfsFolder).build();
     FileSystem fileSystem = dfsCluster.getFileSystem();
     fileSystem.mkdirs(new Path("/tmp"));
     fileSystem.mkdirs(new Path("/user"));
@@ -129,7 +125,7 @@ private void encryptedShuffleWithCerts(boolean useClientCerts)
       throws Exception {
     try {
       Configuration conf = new Configuration();
-      String keystoresDir = new File(BASEDIR).getAbsolutePath();
+      String keystoresDir = testRootDir.getAbsolutePath();
       String sslConfsDir =
           KeyStoreTestUtil.getClasspathDir(TestEncryptedShuffle.class);
       KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfsDir, conf,
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/MiniMRYarnCluster.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/MiniMRYarnCluster.java
index dbd87e2491..e41c95c490 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/MiniMRYarnCluster.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/MiniMRYarnCluster.java
@@ -78,6 +78,7 @@ public MiniMRYarnCluster(String testName) {
     this(testName, 1);
   }
 
+  @SuppressWarnings("deprecation")
   public MiniMRYarnCluster(String testName, int noOfNMs) {
     this(testName, noOfNMs, false);
   }
@@ -87,6 +88,10 @@ public MiniMRYarnCluster(String testName, int noOfNMs, boolean enableAHS) {
     super(testName, 1, noOfNMs, 4, 4, enableAHS);
   }
 
+  public static String copyAppJarIntoTestDir(String testSubdir) {
+    return JarFinder.getJar(LocalContainerLauncher.class, testSubdir);
+  }
+
   public static String getResolvedMRHistoryWebAppURLWithoutScheme(
       Configuration conf, boolean isSSLEnabled) {
     InetSocketAddress address = null;
diff --git a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingBadRecords.java b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingBadRecords.java
index 0ef1ff0b32..5a4e3a960d 100644
--- a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingBadRecords.java
+++ b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/streaming/TestStreamingBadRecords.java
@@ -31,13 +31,13 @@
 import java.util.Properties;
 import java.util.StringTokenizer;
 
+import org.junit.BeforeClass;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapred.ClusterMapReduceTestCase;
 import org.apache.hadoop.mapred.Counters;
-import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.RunningJob;
 import org.apache.hadoop.mapred.SkipBadRecords;
 import org.apache.hadoop.mapred.Utils;
@@ -65,7 +65,12 @@ public class TestStreamingBadRecords extends ClusterMapReduceTestCase
   private static final String badReducer =
       UtilTest.makeJavaCommand(BadApp.class, new String[]{"true"});
   private static final int INPUTSIZE=100;
-
+
+  @BeforeClass
+  public static void setupClass() throws Exception {
+    setupClassBase(TestStreamingBadRecords.class);
+  }
+
   public TestStreamingBadRecords() throws IOException {
     UtilTest utilTest = new UtilTest(getClass().getName());
 
diff --git a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/MiniYARNCluster.java b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/MiniYARNCluster.java
index 352a2d0d64..828b5c43af 100644
--- a/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/MiniYARNCluster.java
+++ b/hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-tests/src/test/java/org/apache/hadoop/yarn/server/MiniYARNCluster.java
@@ -41,8 +41,10 @@
 import org.apache.hadoop.net.ServerSocketUtil;
 import org.apache.hadoop.service.AbstractService;
 import org.apache.hadoop.service.CompositeService;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.util.Shell.ShellCommandExecutor;
+import org.apache.hadoop.util.Time;
 import org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsRequest;
 import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
 import org.apache.hadoop.yarn.conf.HAUtil;
@@ -171,8 +173,11 @@ public MiniYARNCluster(
     this.numLocalDirs = numLocalDirs;
     this.numLogDirs = numLogDirs;
     this.enableAHS = enableAHS;
-    String testSubDir = testName.replace("$", "");
-    File targetWorkDir = new File("target", testSubDir);
+    String yarnFolderName = String.format("yarn-%d", Time.monotonicNow());
+    File targetWorkDirRoot = GenericTestUtils.getTestDir(getName());
+    // make sure that the folder exists
+    targetWorkDirRoot.mkdirs();
+    File targetWorkDir = new File(targetWorkDirRoot, yarnFolderName);
     try {
       FileContext.getLocalFSFileContext().delete(
           new Path(targetWorkDir.getAbsolutePath()), true);
@@ -227,6 +232,7 @@ public MiniYARNCluster(
    * @param numLocalDirs the number of nm-local-dirs per nodemanager
    * @param numLogDirs the number of nm-log-dirs per nodemanager
    */
+  @SuppressWarnings("deprecation")
   public MiniYARNCluster(
       String testName, int numResourceManagers, int numNodeManagers,
       int numLocalDirs, int numLogDirs) {
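
The adoption pattern this patch establishes for other ClusterMapReduceTestCase subclasses is a one-time @BeforeClass hook, as the changes to TestJobName and TestBadRecords show. The sketch below illustrates the same pattern for a hypothetical test class named TestMyJob (not part of this patch); it only relies on the APIs the patch introduces, and assumes the default GenericTestUtils test directory (target/test-dir).

    import org.apache.hadoop.mapred.ClusterMapReduceTestCase;
    import org.junit.BeforeClass;
    import org.junit.Test;

    public class TestMyJob extends ClusterMapReduceTestCase {

      // Create (and wipe, if left over) the per-class root directory,
      // e.g. target/test-dir/TestMyJob, before the base class starts the
      // MiniDFS/MiniMR clusters for each test.
      @BeforeClass
      public static void setupClass() throws Exception {
        setupClassBase(TestMyJob.class);
      }

      @Test
      public void testSomething() throws Exception {
        // Runs against the mini clusters started by the base class; their
        // data, logs and jars now live under the per-class directory
        // instead of the module working directory.
      }
    }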