MAPREDUCE-7320. organize test directories for ClusterMapReduceTestCase (#2722). Contributed by Ahmed Hussein
(cherry picked from commit e04bcb3a06)
commit 792329fde9
parent e4dcc31114
GenericTestUtils.java
@@ -229,6 +229,22 @@ public static int uniqueSequenceId() {
     return sequence.incrementAndGet();
   }
 
+  /**
+   * Creates a directory for the data/logs of the unit test.
+   * It first deletes the directory if it exists.
+   *
+   * @param testClass the unit test class.
+   * @return the Path of the root directory.
+   */
+  public static File setupTestRootDir(Class<?> testClass) {
+    File testRootDir = getTestDir(testClass.getSimpleName());
+    if (testRootDir.exists()) {
+      FileUtil.fullyDelete(testRootDir);
+    }
+    testRootDir.mkdirs();
+    return testRootDir;
+  }
+
   /**
    * Get the (created) base directory for tests.
    * @return the absolute directory
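For context, a minimal sketch of how a test class would consume the new helper; the class name `TestExample` is hypothetical:

```java
import java.io.File;

import org.apache.hadoop.test.GenericTestUtils;
import org.junit.BeforeClass;

public class TestExample {
  private static File testRootDir;

  @BeforeClass
  public static void setup() {
    // Deletes and recreates <test-dir>/TestExample, so every run of this
    // class starts from an empty, class-scoped directory.
    testRootDir = GenericTestUtils.setupTestRootDir(TestExample.class);
  }
}
```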
JarFinder.java
@@ -132,6 +132,10 @@ private static void createJar(File dir, File jarFile) throws IOException {
    * @return path to the Jar containing the class.
    */
   public static String getJar(Class klass) {
+    return getJar(klass, null);
+  }
+
+  public static String getJar(Class klass, String testSubDir) {
     Preconditions.checkNotNull(klass, "klass");
     ClassLoader loader = klass.getClassLoader();
     if (loader != null) {
@@ -154,7 +158,9 @@ else if ("file".equals(url.getProtocol())) {
       klassName = klassName.replace(".", "/") + ".class";
       path = path.substring(0, path.length() - klassName.length());
       File baseDir = new File(path);
-      File testDir = GenericTestUtils.getTestDir();
+      File testDir =
+          testSubDir == null ? GenericTestUtils.getTestDir()
+              : GenericTestUtils.getTestDir(testSubDir);
       testDir = testDir.getAbsoluteFile();
       if (!testDir.exists()) {
         testDir.mkdirs();
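A hedged sketch of what the new overload buys: with a non-null `testSubDir`, the generated jar is written under a caller-specific subdirectory of the test dir instead of the shared one, so parallel tests stop overwriting each other's jars. `JarFinderDemo` is a hypothetical driver:

```java
import org.apache.hadoop.util.JarFinder;

public class JarFinderDemo {
  public static void main(String[] args) {
    // Original behaviour: jar created under the shared test directory.
    String sharedJar = JarFinder.getJar(JarFinderDemo.class);
    // New overload: jar created under a "JarFinderDemo" subdirectory.
    String scopedJar = JarFinder.getJar(JarFinderDemo.class, "JarFinderDemo");
    System.out.println(sharedJar);
    System.out.println(scopedJar);
  }
}
```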
ClusterMapReduceTestCase.java
@@ -20,9 +20,12 @@
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.test.GenericTestUtils;
+
 import org.junit.After;
 import org.junit.Before;
 
+import java.io.File;
 import java.io.IOException;
 import java.util.Map;
 import java.util.Properties;
@@ -43,8 +46,18 @@
  * The DFS filesystem is formated before the testcase starts and after it ends.
  */
 public abstract class ClusterMapReduceTestCase {
+  private static File testRootDir;
+  private static File dfsFolder;
+
   private MiniDFSCluster dfsCluster = null;
-  private MiniMRCluster mrCluster = null;
+  private MiniMRClientCluster mrCluster = null;
+
+  protected static void setupClassBase(Class<?> testClass) throws Exception {
+    // setup the test root directory
+    testRootDir = GenericTestUtils.setupTestRootDir(testClass);
+    dfsFolder = new File(testRootDir, "dfs");
+  }
+
 
   /**
    * Creates Hadoop Cluster and DFS before a test case is run.
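Concrete subclasses are now expected to route their class name through `setupClassBase` from a `@BeforeClass` hook, as the test changes later in this commit do. A minimal sketch (`TestMyJob` is hypothetical):

```java
import org.apache.hadoop.mapred.ClusterMapReduceTestCase;
import org.junit.BeforeClass;

public class TestMyJob extends ClusterMapReduceTestCase {
  @BeforeClass
  public static void setupClass() throws Exception {
    // Creates a per-class root dir; the mini DFS then stores its data
    // under <root>/dfs instead of the shared default location.
    setupClassBase(TestMyJob.class);
  }
}
```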
@@ -78,37 +91,10 @@ protected synchronized void startCluster(boolean reformatDFS, Properties props)
           conf.set((String) entry.getKey(), (String) entry.getValue());
         }
       }
-      dfsCluster = new MiniDFSCluster.Builder(conf).numDataNodes(2)
-          .format(reformatDFS).racks(null).build();
-
-      ConfigurableMiniMRCluster.setConfiguration(props);
-      //noinspection deprecation
-      mrCluster = new ConfigurableMiniMRCluster(2,
-          getFileSystem().getUri().toString(), 1, conf);
-    }
-  }
-
-  private static class ConfigurableMiniMRCluster extends MiniMRCluster {
-    private static Properties config;
-
-    public static void setConfiguration(Properties props) {
-      config = props;
-    }
-
-    public ConfigurableMiniMRCluster(int numTaskTrackers, String namenode,
-                                     int numDir, JobConf conf)
-        throws Exception {
-      super(0,0, numTaskTrackers, namenode, numDir, null, null, null, conf);
-    }
-
-    public JobConf createJobConf() {
-      JobConf conf = super.createJobConf();
-      if (config != null) {
-        for (Map.Entry entry : config.entrySet()) {
-          conf.set((String) entry.getKey(), (String) entry.getValue());
-        }
-      }
-      return conf;
-    }
+      dfsCluster =
+          new MiniDFSCluster.Builder(conf, dfsFolder)
+              .numDataNodes(2).format(reformatDFS).racks(null).build();
+      mrCluster = MiniMRClientClusterFactory.create(this.getClass(), 2, conf);
     }
   }
 
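The two-argument `MiniDFSCluster.Builder` constructor pins the cluster's storage under the supplied base directory rather than the global `test.build.data` default, which is what lets each test class keep its DFS files inside its own root. A standalone sketch (the path is an assumption mirroring the `<testRootDir>/dfs` convention above):

```java
import java.io.File;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.MiniDFSCluster;

public class MiniDfsFolderDemo {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Assumed layout: a class-scoped root with a "dfs" subfolder.
    File dfsFolder = new File("target/test/TestMyJob/dfs");
    MiniDFSCluster dfs = new MiniDFSCluster.Builder(conf, dfsFolder)
        .numDataNodes(2).format(true).racks(null).build();
    try {
      System.out.println(dfs.getFileSystem().getUri());
    } finally {
      dfs.shutdown();
    }
  }
}
```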
@@ -125,7 +111,7 @@ public JobConf createJobConf() {
    */
   protected void stopCluster() throws Exception {
     if (mrCluster != null) {
-      mrCluster.shutdown();
+      mrCluster.stop();
       mrCluster = null;
     }
     if (dfsCluster != null) {
@@ -157,17 +143,13 @@ protected FileSystem getFileSystem() throws IOException {
     return dfsCluster.getFileSystem();
   }
 
-  protected MiniMRCluster getMRCluster() {
-    return mrCluster;
-  }
-
   /**
    * Returns the path to the root directory for the testcase.
    *
    * @return path to the root directory for the testcase.
    */
   protected Path getTestRootDir() {
-    return new Path("x").getParent();
+    return new Path(testRootDir.getPath());
   }
 
   /**
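`getTestRootDir` previously returned the process working directory (the parent of a relative path); it now reports the directory created by `setupClassBase`. A sketch of probing it from a subclass (`TestRootDirProbe` is hypothetical):

```java
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.ClusterMapReduceTestCase;
import org.junit.BeforeClass;
import org.junit.Test;

public class TestRootDirProbe extends ClusterMapReduceTestCase {
  @BeforeClass
  public static void setupClass() throws Exception {
    setupClassBase(TestRootDirProbe.class);
  }

  @Test
  public void printsRoot() {
    // Formerly new Path("x").getParent(), i.e. the working directory;
    // now the class-scoped directory created in setupClassBase().
    Path root = getTestRootDir();
    System.out.println(root);
  }
}
```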
@@ -194,8 +176,8 @@ protected Path getOutputDir() {
    *
    * @return configuration that works on the testcase Hadoop instance
    */
-  protected JobConf createJobConf() {
-    return mrCluster.createJobConf();
+  protected JobConf createJobConf() throws IOException {
+    return new JobConf(mrCluster.getConfig());
   }
 
 }
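Since the `MiniMRClientCluster` interface exposes its live `Configuration` rather than a `JobConf` factory, `createJobConf` now wraps a snapshot of it; changes a test makes to the returned `JobConf` stay local. A sketch (`TestSubmitDemo` is hypothetical):

```java
import org.apache.hadoop.mapred.ClusterMapReduceTestCase;
import org.apache.hadoop.mapred.JobConf;
import org.junit.BeforeClass;
import org.junit.Test;

public class TestSubmitDemo extends ClusterMapReduceTestCase {
  @BeforeClass
  public static void setupClass() throws Exception {
    setupClassBase(TestSubmitDemo.class);
  }

  @Test
  public void tweaksConfLocally() throws Exception {
    // Copy of the running cluster's config; safe to mutate per job.
    JobConf conf = createJobConf();
    conf.setJobName("demo");
  }
}
```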
MiniMRClientClusterFactory.java
@@ -55,7 +55,8 @@ public static MiniMRClientCluster create(Class<?> caller, String identifier,
     Path appJar = new Path(testRootDir, "MRAppJar.jar");
 
     // Copy MRAppJar and make it private.
-    Path appMasterJar = new Path(MiniMRYarnCluster.APPJAR);
+    Path appMasterJar =
+        new Path(MiniMRYarnCluster.copyAppJarIntoTestDir(identifier));
 
     fs.copyFromLocalFile(appMasterJar, appJar);
     fs.setPermission(appJar, new FsPermission("744"));
@@ -64,7 +65,7 @@ public static MiniMRClientCluster create(Class<?> caller, String identifier,
 
     job.addFileToClassPath(appJar);
 
-    Path callerJar = new Path(JarFinder.getJar(caller));
+    Path callerJar = new Path(JarFinder.getJar(caller, identifier));
     Path remoteCallerJar = new Path(testRootDir, callerJar.getName());
     fs.copyFromLocalFile(callerJar, remoteCallerJar);
     fs.setPermission(remoteCallerJar, new FsPermission("744"));
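With the identifier threaded into both jar lookups, each factory caller stages its MRAppJar and caller jar under its own subdirectory. A hedged sketch of going through the factory (`FactoryDemo` is hypothetical):

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapred.MiniMRClientCluster;
import org.apache.hadoop.mapred.MiniMRClientClusterFactory;

public class FactoryDemo {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // The factory derives the identifier from the caller class, so the
    // staged jars land under a FactoryDemo-specific test directory.
    MiniMRClientCluster cluster =
        MiniMRClientClusterFactory.create(FactoryDemo.class, 2, conf);
    try {
      System.out.println(cluster.getConfig().get("mapreduce.framework.name"));
    } finally {
      cluster.stop();
    }
  }
}
```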
TestBadRecords.java
@@ -37,6 +37,8 @@
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapreduce.TaskCounter;
 import org.apache.hadoop.util.ReflectionUtils;
+
+import org.junit.BeforeClass;
 import org.junit.Ignore;
 import org.junit.Test;
 import org.slf4j.Logger;
@@ -59,6 +61,11 @@ public class TestBadRecords extends ClusterMapReduceTestCase {
 
   private List<String> input;
 
+  @BeforeClass
+  public static void setupClass() throws Exception {
+    setupClassBase(TestBadRecords.class);
+  }
+
   public TestBadRecords() {
     input = new ArrayList<String>();
     for(int i=1;i<=10;i++) {
TestClusterMapReduceTestCase.java
@@ -29,6 +29,8 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
+
+import org.junit.BeforeClass;
 import org.junit.Test;
 
 import static org.junit.Assert.assertTrue;
@@ -36,6 +38,12 @@
 import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertFalse;
 public class TestClusterMapReduceTestCase extends ClusterMapReduceTestCase {
+
+  @BeforeClass
+  public static void setupClass() throws Exception {
+    setupClassBase(TestClusterMapReduceTestCase.class);
+  }
+
   public void _testMapReduce(boolean restart) throws Exception {
     OutputStream os = getFileSystem().create(new Path(getInputDir(), "text.txt"));
     Writer wr = new OutputStreamWriter(os);
@@ -88,7 +96,6 @@ public void _testMapReduce(boolean restart) throws Exception {
       reader.close();
       assertEquals(4, counter);
     }
-
   }
 
   @Test
TestJobName.java
@@ -29,12 +29,19 @@
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.mapred.lib.IdentityMapper;
+
+import org.junit.BeforeClass;
 import org.junit.Test;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNull;
 
 public class TestJobName extends ClusterMapReduceTestCase {
 
+  @BeforeClass
+  public static void setupClass() throws Exception {
+    setupClassBase(TestJobName.class);
+  }
+
   @Test
   public void testComplexName() throws Exception {
     OutputStream os = getFileSystem().create(new Path(getInputDir(),
TestMRCJCJobClient.java
@@ -29,10 +29,17 @@
 import org.apache.hadoop.mapreduce.TestMRJobClient;
 import org.apache.hadoop.mapreduce.tools.CLI;
 import org.apache.hadoop.util.Tool;
+
+import org.junit.BeforeClass;
 import org.junit.Ignore;
 @Ignore
 public class TestMRCJCJobClient extends TestMRJobClient {
 
+  @BeforeClass
+  public static void setupClass() throws Exception {
+    setupClassBase(TestMRCJCJobClient.class);
+  }
+
   private String runJob() throws Exception {
     OutputStream os = getFileSystem().create(new Path(getInputDir(),
       "text.txt"));
TestMRJobClient.java
@@ -30,6 +30,7 @@
 import org.apache.hadoop.util.ToolRunner;
 import org.codehaus.jettison.json.JSONException;
 import org.codehaus.jettison.json.JSONObject;
+import org.junit.BeforeClass;
 import org.junit.Test;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -63,6 +64,11 @@ public class TestMRJobClient extends ClusterMapReduceTestCase {
   private static final Logger LOG =
       LoggerFactory.getLogger(TestMRJobClient.class);
 
+  @BeforeClass
+  public static void setupClass() throws Exception {
+    setupClassBase(TestMRJobClient.class);
+  }
+
   private Job runJob(Configuration conf) throws Exception {
     String input = "hello1\nhello2\nhello3\n";
 
TestEncryptedShuffle.java
@@ -19,7 +19,6 @@
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
@@ -31,58 +30,55 @@
 
 import org.apache.hadoop.mapreduce.MRConfig;
 import org.apache.hadoop.security.ssl.KeyStoreTestUtil;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.util.StringUtils;
+import org.apache.hadoop.util.Time;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.junit.After;
-import org.junit.AfterClass;
 import org.junit.Before;
 import org.junit.BeforeClass;
 import org.junit.Test;
 import org.junit.Assert;
 
-import java.io.BufferedReader;
 import java.io.File;
-import java.io.FileReader;
 import java.io.FileWriter;
 import java.io.IOException;
 import java.io.OutputStreamWriter;
 import java.io.Writer;
-import java.net.URL;
 
 public class TestEncryptedShuffle {
 
-  private static final String BASEDIR =
-      System.getProperty("test.build.dir", "target/test-dir") + "/" +
-          TestEncryptedShuffle.class.getSimpleName();
-
-  private String classpathDir;
+  private static File testRootDir;
 
   @BeforeClass
   public static void setUp() throws Exception {
-    File base = new File(BASEDIR);
-    FileUtil.fullyDelete(base);
-    base.mkdirs();
+    testRootDir =
+        GenericTestUtils.setupTestRootDir(TestEncryptedShuffle.class);
   }
 
   @Before
   public void createCustomYarnClasspath() throws Exception {
     classpathDir = KeyStoreTestUtil.getClasspathDir(TestEncryptedShuffle.class);
     new File(classpathDir, "core-site.xml").delete();
+    dfsFolder = new File(testRootDir, String.format("dfs-%d",
+        Time.monotonicNow()));
   }
 
   @After
   public void cleanUpMiniClusterSpecialConfig() throws Exception {
     new File(classpathDir, "core-site.xml").delete();
-    String keystoresDir = new File(BASEDIR).getAbsolutePath();
+    String keystoresDir = testRootDir.getAbsolutePath();
     KeyStoreTestUtil.cleanupSSLConfig(keystoresDir, classpathDir);
   }
 
+  private String classpathDir;
   private MiniDFSCluster dfsCluster = null;
   private MiniMRClientCluster mrCluster = null;
+  private File dfsFolder;
 
   private void startCluster(Configuration conf) throws Exception {
     if (System.getProperty("hadoop.log.dir") == null) {
-      System.setProperty("hadoop.log.dir", "target/test-dir");
+      System.setProperty("hadoop.log.dir", testRootDir.getAbsolutePath());
     }
     conf.set("dfs.block.access.token.enable", "false");
     conf.set("dfs.permissions", "true");
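Because this class manages its cluster fields directly (it does not extend ClusterMapReduceTestCase), it stamps a fresh `dfs-<timestamp>` folder in `@Before` so each test method's NameNode and DataNode data stay disjoint. A sketch of the naming scheme (the root path is an assumption):

```java
import java.io.File;

import org.apache.hadoop.util.Time;

public class DfsFolderNamingDemo {
  public static void main(String[] args) {
    // Assumed per-class root, as produced by GenericTestUtils.setupTestRootDir.
    File testRootDir = new File("target/test/TestEncryptedShuffle");
    // Monotonic time yields a distinct dfs-<n> folder per test method.
    File dfsFolder = new File(testRootDir,
        String.format("dfs-%d", Time.monotonicNow()));
    System.out.println(dfsFolder);
  }
}
```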
@@ -92,7 +88,7 @@ private void startCluster(Configuration conf) throws Exception {
             YarnConfiguration.DEFAULT_YARN_CROSS_PLATFORM_APPLICATION_CLASSPATH))
         + File.pathSeparator + classpathDir;
     conf.set(YarnConfiguration.YARN_APPLICATION_CLASSPATH, cp);
-    dfsCluster = new MiniDFSCluster.Builder(conf).build();
+    dfsCluster = new MiniDFSCluster.Builder(conf, dfsFolder).build();
     FileSystem fileSystem = dfsCluster.getFileSystem();
     fileSystem.mkdirs(new Path("/tmp"));
     fileSystem.mkdirs(new Path("/user"));
@@ -129,7 +125,7 @@ private void encryptedShuffleWithCerts(boolean useClientCerts)
       throws Exception {
     try {
       Configuration conf = new Configuration();
-      String keystoresDir = new File(BASEDIR).getAbsolutePath();
+      String keystoresDir = testRootDir.getAbsolutePath();
       String sslConfsDir =
           KeyStoreTestUtil.getClasspathDir(TestEncryptedShuffle.class);
       KeyStoreTestUtil.setupSSLConfig(keystoresDir, sslConfsDir, conf,
MiniMRYarnCluster.java
@@ -74,6 +74,7 @@ public MiniMRYarnCluster(String testName) {
     this(testName, 1);
   }
 
+  @SuppressWarnings("deprecation")
   public MiniMRYarnCluster(String testName, int noOfNMs) {
     this(testName, noOfNMs, false);
   }
@@ -84,6 +85,10 @@ public MiniMRYarnCluster(String testName, int noOfNMs, boolean enableAHS) {
     addService(historyServerWrapper);
   }
 
+  public static String copyAppJarIntoTestDir(String testSubdir) {
+    return JarFinder.getJar(LocalContainerLauncher.class, testSubdir);
+  }
+
   public static String getResolvedMRHistoryWebAppURLWithoutScheme(
       Configuration conf, boolean isSSLEnabled) {
     InetSocketAddress address = null;
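`copyAppJarIntoTestDir` funnels the app-master jar through the new `JarFinder` overload, keyed on a caller-supplied subdirectory name. A sketch (the "MyTest" subdirectory name is hypothetical):

```java
import org.apache.hadoop.mapreduce.v2.MiniMRYarnCluster;

public class AppJarDemo {
  public static void main(String[] args) {
    // Builds the MR app-master jar under a "MyTest"-scoped test directory.
    System.out.println(MiniMRYarnCluster.copyAppJarIntoTestDir("MyTest"));
  }
}
```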
TestStreamingBadRecords.java
@@ -31,13 +31,13 @@
 import java.util.Properties;
 import java.util.StringTokenizer;
 
+import org.junit.BeforeClass;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.mapred.ClusterMapReduceTestCase;
 import org.apache.hadoop.mapred.Counters;
-import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.RunningJob;
 import org.apache.hadoop.mapred.SkipBadRecords;
 import org.apache.hadoop.mapred.Utils;
@@ -66,6 +66,11 @@ public class TestStreamingBadRecords extends ClusterMapReduceTestCase
       UtilTest.makeJavaCommand(BadApp.class, new String[]{"true"});
   private static final int INPUTSIZE=100;
 
+  @BeforeClass
+  public static void setupClass() throws Exception {
+    setupClassBase(TestStreamingBadRecords.class);
+  }
+
   public TestStreamingBadRecords() throws IOException
   {
     UtilTest utilTest = new UtilTest(getClass().getName());
MiniYARNCluster.java
@@ -41,8 +41,10 @@
 import org.apache.hadoop.net.ServerSocketUtil;
 import org.apache.hadoop.service.AbstractService;
 import org.apache.hadoop.service.CompositeService;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.util.Shell.ShellCommandExecutor;
+import org.apache.hadoop.util.Time;
 import org.apache.hadoop.yarn.api.protocolrecords.GetClusterMetricsRequest;
 import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
 import org.apache.hadoop.yarn.conf.HAUtil;
|
|||||||
this.numLocalDirs = numLocalDirs;
|
this.numLocalDirs = numLocalDirs;
|
||||||
this.numLogDirs = numLogDirs;
|
this.numLogDirs = numLogDirs;
|
||||||
this.enableAHS = enableAHS;
|
this.enableAHS = enableAHS;
|
||||||
String testSubDir = testName.replace("$", "");
|
String yarnFolderName = String.format("yarn-%d", Time.monotonicNow());
|
||||||
File targetWorkDir = new File("target", testSubDir);
|
File targetWorkDirRoot = GenericTestUtils.getTestDir(getName());
|
||||||
|
// make sure that the folder exists
|
||||||
|
targetWorkDirRoot.mkdirs();
|
||||||
|
File targetWorkDir = new File(targetWorkDirRoot, yarnFolderName);
|
||||||
try {
|
try {
|
||||||
FileContext.getLocalFSFileContext().delete(
|
FileContext.getLocalFSFileContext().delete(
|
||||||
new Path(targetWorkDir.getAbsolutePath()), true);
|
new Path(targetWorkDir.getAbsolutePath()), true);
|
||||||
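Under the new scheme a cluster's work directory resolves below the GenericTestUtils root instead of a bare `target/<testName>` folder, and the timestamp keeps successive cluster instances apart. A sketch of the resulting layout (names are assumptions):

```java
import java.io.File;

import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.util.Time;

public class YarnWorkDirDemo {
  public static void main(String[] args) {
    // e.g. <test-dir>/MyMiniYarnCluster/yarn-<monotonicMillis>
    File root = GenericTestUtils.getTestDir("MyMiniYarnCluster");
    root.mkdirs();
    File workDir = new File(root,
        String.format("yarn-%d", Time.monotonicNow()));
    System.out.println(workDir);
  }
}
```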
@@ -227,6 +232,7 @@ public MiniYARNCluster(
    * @param numLocalDirs the number of nm-local-dirs per nodemanager
    * @param numLogDirs the number of nm-log-dirs per nodemanager
    */
+  @SuppressWarnings("deprecation")
   public MiniYARNCluster(
       String testName, int numResourceManagers, int numNodeManagers,
       int numLocalDirs, int numLogDirs) {