MAPREDUCE-6520. Migrate MR Client test cases part 1. Contributed by Dustin Cote.
parent 6876b9f9f9
commit 247a790609
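The hunks below apply one mechanical migration to each test class: drop the JUnit 3 conventions (extending junit.framework.TestCase, overriding protected setUp()/tearDown(), test discovery via the test* name prefix) in favor of JUnit 4 annotations (@Before, @After, @Test) with static org.junit.Assert imports, and expand wildcard imports into explicit ones. A minimal sketch of the pattern, with illustrative class and method names that are not part of the patch:

    // Before: JUnit 3 wires lifecycle through inheritance and naming.
    public class ExampleTestV3 extends junit.framework.TestCase {
        @Override
        protected void setUp() throws Exception {
            super.setUp(); // chain to the framework's fixture setup
        }
        public void testSum() { // the "test" prefix marks this as a test
            assertEquals(2, 1 + 1);
        }
    }

    // After: JUnit 4 wires lifecycle and discovery through annotations.
    import org.junit.Before;
    import org.junit.Test;
    import static org.junit.Assert.assertEquals;

    public class ExampleTestV4 { // no TestCase superclass needed
        @Before
        public void setUp() throws Exception { // must be public for JUnit 4
        }
        @Test
        public void testSum() { // the annotation, not the name, marks the test
            assertEquals(2, 1 + 1);
        }
    }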
@@ -17,17 +17,30 @@
*/
package org.apache.hadoop.conf;

import org.junit.Assert;

import org.apache.hadoop.mapred.*;
import org.apache.hadoop.mapreduce.MRConfig;
import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.HadoopTestCase;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hadoop.mapred.TextOutputFormat;
import org.apache.hadoop.mapred.Utils;
import org.junit.Test;

import java.io.*;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

/**
* This testcase tests that a JobConf without default values submits jobs

@@ -40,6 +53,7 @@ public TestNoDefaultsJobConf() throws IOException {
super(HadoopTestCase.CLUSTER_MR, HadoopTestCase.DFS_FS, 1, 1);
}

@Test
public void testNoDefaults() throws Exception {
JobConf configuration = new JobConf();
assertTrue(configuration.get("hadoop.tmp.dir", null) != null);
@@ -18,13 +18,12 @@

package org.apache.hadoop.mapred;

import junit.framework.TestCase;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.mapreduce.MRConfig;
import org.junit.After;
import org.junit.Before;

import java.io.File;
import java.io.IOException;

/**

@@ -38,7 +37,7 @@
* Job Configurations should be created using a configuration returned by the
* 'createJobConf()' method.
*/
public abstract class HadoopTestCase extends TestCase {
public abstract class HadoopTestCase {
public static final int LOCAL_MR = 1;
public static final int CLUSTER_MR = 2;
public static final int LOCAL_FS = 4;

@@ -140,8 +139,8 @@ public boolean isLocalFS() {
*
* @throws Exception
*/
protected void setUp() throws Exception {
super.setUp();
@Before
public void setUp() throws Exception {
if (localFS) {
fileSystem = FileSystem.getLocal(new JobConf());
}

@@ -164,7 +163,8 @@ protected void setUp() throws Exception {
*
* @throws Exception
*/
protected void tearDown() throws Exception {
@After
public void tearDown() throws Exception {
try {
if (mrCluster != null) {
mrCluster.shutdown();

@@ -181,7 +181,6 @@ protected void tearDown() throws Exception {
catch (Exception ex) {
System.out.println(ex);
}
super.tearDown();
}

/**
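A note on the HadoopTestCase hunks above: once the base class no longer extends TestCase, its @Before setUp() and @After tearDown() are ordinary public methods that JUnit 4 also runs in every subclass by inheritance. JUnit 4 ignores a lifecycle method that a subclass overrides without re-annotating, so the migrated subclasses below keep the annotation on their override and call super.setUp()/super.tearDown() explicitly to preserve the base fixture. A minimal sketch, with illustrative names:

    import org.junit.Before;

    abstract class BaseFixture {
        @Before
        public void setUp() throws Exception {
            // allocate shared fixtures (cluster, filesystem) here
        }
    }

    class SomeTest extends BaseFixture {
        @Override
        @Before
        public void setUp() throws Exception {
            super.setUp(); // without this call the base fixture never runs,
                           // since only the overriding method is invoked
            // per-test fixtures here
        }
    }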
@@ -34,6 +34,13 @@
import java.io.IOException;
import java.io.DataOutputStream;

import static org.junit.Assert.assertTrue;
import static org.junit.Assert.assertEquals;
import org.junit.Before;
import org.junit.After;
import org.junit.Test;

/**
* Base class to test Job end notification in local and cluster mode.
*

@@ -140,17 +147,19 @@ protected JobConf createJobConf() {
return conf;
}

protected void setUp() throws Exception {
@Before
public void setUp() throws Exception {
super.setUp();
startHttpServer();
}

protected void tearDown() throws Exception {
@After
public void tearDown() throws Exception {
stopHttpServer();
super.tearDown();
}

@Test
public void testMR() throws Exception {

System.out.println(launchWordCount(this.createJobConf(),
@@ -30,12 +30,16 @@
import java.io.OutputStream;
import java.util.Iterator;

import org.junit.Test;
import static org.junit.Assert.assertTrue;

public class TestFileOutputFormat extends HadoopTestCase {

public TestFileOutputFormat() throws IOException {
super(HadoopTestCase.LOCAL_MR, HadoopTestCase.LOCAL_FS, 1, 1);
}

@Test
public void testCustomFile() throws Exception {
Path inDir = new Path("testing/fileoutputformat/input");
Path outDir = new Path("testing/fileoutputformat/output");
@@ -17,9 +17,6 @@
*/
package org.apache.hadoop.mapred;

import java.io.File;
import java.io.IOException;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;

@@ -27,9 +24,18 @@
import org.apache.hadoop.ipc.ProtocolSignature;
import org.apache.hadoop.mapred.SortedRanges.Range;
import org.apache.hadoop.mapreduce.TaskType;
import org.apache.hadoop.mapreduce.checkpoint.CheckpointID;
import org.apache.hadoop.mapreduce.checkpoint.FSCheckpointID;
import org.apache.hadoop.mapreduce.checkpoint.TaskCheckpointID;
import org.junit.After;
import org.junit.Test;

import java.io.File;
import java.io.IOException;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

public class TestTaskCommit extends HadoopTestCase {

@@ -80,12 +86,13 @@ public TestTaskCommit() throws IOException {
super(LOCAL_MR, LOCAL_FS, 1, 1);
}

@Override
@After
public void tearDown() throws Exception {
super.tearDown();
FileUtil.fullyDelete(new File(rootDir.toString()));
}

@Test
public void testCommitFail() throws IOException {
final Path inDir = new Path(rootDir, "./input");
final Path outDir = new Path(rootDir, "./output");

@@ -199,6 +206,7 @@ public void setCheckpointID(TaskID downgrade, TaskCheckpointID cid) {
*
* @throws Exception
*/
@Test
public void testTaskCleanupDoesNotCommit() throws Exception {
// Mimic a job with a special committer that does not cleanup
// files when a task fails.

@@ -245,16 +253,19 @@ public void testTaskCleanupDoesNotCommit() throws Exception {
assertTrue("Task did not succeed", umbilical.taskDone);
}

@Test
public void testCommitRequiredForMapTask() throws Exception {
Task testTask = createDummyTask(TaskType.MAP);
assertTrue("MapTask should need commit", testTask.isCommitRequired());
}

@Test
public void testCommitRequiredForReduceTask() throws Exception {
Task testTask = createDummyTask(TaskType.REDUCE);
assertTrue("ReduceTask should need commit", testTask.isCommitRequired());
}

@Test
public void testCommitNotRequiredForJobSetup() throws Exception {
Task testTask = createDummyTask(TaskType.MAP);
testTask.setJobSetupTask();

@@ -262,6 +273,7 @@ public void testCommitNotRequiredForJobSetup() throws Exception {
testTask.isCommitRequired());
}

@Test
public void testCommitNotRequiredForJobCleanup() throws Exception {
Task testTask = createDummyTask(TaskType.MAP);
testTask.setJobCleanupTask();

@@ -269,6 +281,7 @@ public void testCommitNotRequiredForJobCleanup() throws Exception {
testTask.isCommitRequired());
}

@Test
public void testCommitNotRequiredForTaskCleanup() throws Exception {
Task testTask = createDummyTask(TaskType.REDUCE);
testTask.setTaskCleanupTask();
@@ -27,6 +27,8 @@
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.HadoopTestCase;
import org.apache.hadoop.mapred.JobConf;
import org.junit.Test;
import static org.junit.Assert.assertEquals;

/**
* HadoopTestCase that tests the local job runner.

@@ -59,6 +61,7 @@ public TestLocalJobControl() throws IOException {
* object. Finally, it creates a thread to run the JobControl object and
* monitors/reports the job states.
*/
@Test
public void testLocalJobControlDataCopy() throws Exception {

FileSystem fs = FileSystem.get(createJobConf());
@@ -21,12 +21,29 @@
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.*;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.HadoopTestCase;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.RunningJob;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hadoop.mapred.TextOutputFormat;
import org.junit.Test;

import java.io.DataOutputStream;
import java.io.IOException;
import java.util.Iterator;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotSame;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

public class TestChainMapReduce extends HadoopTestCase {

private static Path getFlagDir(boolean local) {

@@ -67,6 +84,7 @@ public TestChainMapReduce() throws IOException {
super(HadoopTestCase.LOCAL_MR, HadoopTestCase.LOCAL_FS, 1, 1);
}

@Test
public void testChain() throws Exception {
Path inDir = new Path("testing/chain/input");
Path outDir = new Path("testing/chain/output");
@@ -36,6 +36,8 @@
import org.apache.hadoop.mapred.Utils;
import org.junit.After;
import org.junit.Test;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.io.BufferedReader;
import java.io.File;
@@ -17,11 +17,6 @@
*/
package org.apache.hadoop.mapred.lib;

import java.io.IOException;
import java.util.Map;

import junit.framework.TestCase;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.InputFormat;
import org.apache.hadoop.mapred.JobConf;

@@ -30,12 +25,19 @@
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.TextInputFormat;
import org.junit.Test;

import java.io.IOException;
import java.util.Map;

import static org.junit.Assert.assertEquals;

/**
* @see TestDelegatingInputFormat
*/
public class TestMultipleInputs extends TestCase {
public class TestMultipleInputs {

@Test
public void testAddInputPathWithFormat() {
final JobConf conf = new JobConf();
MultipleInputs.addInputPath(conf, new Path("/foo"), TextInputFormat.class);

@@ -48,6 +50,7 @@ public void testAddInputPathWithFormat() {
.getClass());
}

@Test
public void testAddInputPathWithMapper() {
final JobConf conf = new JobConf();
MultipleInputs.addInputPath(conf, new Path("/foo"), TextInputFormat.class,
@@ -24,7 +24,23 @@
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.serializer.JavaSerializationComparator;
import org.apache.hadoop.mapred.*;
import org.apache.hadoop.mapred.Counters;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.HadoopTestCase;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.RunningJob;
import org.apache.hadoop.mapred.SequenceFileOutputFormat;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hadoop.mapred.TextOutputFormat;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.io.BufferedReader;
import java.io.DataOutputStream;

@@ -32,17 +48,23 @@
import java.io.InputStreamReader;
import java.util.Iterator;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

public class TestMultipleOutputs extends HadoopTestCase {

public TestMultipleOutputs() throws IOException {
super(HadoopTestCase.LOCAL_MR, HadoopTestCase.LOCAL_FS, 1, 1);
}

@Test
public void testWithoutCounters() throws Exception {
_testMultipleOutputs(false);
_testMOWithJavaSerialization(false);
}

@Test
public void testWithCounters() throws Exception {
_testMultipleOutputs(true);
_testMOWithJavaSerialization(true);

@@ -62,6 +84,7 @@ private Path getDir(Path dir) {
return dir;
}

@Before
public void setUp() throws Exception {
super.setUp();
Path rootDir = getDir(ROOT_DIR);

@@ -75,6 +98,7 @@ public void setUp() throws Exception {
}
}

@After
public void tearDown() throws Exception {
Path rootDir = getDir(ROOT_DIR);
@@ -22,26 +22,45 @@
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.*;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.HadoopTestCase;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.RunningJob;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hadoop.mapred.TextOutputFormat;
import org.apache.hadoop.mapreduce.lib.map.MultithreadedMapper;
import org.junit.Test;

import java.io.DataOutputStream;
import java.io.IOException;
import java.util.Iterator;

import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

public class TestMultithreadedMapRunner extends HadoopTestCase {

public TestMultithreadedMapRunner() throws IOException {
super(HadoopTestCase.LOCAL_MR, HadoopTestCase.LOCAL_FS, 1, 1);
}

@Test
public void testOKRun() throws Exception {
run(false, false);
}

@Test
public void testIOExRun() throws Exception {
run(true, false);
}

@Test
public void testRuntimeExRun() throws Exception {
run(false, true);
}
@@ -30,6 +30,13 @@
import org.apache.hadoop.mapred.HadoopTestCase;
import org.apache.hadoop.mapred.JobConf;
import org.apache.log4j.Level;
import org.junit.Before;
import org.junit.Test;

import static org.junit.Assert.assertTrue;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;

public class TestChild extends HadoopTestCase {
private static String TEST_ROOT_DIR =

@@ -146,6 +153,7 @@ private Job submitAndValidateJob(JobConf conf, int numMaps, int numReds,
return job;
}

@Test
public void testChild() throws Exception {
try {
submitAndValidateJob(createJobConf(), 1, 1, true);
@@ -30,6 +30,9 @@
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
import org.junit.Ignore;
import org.junit.Test;
import static org.junit.Assert.assertTrue;

@Ignore
public class TestNoJobSetupCleanup extends HadoopTestCase {
private static String TEST_ROOT_DIR =

@@ -69,6 +72,7 @@ private Job submitAndValidateJob(Configuration conf, int numMaps, int numReds)
return job;
}

@Test
public void testNoJobSetupCleanup() throws Exception {
try {
Configuration conf = createJobConf();
@@ -33,6 +33,8 @@
import org.junit.BeforeClass;
import org.junit.Ignore;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

/**
* Tests context api and {@link StatusReporter#getProgress()} via
@@ -29,6 +29,10 @@
import org.apache.hadoop.mapreduce.MapReduceTestUtil;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.junit.Test;

import static org.junit.Assert.assertTrue;
import static org.junit.Assert.assertEquals;

/**
* Tests error conditions in ChainMapper/ChainReducer.

@@ -51,6 +55,7 @@ public TestChainErrors() throws IOException {
*
* @throws Exception
*/
@Test
public void testChainSubmission() throws Exception {

Configuration conf = createJobConf();

@@ -89,6 +94,7 @@ public void testChainSubmission() throws Exception {
*
* @throws Exception
*/
@Test
public void testChainFail() throws Exception {

Configuration conf = createJobConf();

@@ -114,6 +120,7 @@ public void testChainFail() throws Exception {
*
* @throws Exception
*/
@Test
public void testReducerFail() throws Exception {

Configuration conf = createJobConf();

@@ -139,6 +146,7 @@ public void testReducerFail() throws Exception {
*
* @throws Exception
*/
@Test
public void testChainMapNoOuptut() throws Exception {
Configuration conf = createJobConf();
String expectedOutput = "";

@@ -163,6 +171,7 @@ public void testChainMapNoOuptut() throws Exception {
*
* @throws Exception
*/
@Test
public void testChainReduceNoOuptut() throws Exception {
Configuration conf = createJobConf();
String expectedOutput = "";
@@ -30,6 +30,11 @@
import org.apache.hadoop.mapreduce.MapReduceTestUtil;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.junit.Test;

import static org.junit.Assert.fail;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

public class TestMapReduceChain extends HadoopTestCase {

@@ -63,6 +68,7 @@ public TestMapReduceChain() throws IOException {
super(HadoopTestCase.LOCAL_MR, HadoopTestCase.LOCAL_FS, 1, 1);
}

@Test
public void testChain() throws Exception {
Path inDir = new Path(localPathRoot, "testing/chain/input");
Path outDir = new Path(localPathRoot, "testing/chain/output");
@@ -26,6 +26,9 @@
import org.apache.hadoop.mapreduce.MapReduceTestUtil;
import org.apache.hadoop.mapreduce.lib.map.TokenCounterMapper;
import org.apache.hadoop.mapreduce.lib.reduce.IntSumReducer;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import java.io.IOException;

@@ -42,6 +45,7 @@ public TestSingleElementChain() throws IOException {
}

// test chain mapper and reducer by adding single mapper and reducer to chain
@Test
public void testNoChain() throws Exception {
Path inDir = new Path(localPathRoot, "testing/chain/input");
Path outDir = new Path(localPathRoot, "testing/chain/output");
@@ -18,25 +18,40 @@

package org.apache.hadoop.mapreduce.lib.db;

import java.sql.*;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
//import org.apache.hadoop.examples.DBCountPageView;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapred.HadoopTestCase;
import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.mapreduce.lib.db.*;
import org.apache.hadoop.mapreduce.lib.input.*;
import org.apache.hadoop.mapreduce.lib.output.*;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.TaskCounter;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.apache.hadoop.util.StringUtils;
import org.hsqldb.server.Server;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.sql.Connection;
import java.sql.Date;
import java.sql.DriverManager;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;
import java.sql.Statement;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

//import org.apache.hadoop.examples.DBCountPageView;

/**
* Test aspects of DataDrivenDBInputFormat

@@ -109,11 +124,13 @@ private void initialize(String driverClassName, String url)
createConnection(driverClassName, url);
}

@Before
public void setUp() throws Exception {
initialize(DRIVER_CLASS, DB_URL);
super.setUp();
}

@After
public void tearDown() throws Exception {
super.tearDown();
shutdown();

@@ -170,6 +187,7 @@ public void map(Object k, Object v, Context c)
}
}

@Test
public void testDateSplits() throws Exception {
Statement s = connection.createStatement();
final String DATE_TABLE = "datetable";
@@ -38,6 +38,8 @@
import org.apache.hadoop.mapreduce.Reducer;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

/**
* @see TestDelegatingInputFormat

@@ -139,7 +141,7 @@ public void testDoMultipleInputs() throws IOException {
assertTrue(output.readLine().equals("e 2"));
}

@SuppressWarnings("unchecked")
@Test
public void testAddInputPathWithFormat() throws IOException {
final Job conf = Job.getInstance();
MultipleInputs.addInputPath(conf, new Path("/foo"), TextInputFormat.class);

@@ -152,7 +154,7 @@ public void testAddInputPathWithFormat() throws IOException {
.getClass());
}

@SuppressWarnings("unchecked")
@Test
public void testAddInputPathWithMapper() throws IOException {
final Job conf = Job.getInstance();
MultipleInputs.addInputPath(conf, new Path("/foo"), TextInputFormat.class,
@@ -33,6 +33,9 @@
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.MapReduceTestUtil;
import org.junit.Test;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;

/**
* This class performs unit test for Job/JobControl classes.

@@ -121,6 +124,7 @@ private void waitTillAllFinished(JobControl theControl) {
}
}

@Test
public void testJobControlWithFailJob() throws Exception {
LOG.info("Starting testJobControlWithFailJob");
Configuration conf = createJobConf();

@@ -144,6 +148,7 @@ public void testJobControlWithFailJob() throws Exception {
theControl.stop();
}

@Test
public void testJobControlWithKillJob() throws Exception {
LOG.info("Starting testJobControlWithKillJob");

@@ -182,6 +187,7 @@ public void testJobControlWithKillJob() throws Exception {
theControl.stop();
}

@Test
public void testJobControl() throws Exception {
LOG.info("Starting testJobControl");
@@ -23,23 +23,33 @@
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.HadoopTestCase;
import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.MapReduceTestUtil;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.junit.Test;

import java.io.IOException;

import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

public class TestMultithreadedMapper extends HadoopTestCase {

public TestMultithreadedMapper() throws IOException {
super(HadoopTestCase.LOCAL_MR, HadoopTestCase.LOCAL_FS, 1, 1);
}

@Test
public void testOKRun() throws Exception {
run(false, false);
}

@Test
public void testIOExRun() throws Exception {
run(true, false);
}
@Test
public void testRuntimeExRun() throws Exception {
run(false, true);
}
@@ -33,6 +33,11 @@
import org.apache.hadoop.mapreduce.OutputCommitter;
import org.apache.hadoop.mapreduce.OutputFormat;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.assertFalse;

/**
* A JUnit test to test Map-Reduce job committer.

@@ -54,15 +59,15 @@ public TestJobOutputCommitter() throws IOException {
private FileSystem fs;
private Configuration conf = null;

@Override
protected void setUp() throws Exception {
@Before
public void setUp() throws Exception {
super.setUp();
conf = createJobConf();
fs = getFileSystem();
}

@Override
protected void tearDown() throws Exception {
@After
public void tearDown() throws Exception {
fs.delete(new Path(TEST_ROOT_DIR), true);
super.tearDown();
}

@@ -219,6 +224,7 @@ private void testKilledJob(String fileName,
*
* @throws Exception
*/
@Test
public void testDefaultCleanupAndAbort() throws Exception {
// check with a successful job
testSuccessfulJob(FileOutputCommitter.SUCCEEDED_FILE_NAME,

@@ -238,6 +244,7 @@ public void testDefaultCleanupAndAbort() throws Exception {
*
* @throws Exception
*/
@Test
public void testCustomAbort() throws Exception {
// check with a successful job
testSuccessfulJob(FileOutputCommitter.SUCCEEDED_FILE_NAME,

@@ -264,6 +271,7 @@ public void testCustomAbort() throws Exception {
* compatibility testing.
* @throws Exception
*/
@Test
public void testCustomCleanup() throws Exception {
// check with a successful job
testSuccessfulJob(CUSTOM_CLEANUP_FILE_NAME,
@@ -27,23 +27,36 @@
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.serializer.JavaSerializationComparator;
import org.apache.hadoop.mapred.HadoopTestCase;
import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.mapreduce.CounterGroup;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.MapReduceTestUtil;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStreamReader;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

public class TestMRMultipleOutputs extends HadoopTestCase {

public TestMRMultipleOutputs() throws IOException {
super(HadoopTestCase.LOCAL_MR, HadoopTestCase.LOCAL_FS, 1, 1);
}

@Test
public void testWithoutCounters() throws Exception {
_testMultipleOutputs(false);
_testMOWithJavaSerialization(false);
}

@Test
public void testWithCounters() throws Exception {
_testMultipleOutputs(true);
_testMOWithJavaSerialization(true);

@@ -57,6 +70,7 @@ public void testWithCounters() throws Exception {
private static String TEXT = "text";
private static String SEQUENCE = "sequence";

@Before
public void setUp() throws Exception {
super.setUp();
Configuration conf = createJobConf();

@@ -64,6 +78,7 @@ public void setUp() throws Exception {
fs.delete(ROOT_DIR, true);
}

@After
public void tearDown() throws Exception {
Configuration conf = createJobConf();
FileSystem fs = FileSystem.get(conf);
@@ -18,8 +18,6 @@

package org.apache.hadoop.mapreduce.lib.partition;

import java.io.*;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;

@@ -32,6 +30,15 @@
import org.apache.hadoop.mapreduce.MapReduceTestUtil;
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.map.InverseMapper;
import org.junit.Test;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;

import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

public class TestMRKeyFieldBasedComparator extends HadoopTestCase {

@@ -94,6 +101,7 @@ private void testComparator(String keySpec, int expect)
}
}

@Test
public void testBasicUnixComparator() throws Exception {
testComparator("-k1,1n", 1);
testComparator("-k2,2n", 1);

@@ -117,7 +125,7 @@ public void testBasicUnixComparator() throws Exception {
byte[] line1_bytes = line1.getBytes();
byte[] line2_bytes = line2.getBytes();

public void testWithoutMRJob(String keySpec, int expect) throws Exception {
private void testWithoutMRJob(String keySpec, int expect) throws Exception {
KeyFieldBasedComparator<Void, Void> keyFieldCmp =
new KeyFieldBasedComparator<Void, Void>();
conf.set("mapreduce.partition.keycomparator.options", keySpec);
@@ -27,6 +27,12 @@
import org.apache.hadoop.mapred.FileAlreadyExistsException;
import org.apache.hadoop.mapred.HadoopTestCase;
import org.apache.hadoop.util.ToolRunner;
import org.junit.After;
import org.junit.Test;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;

public class TestTeraSort extends HadoopTestCase {
private static Log LOG = LogFactory.getLog(TestTeraSort.class);

@@ -35,7 +41,8 @@ public TestTeraSort()
super(LOCAL_MR, LOCAL_FS, 1, 1);
}

protected void tearDown() throws Exception {
@After
public void tearDown() throws Exception {
getFileSystem().delete(new Path(TEST_DIR), true);
super.tearDown();
}

@@ -76,6 +83,7 @@ private void runTeraValidator(Configuration job,
assertEquals(ToolRunner.run(job, new TeraValidate(), svArgs), 0);
}

@Test
public void testTeraSort() throws Exception {
// Run TeraGen to generate input for 'terasort'
runTeraGen(createJobConf(), SORT_INPUT_PATH);

@@ -104,6 +112,7 @@ public void testTeraSort() throws Exception {
TERA_OUTPUT_PATH);
}

@Test
public void testTeraSortWithLessThanTwoArgs() throws Exception {
String[] args = new String[1];
assertEquals(new TeraSort().run(args), 2);