MAPREDUCE-7475. Fix non-idempotent unit tests (#6785)
Contributed by Kaiyao Ke
This commit is contained in:
parent 8f92cda35c
commit 41eacf4914
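Each fix below follows the same recipe: state that a test creates and that outlives the test method (an entry in a shared property map, a static counter, a directory on disk) is now re-established in @Before and reverted in @After, so running the same test twice in one JVM gives the same result. A minimal sketch of the pattern, assuming JUnit 4 and hadoop-common on the test classpath; the class and file names here are illustrative, not taken from the patch:

import java.io.File;

import org.apache.hadoop.fs.FileUtil;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;

public class TestIdempotentExample {

  // Static state survives for the lifetime of the JVM, so a second run of
  // the suite sees whatever the first run left behind unless we clean up.
  private static File testRootDir = new File(
      System.getProperty("test.build.data",
          System.getProperty("java.io.tmpdir", "/tmp")),
      TestIdempotentExample.class.getName());

  @Before
  public void setup() {
    // (Re)create the directory so each test starts from a known state.
    if (!testRootDir.exists()) {
      Assert.assertTrue(testRootDir.mkdirs());
    }
  }

  @After
  public void cleanup() {
    // Recursive delete; a quiet no-op if the directory is already gone.
    FileUtil.fullyDelete(testRootDir);
  }

  @Test
  public void testRunsTwice() throws Exception {
    File data = new File(testRootDir, "data.txt");
    // createNewFile() returns false if the file already exists -- exactly
    // what a rerun would hit if cleanup() never deleted the directory.
    Assert.assertTrue(data.createNewFile());
  }
}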
@@ -319,6 +319,8 @@ public void testAttempts() {
     appController.attempts();

     assertEquals(AttemptsPage.class, appController.getClazz());
+
+    appController.getProperty().remove(AMParams.ATTEMPT_STATE);
   }

 }
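The testAttempts() change makes the test clean up after itself: the AMParams.ATTEMPT_STATE property that the attempts() call leaves in the controller's shared property map is now removed once the assertion passes, so a repeated run (or a later test reading the same map) starts from the same state as the first.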
@@ -32,6 +32,7 @@
 import org.apache.hadoop.mapreduce.TaskType;
 import org.apache.hadoop.util.Progress;
 import org.junit.After;
+import org.junit.Before;
 import org.junit.Assert;
 import org.junit.Rule;
 import org.junit.Test;

@@ -47,14 +48,21 @@
 import static org.mockito.Mockito.mock;

 public class TestMapTask {
-  private static File TEST_ROOT_DIR = new File(
+  private static File testRootDir = new File(
       System.getProperty("test.build.data",
           System.getProperty("java.io.tmpdir", "/tmp")),
       TestMapTask.class.getName());

+  @Before
+  public void setup() throws Exception {
+    if(!testRootDir.exists()) {
+      testRootDir.mkdirs();
+    }
+  }
+
   @After
   public void cleanup() throws Exception {
-    FileUtil.fullyDelete(TEST_ROOT_DIR);
+    FileUtil.fullyDelete(testRootDir);
   }

   @Rule
@@ -66,7 +74,7 @@ public void cleanup() throws Exception {
   public void testShufflePermissions() throws Exception {
     JobConf conf = new JobConf();
     conf.set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, "077");
-    conf.set(MRConfig.LOCAL_DIR, TEST_ROOT_DIR.getAbsolutePath());
+    conf.set(MRConfig.LOCAL_DIR, testRootDir.getAbsolutePath());
     MapOutputFile mof = new MROutputFiles();
     mof.setConf(conf);
     TaskAttemptID attemptId = new TaskAttemptID("12345", 1, TaskType.MAP, 1, 1);
@@ -98,7 +106,7 @@ public void testShufflePermissions() throws Exception {
   public void testSpillFilesCountLimitInvalidValue() throws Exception {
     JobConf conf = new JobConf();
     conf.set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, "077");
-    conf.set(MRConfig.LOCAL_DIR, TEST_ROOT_DIR.getAbsolutePath());
+    conf.set(MRConfig.LOCAL_DIR, testRootDir.getAbsolutePath());
     conf.setInt(MRJobConfig.SPILL_FILES_COUNT_LIMIT, -2);
     MapOutputFile mof = new MROutputFiles();
     mof.setConf(conf);
@@ -124,7 +132,7 @@ public void testSpillFilesCountLimitInvalidValue() throws Exception {
   public void testSpillFilesCountBreach() throws Exception {
     JobConf conf = new JobConf();
     conf.set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, "077");
-    conf.set(MRConfig.LOCAL_DIR, TEST_ROOT_DIR.getAbsolutePath());
+    conf.set(MRConfig.LOCAL_DIR, testRootDir.getAbsolutePath());
     conf.setInt(MRJobConfig.SPILL_FILES_COUNT_LIMIT, 2);
     MapOutputFile mof = new MROutputFiles();
     mof.setConf(conf);
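In TestMapTask the static TEST_ROOT_DIR field becomes testRootDir (presumably to match the convention that only constants are ALL_CAPS once the field is treated as mutable state), a new @Before (re)creates the directory before each test, and the existing @After still deletes it, so every test runs against a fresh MRConfig.LOCAL_DIR.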
@@ -35,6 +35,7 @@
 import org.apache.hadoop.mapreduce.checkpoint.TaskCheckpointID;
 import org.apache.hadoop.util.ExitUtil;
 import org.junit.After;
+import org.junit.Before;
 import org.junit.Assert;
 import org.junit.Test;

@@ -180,6 +181,11 @@ protected void checkTaskLimits() throws TaskLimitException {
     }
   }

+  @Before
+  public void setup() {
+    statusUpdateTimes = 0;
+  }
+
   @After
   public void cleanup() {
     FileSystem.clearStatistics();
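Resetting statusUpdateTimes in a fresh @Before ensures that the count accumulated by one test execution does not carry over into the next, which is what made this test non-idempotent.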
@@ -158,6 +158,8 @@ public void setUp() throws Exception {
   @After
   public void tearDown() throws Exception {
     stopHttpServer();
+    NotificationServlet.counter = 0;
+    NotificationServlet.failureCounter = 0;
     super.tearDown();
   }

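Because counter and failureCounter are static fields on NotificationServlet, their values survive across tests in the same JVM; zeroing them in tearDown() keeps one test's notification counts from bleeding into the assertions of the next.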
@@ -18,11 +18,16 @@

 package org.apache.hadoop.mapred;

+import org.junit.After;
 import org.junit.Assert;
+
+import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.RawComparator;
 import org.apache.hadoop.io.Text;
+import org.apache.hadoop.test.GenericTestUtils;
+
 import org.junit.Test;

 import java.io.BufferedReader;
@@ -34,12 +39,9 @@
 import java.util.HashSet;
 import java.util.Iterator;
 import java.util.Set;
-import java.util.UUID;

 public class TestOldCombinerGrouping {
-  private static String TEST_ROOT_DIR = new File(System.getProperty(
-      "test.build.data", "build/test/data"), UUID.randomUUID().toString())
-      .getAbsolutePath();
+  private static File testRootDir = GenericTestUtils.getRandomizedTestDir();

   public static class Map implements
       Mapper<LongWritable, Text, Text, LongWritable> {
@@ -117,16 +119,21 @@ public int compare(Text o1, Text o2) {

   }

+  @After
+  public void cleanup() {
+    FileUtil.fullyDelete(testRootDir);
+  }
+
   @Test
   public void testCombiner() throws Exception {
-    if (!new File(TEST_ROOT_DIR).mkdirs()) {
-      throw new RuntimeException("Could not create test dir: " + TEST_ROOT_DIR);
+    if (!testRootDir.mkdirs()) {
+      throw new RuntimeException("Could not create test dir: " + testRootDir);
     }
-    File in = new File(TEST_ROOT_DIR, "input");
+    File in = new File(testRootDir, "input");
     if (!in.mkdirs()) {
       throw new RuntimeException("Could not create test dir: " + in);
     }
-    File out = new File(TEST_ROOT_DIR, "output");
+    File out = new File(testRootDir, "output");
     PrintWriter pw = new PrintWriter(new FileWriter(new File(in, "data.txt")));
     pw.println("A|a,1");
     pw.println("A|b,2");
@@ -18,7 +18,10 @@

 package org.apache.hadoop.mapreduce;

+import org.junit.After;
 import org.junit.Assert;
+
+import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.RawComparator;
@@ -26,6 +29,8 @@
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
 import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
+
+import org.apache.hadoop.test.GenericTestUtils;
 import org.junit.Test;

 import java.io.BufferedReader;
@@ -36,12 +41,9 @@
 import java.io.PrintWriter;
 import java.util.HashSet;
 import java.util.Set;
-import java.util.UUID;

 public class TestNewCombinerGrouping {
-  private static String TEST_ROOT_DIR = new File(System.getProperty(
-      "test.build.data", "build/test/data"), UUID.randomUUID().toString())
-      .getAbsolutePath();
+  private static File testRootDir = GenericTestUtils.getRandomizedTestDir();

   public static class Map extends
       Mapper<LongWritable, Text, Text, LongWritable> {
@@ -103,16 +105,21 @@ public int compare(Text o1, Text o2) {

   }

+  @After
+  public void cleanup() {
+    FileUtil.fullyDelete(testRootDir);
+  }
+
   @Test
   public void testCombiner() throws Exception {
-    if (!new File(TEST_ROOT_DIR).mkdirs()) {
-      throw new RuntimeException("Could not create test dir: " + TEST_ROOT_DIR);
+    if (!testRootDir.mkdirs()) {
+      throw new RuntimeException("Could not create test dir: " + testRootDir);
     }
-    File in = new File(TEST_ROOT_DIR, "input");
+    File in = new File(testRootDir, "input");
     if (!in.mkdirs()) {
       throw new RuntimeException("Could not create test dir: " + in);
     }
-    File out = new File(TEST_ROOT_DIR, "output");
+    File out = new File(testRootDir, "output");
     PrintWriter pw = new PrintWriter(new FileWriter(new File(in, "data.txt")));
     pw.println("A|a,1");
     pw.println("A|b,2");
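Both combiner-grouping tests replace the hand-rolled TEST_ROOT_DIR string (a UUID-suffixed path under test.build.data) with GenericTestUtils.getRandomizedTestDir() and add an @After that fully deletes the directory. The deletion is what restores idempotence: the static field is initialized only once per JVM, so without the cleanup a second run of testCombiner() would find the directory already present and fail its mkdirs() check.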