MAPREDUCE-7475. Fix non-idempotent unit tests (#6785)

Contributed by Kaiyao Ke
Kaiyao Ke authored on 2024-05-17 08:51:47 -05:00; committed by GitHub
commit 41eacf4914 (parent 8f92cda35c)
6 changed files with 53 additions and 21 deletions
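A non-idempotent test passes on its first run but fails, or poisons other tests, when executed again in the same JVM, because it leaves mutated static or on-disk state behind. Each hunk below either restores such state after the test or recreates it before. A hypothetical JUnit 4 harness for surfacing this class of bug by running a suspect class twice (SomeTest is a placeholder; only JUnitCore and Result are real JUnit APIs):

    import org.junit.Test;
    import org.junit.runner.JUnitCore;
    import org.junit.runner.Result;

    public class RepeatRunner {
      // Placeholder test class; substitute any suspect class from this
      // commit, e.g. TestMapTask, to check it for leftover state.
      public static class SomeTest {
        @Test
        public void passes() {
        }
      }

      public static void main(String[] args) {
        // An idempotent test class reports identical results on both passes.
        for (int run = 1; run <= 2; run++) {
          Result result = JUnitCore.runClasses(SomeTest.class);
          System.out.println("run " + run + ": failures = " + result.getFailureCount());
        }
      }
    }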


@@ -319,6 +319,8 @@ public void testAttempts() {
     appController.attempts();
     assertEquals(AttemptsPage.class, appController.getClazz());
+    appController.getProperty().remove(AMParams.ATTEMPT_STATE);
   }
 }
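This first hunk restores the controller's shared property map: testAttempts() stores AMParams.ATTEMPT_STATE in it, and without the added remove() a later test, or a rerun, inherits the stale entry. A minimal sketch of the mutate-then-undo pattern, with a hypothetical PROPS map standing in for the controller's properties:

    import static org.junit.Assert.assertEquals;

    import java.util.HashMap;
    import java.util.Map;

    import org.junit.Test;

    public class SharedPropertyCleanupTest {
      // Hypothetical stand-in for the controller's property map, which is
      // effectively shared state across tests and reruns.
      private static final Map<String, String> PROPS = new HashMap<>();

      @Test
      public void testWithAttemptState() {
        PROPS.put("attempt.state", "SUCCESSFUL");  // mutate, as testAttempts() does
        assertEquals("SUCCESSFUL", PROPS.get("attempt.state"));
        PROPS.remove("attempt.state");             // undo the mutation, as the patch does
      }
    }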


@@ -32,6 +32,7 @@
 import org.apache.hadoop.mapreduce.TaskType;
 import org.apache.hadoop.util.Progress;
 import org.junit.After;
+import org.junit.Before;
 import org.junit.Assert;
 import org.junit.Rule;
 import org.junit.Test;
@@ -47,14 +48,21 @@
 import static org.mockito.Mockito.mock;

 public class TestMapTask {
-  private static File TEST_ROOT_DIR = new File(
+  private static File testRootDir = new File(
       System.getProperty("test.build.data",
           System.getProperty("java.io.tmpdir", "/tmp")),
       TestMapTask.class.getName());

+  @Before
+  public void setup() throws Exception {
+    if(!testRootDir.exists()) {
+      testRootDir.mkdirs();
+    }
+  }
+
   @After
   public void cleanup() throws Exception {
-    FileUtil.fullyDelete(TEST_ROOT_DIR);
+    FileUtil.fullyDelete(testRootDir);
   }

   @Rule
@@ -66,7 +74,7 @@ public void cleanup() throws Exception {
   public void testShufflePermissions() throws Exception {
     JobConf conf = new JobConf();
     conf.set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, "077");
-    conf.set(MRConfig.LOCAL_DIR, TEST_ROOT_DIR.getAbsolutePath());
+    conf.set(MRConfig.LOCAL_DIR, testRootDir.getAbsolutePath());
     MapOutputFile mof = new MROutputFiles();
     mof.setConf(conf);
     TaskAttemptID attemptId = new TaskAttemptID("12345", 1, TaskType.MAP, 1, 1);
@@ -98,7 +106,7 @@ public void testShufflePermissions() throws Exception {
   public void testSpillFilesCountLimitInvalidValue() throws Exception {
     JobConf conf = new JobConf();
     conf.set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, "077");
-    conf.set(MRConfig.LOCAL_DIR, TEST_ROOT_DIR.getAbsolutePath());
+    conf.set(MRConfig.LOCAL_DIR, testRootDir.getAbsolutePath());
     conf.setInt(MRJobConfig.SPILL_FILES_COUNT_LIMIT, -2);
     MapOutputFile mof = new MROutputFiles();
     mof.setConf(conf);
@@ -124,7 +132,7 @@ public void testSpillFilesCountLimitInvalidValue() throws Exception {
   public void testSpillFilesCountBreach() throws Exception {
     JobConf conf = new JobConf();
     conf.set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, "077");
-    conf.set(MRConfig.LOCAL_DIR, TEST_ROOT_DIR.getAbsolutePath());
+    conf.set(MRConfig.LOCAL_DIR, testRootDir.getAbsolutePath());
     conf.setInt(MRJobConfig.SPILL_FILES_COUNT_LIMIT, 2);
     MapOutputFile mof = new MROutputFiles();
     mof.setConf(conf);
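In TestMapTask the rename from TEST_ROOT_DIR to testRootDir is cosmetic; the behavioral fix is the new @Before, which recreates the directory that cleanup() deletes, so every test (and every rerun) starts from an existing, empty root. A sketch of that create/delete lifecycle, assuming an illustrative temp root rather than the patch's test.build.data derivation:

    import java.io.File;

    import org.apache.hadoop.fs.FileUtil;
    import org.junit.After;
    import org.junit.Assert;
    import org.junit.Before;
    import org.junit.Test;

    public class TempDirLifecycleTest {
      // Assumed root for illustration; the patch derives its root from
      // test.build.data / java.io.tmpdir instead.
      private static final File ROOT =
          new File(System.getProperty("java.io.tmpdir", "/tmp"), "temp-dir-lifecycle");

      @Before
      public void setup() {
        if (!ROOT.exists()) {
          ROOT.mkdirs();  // recreate whatever the previous @After removed
        }
      }

      @After
      public void cleanup() {
        FileUtil.fullyDelete(ROOT);  // leave nothing behind for the next test or run
      }

      @Test
      public void testRootIsUsable() {
        Assert.assertTrue(new File(ROOT, "scratch").mkdirs());
      }
    }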


@@ -35,6 +35,7 @@
 import org.apache.hadoop.mapreduce.checkpoint.TaskCheckpointID;
 import org.apache.hadoop.util.ExitUtil;
 import org.junit.After;
+import org.junit.Before;
 import org.junit.Assert;
 import org.junit.Test;
@@ -180,6 +181,11 @@ protected void checkTaskLimits() throws TaskLimitException {
     }
   }

+  @Before
+  public void setup() {
+    statusUpdateTimes = 0;
+  }
+
   @After
   public void cleanup() {
     FileSystem.clearStatistics();
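Here the accumulated statusUpdateTimes count is zeroed before each test instead of trusting its initializer, since a value that survives the previous test would skew absolute assertions. A sketch of the same reset-in-setup pattern with a hypothetical counter:

    import static org.junit.Assert.assertEquals;

    import org.junit.Before;
    import org.junit.Test;

    public class CounterResetTest {
      // Hypothetical accumulator, analogous to statusUpdateTimes in the patch.
      private static int updates = 0;

      @Before
      public void setup() {
        updates = 0;  // start every test, and every rerun, from a known zero
      }

      @Test
      public void testCountsOneUpdate() {
        updates++;
        assertEquals(1, updates);  // holds only because setup() zeroed the field
      }
    }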


@@ -158,6 +158,8 @@ public void setUp() throws Exception {
   @After
   public void tearDown() throws Exception {
     stopHttpServer();
+    NotificationServlet.counter = 0;
+    NotificationServlet.failureCounter = 0;
     super.tearDown();
   }
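The notification test's servlet counters are static, so a second run in the same JVM would begin mid-count; resetting them in tearDown() keeps assertions on absolute values valid. A compact illustration with a hypothetical static counter in place of NotificationServlet's:

    import static org.junit.Assert.assertEquals;

    import org.junit.After;
    import org.junit.Test;

    public class StaticCounterTest {
      // Hypothetical stand-in for NotificationServlet.counter.
      static int counter = 0;

      @Test
      public void testNotificationsArrive() {
        counter++;                 // simulates the servlet counting a request
        assertEquals(1, counter);  // absolute assertion: only true from a clean slate
      }

      @After
      public void tearDown() {
        counter = 0;  // restore the static so a rerun in the same JVM passes too
      }
    }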


@@ -18,11 +18,16 @@
 package org.apache.hadoop.mapred;

+import org.junit.After;
 import org.junit.Assert;
+
+import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.RawComparator;
 import org.apache.hadoop.io.Text;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.junit.Test;

 import java.io.BufferedReader;
@@ -34,12 +39,9 @@
 import java.util.HashSet;
 import java.util.Iterator;
 import java.util.Set;
-import java.util.UUID;

 public class TestOldCombinerGrouping {
-  private static String TEST_ROOT_DIR = new File(System.getProperty(
-      "test.build.data", "build/test/data"), UUID.randomUUID().toString())
-      .getAbsolutePath();
+  private static File testRootDir = GenericTestUtils.getRandomizedTestDir();

   public static class Map implements
       Mapper<LongWritable, Text, Text, LongWritable> {
@@ -117,16 +119,21 @@ public int compare(Text o1, Text o2) {
   }

+  @After
+  public void cleanup() {
+    FileUtil.fullyDelete(testRootDir);
+  }
+
   @Test
   public void testCombiner() throws Exception {
-    if (!new File(TEST_ROOT_DIR).mkdirs()) {
-      throw new RuntimeException("Could not create test dir: " + TEST_ROOT_DIR);
+    if (!testRootDir.mkdirs()) {
+      throw new RuntimeException("Could not create test dir: " + testRootDir);
     }
-    File in = new File(TEST_ROOT_DIR, "input");
+    File in = new File(testRootDir, "input");
     if (!in.mkdirs()) {
       throw new RuntimeException("Could not create test dir: " + in);
     }
-    File out = new File(TEST_ROOT_DIR, "output");
+    File out = new File(testRootDir, "output");
     PrintWriter pw = new PrintWriter(new FileWriter(new File(in, "data.txt")));
     pw.println("A|a,1");
     pw.println("A|b,2");


@@ -18,7 +18,10 @@
 package org.apache.hadoop.mapreduce;

+import org.junit.After;
 import org.junit.Assert;
+
+import org.apache.hadoop.fs.FileUtil;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.RawComparator;
@@ -26,6 +29,8 @@
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
 import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.junit.Test;

 import java.io.BufferedReader;
@@ -36,12 +41,9 @@
 import java.io.PrintWriter;
 import java.util.HashSet;
 import java.util.Set;
-import java.util.UUID;

 public class TestNewCombinerGrouping {
-  private static String TEST_ROOT_DIR = new File(System.getProperty(
-      "test.build.data", "build/test/data"), UUID.randomUUID().toString())
-      .getAbsolutePath();
+  private static File testRootDir = GenericTestUtils.getRandomizedTestDir();

   public static class Map extends
       Mapper<LongWritable, Text, Text, LongWritable> {
@@ -103,16 +105,21 @@ public int compare(Text o1, Text o2) {
   }

+  @After
+  public void cleanup() {
+    FileUtil.fullyDelete(testRootDir);
+  }
+
   @Test
   public void testCombiner() throws Exception {
-    if (!new File(TEST_ROOT_DIR).mkdirs()) {
-      throw new RuntimeException("Could not create test dir: " + TEST_ROOT_DIR);
+    if (!testRootDir.mkdirs()) {
+      throw new RuntimeException("Could not create test dir: " + testRootDir);
     }
-    File in = new File(TEST_ROOT_DIR, "input");
+    File in = new File(testRootDir, "input");
     if (!in.mkdirs()) {
       throw new RuntimeException("Could not create test dir: " + in);
     }
-    File out = new File(TEST_ROOT_DIR, "output");
+    File out = new File(testRootDir, "output");
     PrintWriter pw = new PrintWriter(new FileWriter(new File(in, "data.txt")));
     pw.println("A|a,1");
     pw.println("A|b,2");