diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/pom.xml b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/pom.xml
index d5d3b8fd17..38e7d2756d 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/pom.xml
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/pom.xml
@@ -51,6 +51,21 @@
       <artifactId>assertj-core</artifactId>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>org.junit.jupiter</groupId>
+      <artifactId>junit-jupiter-api</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.junit.jupiter</groupId>
+      <artifactId>junit-jupiter-engine</artifactId>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.junit.platform</groupId>
+      <artifactId>junit-platform-launcher</artifactId>
+      <scope>test</scope>
+    </dependency>
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestJobClient.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestJobClient.java
index bf37b03b61..6c0be34ba4 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestJobClient.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestJobClient.java
@@ -22,22 +22,27 @@
import java.util.Collection;
import org.apache.hadoop.conf.Configuration;
+
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.ClusterStatus.BlackListInfo;
import org.apache.hadoop.mapreduce.MRConfig;
import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.Timeout;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
public class TestJobClient {
final static String TEST_DIR = new File("target",
TestJobClient.class.getSimpleName()).getAbsolutePath();
- @After
+ @AfterEach
public void tearDown() {
FileUtil.fullyDelete(new File(TEST_DIR));
}
@@ -51,47 +56,48 @@ public void testGetClusterStatusWithLocalJobRunner() throws Exception {
ClusterStatus clusterStatus = client.getClusterStatus(true);
Collection<String> activeTrackerNames = clusterStatus
.getActiveTrackerNames();
- Assert.assertEquals(0, activeTrackerNames.size());
+ assertEquals(0, activeTrackerNames.size());
int blacklistedTrackers = clusterStatus.getBlacklistedTrackers();
- Assert.assertEquals(0, blacklistedTrackers);
+ assertEquals(0, blacklistedTrackers);
Collection<BlackListInfo> blackListedTrackersInfo = clusterStatus
.getBlackListedTrackersInfo();
- Assert.assertEquals(0, blackListedTrackersInfo.size());
+ assertEquals(0, blackListedTrackersInfo.size());
}
- @Test(timeout = 10000)
+ @Test
+ @Timeout(10)
public void testIsJobDirValid() throws IOException {
Configuration conf = new Configuration();
FileSystem fs = FileSystem.getLocal(conf);
Path testDir = new Path(TEST_DIR);
fs.mkdirs(testDir);
- Assert.assertFalse(JobClient.isJobDirValid(testDir, fs));
+ assertFalse(JobClient.isJobDirValid(testDir, fs));
Path jobconf = new Path(testDir, "job.xml");
Path jobsplit = new Path(testDir, "job.split");
fs.create(jobconf);
fs.create(jobsplit);
- Assert.assertTrue(JobClient.isJobDirValid(testDir, fs));
-
+ assertTrue(JobClient.isJobDirValid(testDir, fs));
+
fs.delete(jobconf, true);
fs.delete(jobsplit, true);
}
-
- @Test(timeout = 10000)
+
+ @Test
+ @Timeout(10)
public void testGetStagingAreaDir() throws IOException, InterruptedException {
Configuration conf = new Configuration();
JobClient client = new JobClient(conf);
- Assert.assertTrue(
- "Mismatch in paths",
- client.getClusterHandle().getStagingAreaDir().toString()
- .equals(client.getStagingAreaDir().toString()));
+ assertEquals(client.getClusterHandle().getStagingAreaDir().toString(),
+ client.getStagingAreaDir().toString());
}
/**
* Asks the compiler to check if JobClient is AutoClosable.
*/
- @Test(timeout = 10000)
+ @Test
+ @Timeout(10)
public void testAutoClosable() throws IOException {
Configuration conf = new Configuration();
try (JobClient jobClient = new JobClient(conf)) {
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestJobClientGetJob.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestJobClientGetJob.java
index 9ae7b61830..7c8bca7199 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestJobClientGetJob.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestJobClientGetJob.java
@@ -18,15 +18,16 @@
package org.apache.hadoop.mapred;
-import static org.junit.Assert.assertNotNull;
-
import java.io.IOException;
+import org.junit.jupiter.api.Test;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
-import org.junit.Test;
+
+import static org.junit.jupiter.api.Assertions.assertNotNull;
public class TestJobClientGetJob {
@@ -42,7 +43,7 @@ private Path createTempFile(String filename, String contents)
os.close();
return path;
}
-
+
@SuppressWarnings("deprecation")
@Test
public void testGetRunningJobFromJobClient() throws Exception {
@@ -54,10 +55,10 @@ public void testGetRunningJobFromJobClient() throws Exception {
FileOutputFormat.setOutputPath(conf, outputDir);
JobClient jc = new JobClient(conf);
RunningJob runningJob = jc.submitJob(conf);
- assertNotNull("Running job", runningJob);
+ assertNotNull(runningJob, "Running job");
// Check that the running job can be retrieved by ID
RunningJob newRunningJob = jc.getJob(runningJob.getID());
- assertNotNull("New running job", newRunningJob);
+ assertNotNull(newRunningJob, "New running job");
}
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestLocalDistributedCacheManager.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestLocalDistributedCacheManager.java
index 50cc63094b..38aa4dd2f3 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestLocalDistributedCacheManager.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestLocalDistributedCacheManager.java
@@ -18,8 +18,8 @@
package org.apache.hadoop.mapred;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
@@ -55,9 +55,9 @@
import org.apache.hadoop.mapreduce.MRConfig;
import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.util.functional.CallableRaisingIOE;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
@@ -109,7 +109,7 @@ private static void delete(File file) throws IOException {
}
}
- @Before
+ @BeforeEach
public void setup() throws Exception {
mockfs = mock(FileSystem.class);
localDir = new File(System.getProperty("test.build.dir", "target/test-dir"),
@@ -118,7 +118,7 @@ public void setup() throws Exception {
localDir.mkdirs();
}
- @After
+ @AfterEach
public void cleanup() throws Exception {
delete(localDir);
}
@@ -163,8 +163,8 @@ public void testDownload() throws Exception {
when(mockfs.getFileStatus(any(Path.class))).thenAnswer(new Answer() {
@Override
public FileStatus answer(InvocationOnMock args) throws Throwable {
- Path p = (Path)args.getArguments()[0];
- if("file.txt".equals(p.getName())) {
+ Path p = (Path) args.getArguments()[0];
+ if ("file.txt".equals(p.getName())) {
return createMockTestFileStatus(filePath);
} else {
throw notMocked(p);
@@ -180,7 +180,7 @@ public FileStatus answer(InvocationOnMock args) throws Throwable {
// anything else: FNFE
when(mockfs.openFile(any(Path.class))).thenAnswer(
(Answer) args -> {
- Path src = (Path)args.getArguments()[0];
+ Path src = (Path) args.getArguments()[0];
if ("file.txt".equals(src.getName())) {
return new MockOpenFileBuilder(mockfs, src,
() -> CompletableFuture.completedFuture(in));
@@ -228,15 +228,15 @@ public void testEmptyDownload() throws Exception {
when(mockfs.getFileStatus(any(Path.class))).thenAnswer(
(Answer) args -> {
- Path p = (Path)args.getArguments()[0];
+ Path p = (Path) args.getArguments()[0];
throw notMocked(p);
});
when(mockfs.getConf()).thenReturn(conf);
when(mockfs.openFile(any(Path.class))).thenAnswer(
(Answer) args -> {
- Path src = (Path)args.getArguments()[0];
- throw notMocked(src);
+ Path src = (Path) args.getArguments()[0];
+ throw notMocked(src);
});
conf.set(MRJobConfig.CACHE_FILES, "");
conf.set(MRConfig.LOCAL_DIR, localDir.getAbsolutePath());
@@ -272,8 +272,8 @@ public void testDuplicateDownload() throws Exception {
when(mockfs.getFileStatus(any(Path.class))).thenAnswer(new Answer() {
@Override
public FileStatus answer(InvocationOnMock args) throws Throwable {
- Path p = (Path)args.getArguments()[0];
- if("file.txt".equals(p.getName())) {
+ Path p = (Path) args.getArguments()[0];
+ if ("file.txt".equals(p.getName())) {
return createMockTestFileStatus(filePath);
} else {
throw notMocked(p);
@@ -286,7 +286,7 @@ public FileStatus answer(InvocationOnMock args) throws Throwable {
new FSDataInputStream(new MockInputStream(TEST_DATA));
when(mockfs.openFile(any(Path.class))).thenAnswer(
(Answer) args -> {
- Path src = (Path)args.getArguments()[0];
+ Path src = (Path) args.getArguments()[0];
if ("file.txt".equals(src.getName())) {
return new MockOpenFileBuilder(mockfs, src,
() -> CompletableFuture.completedFuture(in));
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestLocalModeWithNewApis.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestLocalModeWithNewApis.java
index 556f7fd716..628ff15095 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestLocalModeWithNewApis.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestLocalModeWithNewApis.java
@@ -17,7 +17,8 @@
*/
package org.apache.hadoop.mapred;
-import static org.junit.Assert.*;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.BufferedReader;
import java.io.DataOutputStream;
@@ -38,9 +39,9 @@
import org.apache.hadoop.mapreduce.Reducer;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterEach;
+import org.junit.jupiter.api.BeforeEach;
+import org.junit.jupiter.api.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -51,13 +52,13 @@ public class TestLocalModeWithNewApis {
Configuration conf;
- @Before
+ @BeforeEach
public void setUp() throws Exception {
conf = new Configuration();
conf.set(MRConfig.FRAMEWORK_NAME, MRConfig.LOCAL_FRAMEWORK_NAME);
}
- @After
+ @AfterEach
public void tearDown() throws Exception {
}
@@ -93,8 +94,8 @@ public void testNewApis() throws Exception {
String output = readOutput(outDir, conf);
assertEquals("The\t1\nbrown\t1\nfox\t2\nhas\t1\nmany\t1\n" +
- "quick\t1\nred\t1\nsilly\t1\nsox\t1\n", output);
-
+ "quick\t1\nred\t1\nsilly\t1\nsox\t1\n", output);
+
outFs.delete(tmpBaseDir, true);
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestMRWithDistributedCache.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestMRWithDistributedCache.java
index 62b8815e6f..5e60829647 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestMRWithDistributedCache.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapred/TestMRWithDistributedCache.java
@@ -27,9 +27,8 @@
import java.util.jar.JarOutputStream;
import java.util.zip.ZipEntry;
-import org.junit.Assert;
-import org.junit.Test;
-import static org.junit.Assert.*;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.Timeout;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.filecache.DistributedCache;
@@ -50,6 +49,13 @@
import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+
/**
* Tests the use of the
* {@link org.apache.hadoop.mapreduce.filecache.DistributedCache} within the
@@ -96,23 +102,23 @@ public void setup(TaskInputOutputContext<?, ?, ?, ?> context)
FileSystem fs = LocalFileSystem.get(conf);
// Check that 2 files and 2 archives are present
- Assert.assertEquals(2, localFiles.length);
- Assert.assertEquals(2, localArchives.length);
- Assert.assertEquals(2, files.length);
- Assert.assertEquals(2, archives.length);
+ assertEquals(2, localFiles.length);
+ assertEquals(2, localArchives.length);
+ assertEquals(2, files.length);
+ assertEquals(2, archives.length);
// Check the file name
- Assert.assertTrue(files[0].getPath().endsWith("distributed.first"));
- Assert.assertTrue(files[1].getPath().endsWith("distributed.second.jar"));
+ assertTrue(files[0].getPath().endsWith("distributed.first"));
+ assertTrue(files[1].getPath().endsWith("distributed.second.jar"));
// Check lengths of the files
- Assert.assertEquals(1, fs.getFileStatus(localFiles[0]).getLen());
- Assert.assertTrue(fs.getFileStatus(localFiles[1]).getLen() > 1);
+ assertEquals(1, fs.getFileStatus(localFiles[0]).getLen());
+ assertTrue(fs.getFileStatus(localFiles[1]).getLen() > 1);
// Check extraction of the archive
- Assert.assertTrue(fs.exists(new Path(localArchives[0],
+ assertTrue(fs.exists(new Path(localArchives[0],
"distributed.jar.inside3")));
- Assert.assertTrue(fs.exists(new Path(localArchives[1],
+ assertTrue(fs.exists(new Path(localArchives[1],
"distributed.jar.inside4")));
// Check the class loaders
@@ -120,19 +126,20 @@ public void setup(TaskInputOutputContext<?, ?, ?, ?> context)
ClassLoader cl = Thread.currentThread().getContextClassLoader();
// Both the file and the archive were added to classpath, so both
// should be reachable via the class loader.
- Assert.assertNotNull(cl.getResource("distributed.jar.inside2"));
- Assert.assertNotNull(cl.getResource("distributed.jar.inside3"));
- Assert.assertNull(cl.getResource("distributed.jar.inside4"));
+ assertNotNull(cl.getResource("distributed.jar.inside2"));
+ assertNotNull(cl.getResource("distributed.jar.inside3"));
+ assertNull(cl.getResource("distributed.jar.inside4"));
// Check that the symlink for the renaming was created in the cwd;
- Assert.assertTrue("symlink distributed.first.symlink doesn't exist",
- symlinkFile.exists());
- Assert.assertEquals("symlink distributed.first.symlink length not 1", 1,
- symlinkFile.length());
+ assertTrue(symlinkFile.exists(),
+ "symlink distributed.first.symlink doesn't exist");
+ assertEquals(1,
+ symlinkFile.length(),
+ "symlink distributed.first.symlink length not 1");
//This last one is a difference between MRv2 and MRv1
- Assert.assertTrue("second file should be symlinked too",
- expectedAbsentSymlinkFile.exists());
+ assertTrue(expectedAbsentSymlinkFile.exists(),
+ "second file should be symlinked too");
}
}
@@ -190,16 +197,16 @@ private void testWithConf(Configuration conf) throws IOException,
@Test
public void testLocalJobRunner() throws Exception {
symlinkFile.delete(); // ensure symlink is not present (e.g. if test is
- // killed part way through)
+ // killed part way through)
Configuration c = new Configuration();
c.set(JTConfig.JT_IPC_ADDRESS, "local");
c.set("fs.defaultFS", "file:///");
testWithConf(c);
-
- assertFalse("Symlink not removed by local job runner",
- // Symlink target will have gone so can't use File.exists()
- Arrays.asList(new File(".").list()).contains(symlinkFile.getName()));
+
+ assertFalse(Arrays.asList(new File(".").list()).contains(symlinkFile.getName()),
+ // Symlink target will have gone so can't use File.exists()
+ "Symlink not removed by local job runner");
}
private Path createTempFile(String filename, String contents)
@@ -223,92 +230,93 @@ private Path makeJar(Path p, int index) throws FileNotFoundException,
return p;
}
- @Test (timeout = 10000)
+ @Test
+ @Timeout(10)
public void testDeprecatedFunctions() throws Exception {
DistributedCache.addLocalArchives(conf, "Test Local Archives 1");
- Assert.assertEquals("Test Local Archives 1",
+ assertEquals("Test Local Archives 1",
conf.get(DistributedCache.CACHE_LOCALARCHIVES));
- Assert.assertEquals(1,
+ assertEquals(1,
JobContextImpl.getLocalCacheArchives(conf).length);
- Assert.assertEquals("Test Local Archives 1",
+ assertEquals("Test Local Archives 1",
JobContextImpl.getLocalCacheArchives(conf)[0].getName());
DistributedCache.addLocalArchives(conf, "Test Local Archives 2");
- Assert.assertEquals("Test Local Archives 1,Test Local Archives 2",
+ assertEquals("Test Local Archives 1,Test Local Archives 2",
conf.get(DistributedCache.CACHE_LOCALARCHIVES));
- Assert.assertEquals(2,
+ assertEquals(2,
JobContextImpl.getLocalCacheArchives(conf).length);
- Assert.assertEquals("Test Local Archives 2",
+ assertEquals("Test Local Archives 2",
JobContextImpl.getLocalCacheArchives(conf)[1].getName());
DistributedCache.setLocalArchives(conf, "Test Local Archives 3");
- Assert.assertEquals("Test Local Archives 3",
+ assertEquals("Test Local Archives 3",
conf.get(DistributedCache.CACHE_LOCALARCHIVES));
- Assert.assertEquals(1,
+ assertEquals(1,
JobContextImpl.getLocalCacheArchives(conf).length);
- Assert.assertEquals("Test Local Archives 3",
+ assertEquals("Test Local Archives 3",
JobContextImpl.getLocalCacheArchives(conf)[0].getName());
DistributedCache.addLocalFiles(conf, "Test Local Files 1");
- Assert.assertEquals("Test Local Files 1",
+ assertEquals("Test Local Files 1",
conf.get(DistributedCache.CACHE_LOCALFILES));
- Assert.assertEquals(1,
+ assertEquals(1,
JobContextImpl.getLocalCacheFiles(conf).length);
- Assert.assertEquals("Test Local Files 1",
+ assertEquals("Test Local Files 1",
JobContextImpl.getLocalCacheFiles(conf)[0].getName());
DistributedCache.addLocalFiles(conf, "Test Local Files 2");
- Assert.assertEquals("Test Local Files 1,Test Local Files 2",
+ assertEquals("Test Local Files 1,Test Local Files 2",
conf.get(DistributedCache.CACHE_LOCALFILES));
- Assert.assertEquals(2,
+ assertEquals(2,
JobContextImpl.getLocalCacheFiles(conf).length);
- Assert.assertEquals("Test Local Files 2",
+ assertEquals("Test Local Files 2",
JobContextImpl.getLocalCacheFiles(conf)[1].getName());
DistributedCache.setLocalFiles(conf, "Test Local Files 3");
- Assert.assertEquals("Test Local Files 3",
+ assertEquals("Test Local Files 3",
conf.get(DistributedCache.CACHE_LOCALFILES));
- Assert.assertEquals(1,
+ assertEquals(1,
JobContextImpl.getLocalCacheFiles(conf).length);
- Assert.assertEquals("Test Local Files 3",
+ assertEquals("Test Local Files 3",
JobContextImpl.getLocalCacheFiles(conf)[0].getName());
DistributedCache.setArchiveTimestamps(conf, "1234567890");
- Assert.assertEquals(1234567890,
+ assertEquals(1234567890,
conf.getLong(DistributedCache.CACHE_ARCHIVES_TIMESTAMPS, 0));
- Assert.assertEquals(1,
+ assertEquals(1,
JobContextImpl.getArchiveTimestamps(conf).length);
- Assert.assertEquals(1234567890,
+ assertEquals(1234567890,
JobContextImpl.getArchiveTimestamps(conf)[0]);
DistributedCache.setFileTimestamps(conf, "1234567890");
- Assert.assertEquals(1234567890,
+ assertEquals(1234567890,
conf.getLong(DistributedCache.CACHE_FILES_TIMESTAMPS, 0));
- Assert.assertEquals(1,
+ assertEquals(1,
JobContextImpl.getFileTimestamps(conf).length);
- Assert.assertEquals(1234567890,
+ assertEquals(1234567890,
JobContextImpl.getFileTimestamps(conf)[0]);
DistributedCache.createAllSymlink(conf, new File("Test Job Cache Dir"),
new File("Test Work Dir"));
- Assert.assertNull(conf.get(DistributedCache.CACHE_SYMLINK));
- Assert.assertTrue(DistributedCache.getSymlink(conf));
+ assertNull(conf.get(DistributedCache.CACHE_SYMLINK));
+ assertTrue(DistributedCache.getSymlink(conf));
- Assert.assertTrue(symlinkFile.createNewFile());
+ assertTrue(symlinkFile.createNewFile());
FileStatus fileStatus =
DistributedCache.getFileStatus(conf, symlinkFile.toURI());
- Assert.assertNotNull(fileStatus);
- Assert.assertEquals(fileStatus.getModificationTime(),
+ assertNotNull(fileStatus);
+ assertEquals(fileStatus.getModificationTime(),
DistributedCache.getTimestamp(conf, symlinkFile.toURI()));
- Assert.assertTrue(symlinkFile.delete());
+ assertTrue(symlinkFile.delete());
Job.addCacheArchive(symlinkFile.toURI(), conf);
- Assert.assertEquals(symlinkFile.toURI().toString(),
+ assertEquals(symlinkFile.toURI().toString(),
conf.get(DistributedCache.CACHE_ARCHIVES));
- Assert.assertEquals(1, JobContextImpl.getCacheArchives(conf).length);
- Assert.assertEquals(symlinkFile.toURI(),
+ assertEquals(1, JobContextImpl.getCacheArchives(conf).length);
+ assertEquals(symlinkFile.toURI(),
JobContextImpl.getCacheArchives(conf)[0]);
Job.addCacheFile(symlinkFile.toURI(), conf);
- Assert.assertEquals(symlinkFile.toURI().toString(),
+ assertEquals(symlinkFile.toURI().toString(),
conf.get(DistributedCache.CACHE_FILES));
- Assert.assertEquals(1, JobContextImpl.getCacheFiles(conf).length);
- Assert.assertEquals(symlinkFile.toURI(),
+ assertEquals(1, JobContextImpl.getCacheFiles(conf).length);
+ assertEquals(symlinkFile.toURI(),
JobContextImpl.getCacheFiles(conf)[0]);
}
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java
index 737918473e..205a2330a1 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/TestTypeConverter.java
@@ -20,6 +20,9 @@
import org.apache.hadoop.util.StringUtils;
import static org.assertj.core.api.Assertions.assertThat;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.fail;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
@@ -42,8 +45,7 @@
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.util.Records;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
import org.mockito.Mockito;
public class TestTypeConverter {
@@ -53,26 +55,26 @@ public void testEnums() throws Exception {
TypeConverter.fromYarn(applicationState, FinalApplicationStatus.FAILED);
}
// ad hoc test of NEW_SAVING, which is newly added
- Assert.assertEquals(State.PREP, TypeConverter.fromYarn(
+ assertEquals(State.PREP, TypeConverter.fromYarn(
YarnApplicationState.NEW_SAVING, FinalApplicationStatus.FAILED));
-
+
for (TaskType taskType : TaskType.values()) {
TypeConverter.fromYarn(taskType);
}
-
+
for (JobState jobState : JobState.values()) {
TypeConverter.fromYarn(jobState);
}
-
+
for (QueueState queueState : QueueState.values()) {
TypeConverter.fromYarn(queueState);
}
-
+
for (TaskState taskState : TaskState.values()) {
TypeConverter.fromYarn(taskState);
}
}
-
+
@Test
public void testFromYarn() throws Exception {
int appStartTime = 612354;
@@ -98,10 +100,10 @@ public void testFromYarn() throws Exception {
appUsageRpt.setUsedResources(r);
applicationReport.setApplicationResourceUsageReport(appUsageRpt);
JobStatus jobStatus = TypeConverter.fromYarn(applicationReport, "dummy-jobfile");
- Assert.assertEquals(appStartTime, jobStatus.getStartTime());
- Assert.assertEquals(appFinishTime, jobStatus.getFinishTime());
- Assert.assertEquals(state.toString(), jobStatus.getState().toString());
- Assert.assertEquals(JobPriority.NORMAL, jobStatus.getPriority());
+ assertEquals(appStartTime, jobStatus.getStartTime());
+ assertEquals(appFinishTime, jobStatus.getFinishTime());
+ assertEquals(state.toString(), jobStatus.getState().toString());
+ assertEquals(JobPriority.NORMAL, jobStatus.getPriority());
}
@Test
@@ -122,7 +124,7 @@ public void testFromYarnApplicationReport() {
try {
JobStatus status = TypeConverter.fromYarn(mockReport, jobFile);
} catch (NullPointerException npe) {
- Assert.fail("Type converstion from YARN fails for jobs without " +
+ fail("Type conversion from YARN fails for jobs without " +
"ApplicationUsageReport");
}
@@ -137,20 +139,21 @@ public void testFromYarnApplicationReport() {
appUsageRpt.setUsedResources(r);
when(mockReport.getApplicationResourceUsageReport()).thenReturn(appUsageRpt);
JobStatus status = TypeConverter.fromYarn(mockReport, jobFile);
- Assert.assertNotNull("fromYarn returned null status", status);
- Assert.assertEquals("jobFile set incorrectly", "dummy-path/job.xml", status.getJobFile());
- Assert.assertEquals("queue set incorrectly", "dummy-queue", status.getQueue());
- Assert.assertEquals("trackingUrl set incorrectly", "dummy-tracking-url", status.getTrackingUrl());
- Assert.assertEquals("user set incorrectly", "dummy-user", status.getUsername());
- Assert.assertEquals("schedulingInfo set incorrectly", "dummy-tracking-url", status.getSchedulingInfo());
- Assert.assertEquals("jobId set incorrectly", 6789, status.getJobID().getId());
- Assert.assertEquals("state set incorrectly", JobStatus.State.KILLED, status.getState());
- Assert.assertEquals("needed mem info set incorrectly", 2048, status.getNeededMem());
- Assert.assertEquals("num rsvd slots info set incorrectly", 1, status.getNumReservedSlots());
- Assert.assertEquals("num used slots info set incorrectly", 3, status.getNumUsedSlots());
- Assert.assertEquals("rsvd mem info set incorrectly", 2048, status.getReservedMem());
- Assert.assertEquals("used mem info set incorrectly", 2048, status.getUsedMem());
- Assert.assertEquals("priority set incorrectly", JobPriority.HIGH, status.getPriority());
+ assertNotNull(status, "fromYarn returned null status");
+ assertEquals("dummy-path/job.xml", status.getJobFile(), "jobFile set incorrectly");
+ assertEquals("dummy-queue", status.getQueue(), "queue set incorrectly");
+ assertEquals("dummy-tracking-url", status.getTrackingUrl(), "trackingUrl set incorrectly");
+ assertEquals("dummy-user", status.getUsername(), "user set incorrectly");
+ assertEquals("dummy-tracking-url", status.getSchedulingInfo(),
+ "schedulingInfo set incorrectly");
+ assertEquals(6789, status.getJobID().getId(), "jobId set incorrectly");
+ assertEquals(JobStatus.State.KILLED, status.getState(), "state set incorrectly");
+ assertEquals(2048, status.getNeededMem(), "needed mem info set incorrectly");
+ assertEquals(1, status.getNumReservedSlots(), "num rsvd slots info set incorrectly");
+ assertEquals(3, status.getNumUsedSlots(), "num used slots info set incorrectly");
+ assertEquals(2048, status.getReservedMem(), "rsvd mem info set incorrectly");
+ assertEquals(2048, status.getUsedMem(), "used mem info set incorrectly");
+ assertEquals(JobPriority.HIGH, status.getPriority(), "priority set incorrectly");
}
@Test
@@ -160,9 +163,9 @@ public void testFromYarnQueueInfo() {
queueInfo.setQueueState(org.apache.hadoop.yarn.api.records.QueueState.STOPPED);
org.apache.hadoop.mapreduce.QueueInfo returned =
TypeConverter.fromYarn(queueInfo, new Configuration());
- Assert.assertEquals("queueInfo translation didn't work.",
- returned.getState().toString(),
- StringUtils.toLowerCase(queueInfo.getQueueState().toString()));
+ assertEquals(returned.getState().toString(),
+ StringUtils.toLowerCase(queueInfo.getQueueState().toString()),
+ "queueInfo translation didn't work.");
}
/**
@@ -173,21 +176,21 @@ public void testFromYarnQueueInfo() {
public void testFromYarnQueue() {
//Define child queue
org.apache.hadoop.yarn.api.records.QueueInfo child =
- Mockito.mock(org.apache.hadoop.yarn.api.records.QueueInfo.class);
+ Mockito.mock(org.apache.hadoop.yarn.api.records.QueueInfo.class);
Mockito.when(child.getQueueState()).thenReturn(QueueState.RUNNING);
//Define parent queue
org.apache.hadoop.yarn.api.records.QueueInfo queueInfo =
- Mockito.mock(org.apache.hadoop.yarn.api.records.QueueInfo.class);
+ Mockito.mock(org.apache.hadoop.yarn.api.records.QueueInfo.class);
List children =
- new ArrayList();
+ new ArrayList();
children.add(child); //Add one child
Mockito.when(queueInfo.getChildQueues()).thenReturn(children);
Mockito.when(queueInfo.getQueueState()).thenReturn(QueueState.RUNNING);
//Call the function we're testing
org.apache.hadoop.mapreduce.QueueInfo returned =
- TypeConverter.fromYarn(queueInfo, new Configuration());
+ TypeConverter.fromYarn(queueInfo, new Configuration());
//Verify that the converted queue has the 1 child we had added
assertThat(returned.getQueueChildren().size())
@@ -204,7 +207,7 @@ public void testFromYarnJobReport() throws Exception {
JobReport jobReport = Records.newRecord(JobReport.class);
ApplicationId applicationId = ApplicationId.newInstance(0, 0);
jobId.setAppId(applicationId);
- jobId.setId(0);
+ jobId.setId(0);
jobReport.setJobId(jobId);
jobReport.setJobState(state);
jobReport.setStartTime(jobStartTime);
@@ -212,9 +215,9 @@ public void testFromYarnJobReport() throws Exception {
jobReport.setUser("TestTypeConverter-user");
jobReport.setJobPriority(Priority.newInstance(0));
JobStatus jobStatus = TypeConverter.fromYarn(jobReport, "dummy-jobfile");
- Assert.assertEquals(jobStartTime, jobStatus.getStartTime());
- Assert.assertEquals(jobFinishTime, jobStatus.getFinishTime());
- Assert.assertEquals(state.toString(), jobStatus.getState().toString());
- Assert.assertEquals(JobPriority.DEFAULT, jobStatus.getPriority());
+ assertEquals(jobStartTime, jobStatus.getStartTime());
+ assertEquals(jobFinishTime, jobStatus.getFinishTime());
+ assertEquals(state.toString(), jobStatus.getState().toString());
+ assertEquals(JobPriority.DEFAULT, jobStatus.getPriority());
}
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/TestRPCFactories.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/TestRPCFactories.java
index 200f7ac255..8cd3476717 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/TestRPCFactories.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/TestRPCFactories.java
@@ -22,9 +22,9 @@
import java.io.IOException;
import java.net.InetSocketAddress;
-import org.junit.Assert;
-
import org.apache.hadoop.conf.Configuration;
+
+import static org.junit.jupiter.api.Assertions.fail;
import org.apache.hadoop.ipc.Server;
import org.apache.hadoop.mapreduce.v2.api.MRClientProtocol;
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.CancelDelegationTokenRequest;
@@ -59,16 +59,15 @@
import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
import org.apache.hadoop.yarn.factories.impl.pb.RpcClientFactoryPBImpl;
import org.apache.hadoop.yarn.factories.impl.pb.RpcServerFactoryPBImpl;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
public class TestRPCFactories {
-
-
-
+
+
@Test
public void test() {
testPbServerFactory();
-
+
testPbClientFactory();
}
@@ -86,7 +85,7 @@ private void testPbServerFactory() {
server.start();
} catch (YarnRuntimeException e) {
e.printStackTrace();
- Assert.fail("Failed to crete server");
+ fail("Failed to create server");
} finally {
server.stop();
}
@@ -112,12 +111,12 @@ private void testPbClientFactory() {
client = (MRClientProtocol) RpcClientFactoryPBImpl.get().getClient(MRClientProtocol.class, 1, NetUtils.getConnectAddress(server), conf);
} catch (YarnRuntimeException e) {
e.printStackTrace();
- Assert.fail("Failed to crete client");
+ fail("Failed to create client");
}
} catch (YarnRuntimeException e) {
e.printStackTrace();
- Assert.fail("Failed to crete server");
+ fail("Failed to create server");
} finally {
server.stop();
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/TestRecordFactory.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/TestRecordFactory.java
index 2e793220f0..953c7a2427 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/TestRecordFactory.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/TestRecordFactory.java
@@ -18,37 +18,40 @@
package org.apache.hadoop.mapreduce.v2;
-import org.junit.Assert;
-
import org.apache.hadoop.yarn.exceptions.YarnRuntimeException;
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factories.impl.pb.RecordFactoryPBImpl;
+
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.GetCountersRequest;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.fail;
+
import org.apache.hadoop.mapreduce.v2.api.protocolrecords.impl.pb.GetCountersRequestPBImpl;
import org.apache.hadoop.mapreduce.v2.api.records.CounterGroup;
import org.apache.hadoop.mapreduce.v2.api.records.impl.pb.CounterGroupPBImpl;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
public class TestRecordFactory {
-
+
@Test
public void testPbRecordFactory() {
RecordFactory pbRecordFactory = RecordFactoryPBImpl.get();
-
+
try {
CounterGroup response = pbRecordFactory.newRecordInstance(CounterGroup.class);
- Assert.assertEquals(CounterGroupPBImpl.class, response.getClass());
+ assertEquals(CounterGroupPBImpl.class, response.getClass());
} catch (YarnRuntimeException e) {
e.printStackTrace();
- Assert.fail("Failed to crete record");
+ fail("Failed to create record");
}
-
+
try {
GetCountersRequest response = pbRecordFactory.newRecordInstance(GetCountersRequest.class);
- Assert.assertEquals(GetCountersRequestPBImpl.class, response.getClass());
+ assertEquals(GetCountersRequestPBImpl.class, response.getClass());
} catch (YarnRuntimeException e) {
e.printStackTrace();
- Assert.fail("Failed to crete record");
+ fail("Failed to create record");
}
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/api/records/TestIds.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/api/records/TestIds.java
index b12925f297..ee2d014872 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/api/records/TestIds.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/api/records/TestIds.java
@@ -19,13 +19,14 @@
package org.apache.hadoop.mapreduce.v2.api.records;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertTrue;
-
import org.apache.hadoop.mapreduce.v2.util.MRBuilderUtils;
import org.apache.hadoop.yarn.api.records.ApplicationId;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
+
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNotEquals;
+import static org.junit.jupiter.api.Assertions.assertTrue;
public class TestIds {
@@ -38,9 +39,9 @@ public void testJobId() {
JobId j3 = createJobId(ts2, 1);
JobId j4 = createJobId(ts1, 2);
- assertTrue(j1.equals(j4));
- assertFalse(j1.equals(j2));
- assertFalse(j1.equals(j3));
+ assertEquals(j1, j4);
+ assertNotEquals(j1, j2);
+ assertNotEquals(j1, j3);
assertTrue(j1.compareTo(j4) == 0);
assertTrue(j1.compareTo(j2) > 0);
@@ -65,10 +66,10 @@ public void testTaskId() {
TaskId t4 = createTaskId(ts1, 1, 2, TaskType.MAP);
TaskId t5 = createTaskId(ts2, 1, 1, TaskType.MAP);
- assertTrue(t1.equals(t4));
- assertFalse(t1.equals(t2));
- assertFalse(t1.equals(t3));
- assertFalse(t1.equals(t5));
+ assertEquals(t1, t4);
+ assertNotEquals(t1, t2);
+ assertNotEquals(t1, t3);
+ assertNotEquals(t1, t5);
assertTrue(t1.compareTo(t4) == 0);
assertTrue(t1.compareTo(t2) < 0);
@@ -96,10 +97,10 @@ public void testTaskAttemptId() {
TaskAttemptId t5 = createTaskAttemptId(ts1, 2, 1, TaskType.MAP, 3);
TaskAttemptId t6 = createTaskAttemptId(ts1, 2, 2, TaskType.MAP, 2);
- assertTrue(t1.equals(t6));
- assertFalse(t1.equals(t2));
- assertFalse(t1.equals(t3));
- assertFalse(t1.equals(t5));
+ assertEquals(t1, t6);
+ assertNotEquals(t1, t2);
+ assertNotEquals(t1, t3);
+ assertNotEquals(t1, t5);
assertTrue(t1.compareTo(t6) == 0);
assertTrue(t1.compareTo(t2) < 0);
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/jobhistory/TestFileNameIndexUtils.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/jobhistory/TestFileNameIndexUtils.java
index f9322d86d1..478eefea69 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/jobhistory/TestFileNameIndexUtils.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/jobhistory/TestFileNameIndexUtils.java
@@ -20,13 +20,16 @@
import java.io.IOException;
import static java.nio.charset.StandardCharsets.UTF_8;
+import static org.junit.jupiter.api.Assertions.assertArrayEquals;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertTrue;
import org.apache.hadoop.mapreduce.JobID;
import org.apache.hadoop.mapreduce.TypeConverter;
import org.apache.hadoop.mapreduce.v2.api.records.JobId;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
public class TestFileNameIndexUtils {
@@ -105,26 +105,26 @@ public void testEncodingDecodingEquivalence() throws IOException {
String jobHistoryFile = FileNameIndexUtils.getDoneFileName(info);
JobIndexInfo parsedInfo = FileNameIndexUtils.getIndexInfo(jobHistoryFile);
- Assert.assertEquals("Job id different after encoding and decoding",
- info.getJobId(), parsedInfo.getJobId());
- Assert.assertEquals("Submit time different after encoding and decoding",
- info.getSubmitTime(), parsedInfo.getSubmitTime());
- Assert.assertEquals("User different after encoding and decoding",
- info.getUser(), parsedInfo.getUser());
- Assert.assertEquals("Job name different after encoding and decoding",
- info.getJobName(), parsedInfo.getJobName());
- Assert.assertEquals("Finish time different after encoding and decoding",
- info.getFinishTime(), parsedInfo.getFinishTime());
- Assert.assertEquals("Num maps different after encoding and decoding",
- info.getNumMaps(), parsedInfo.getNumMaps());
- Assert.assertEquals("Num reduces different after encoding and decoding",
- info.getNumReduces(), parsedInfo.getNumReduces());
- Assert.assertEquals("Job status different after encoding and decoding",
- info.getJobStatus(), parsedInfo.getJobStatus());
- Assert.assertEquals("Queue name different after encoding and decoding",
- info.getQueueName(), parsedInfo.getQueueName());
- Assert.assertEquals("Job start time different after encoding and decoding",
- info.getJobStartTime(), parsedInfo.getJobStartTime());
+ assertEquals(info.getJobId(), parsedInfo.getJobId(),
+ "Job id different after encoding and decoding");
+ assertEquals(info.getSubmitTime(), parsedInfo.getSubmitTime(),
+ "Submit time different after encoding and decoding");
+ assertEquals(info.getUser(), parsedInfo.getUser(),
+ "User different after encoding and decoding");
+ assertEquals(info.getJobName(), parsedInfo.getJobName(),
+ "Job name different after encoding and decoding");
+ assertEquals(info.getFinishTime(), parsedInfo.getFinishTime(),
+ "Finish time different after encoding and decoding");
+ assertEquals(info.getNumMaps(), parsedInfo.getNumMaps(),
+ "Num maps different after encoding and decoding");
+ assertEquals(info.getNumReduces(), parsedInfo.getNumReduces(),
+ "Num reduces different after encoding and decoding");
+ assertEquals(info.getJobStatus(), parsedInfo.getJobStatus(),
+ "Job status different after encoding and decoding");
+ assertEquals(info.getQueueName(), parsedInfo.getQueueName(),
+ "Queue name different after encoding and decoding");
+ assertEquals(info.getJobStartTime(), parsedInfo.getJobStartTime(),
+ "Job start time different after encoding and decoding");
}
@Test
@@ -144,8 +144,8 @@ public void testUserNamePercentEncoding() throws IOException {
info.setJobStartTime(Long.parseLong(JOB_START_TIME));
String jobHistoryFile = FileNameIndexUtils.getDoneFileName(info);
- Assert.assertTrue("User name not encoded correctly into job history file",
- jobHistoryFile.contains(USER_NAME_WITH_DELIMITER_ESCAPE));
+ assertTrue(jobHistoryFile.contains(USER_NAME_WITH_DELIMITER_ESCAPE),
+ "User name not encoded correctly into job history file");
}
@Test
@@ -166,12 +166,12 @@ public void testTrimJobName() throws IOException {
info.setJobStartTime(Long.parseLong(JOB_START_TIME));
String jobHistoryFile =
- FileNameIndexUtils.getDoneFileName(info, jobNameTrimLength);
+ FileNameIndexUtils.getDoneFileName(info, jobNameTrimLength);
JobIndexInfo parsedInfo = FileNameIndexUtils.getIndexInfo(jobHistoryFile);
- Assert.assertEquals("Job name did not get trimmed correctly",
- info.getJobName().substring(0, jobNameTrimLength),
- parsedInfo.getJobName());
+ assertEquals(info.getJobName().substring(0, jobNameTrimLength),
+ parsedInfo.getJobName(),
+ "Job name did not get trimmed correctly");
}
/**
@@ -206,17 +206,17 @@ public void testJobNameWithMultibyteChars() throws IOException {
String jobHistoryFile =
FileNameIndexUtils.getDoneFileName(info, 50);
- Assert.assertTrue(jobHistoryFile.length() <= 255);
+ assertTrue(jobHistoryFile.length() <= 255);
String trimedJobName = jobHistoryFile.split(
FileNameIndexUtils.DELIMITER)[3]; // 3 is index of job name
// 3 x 16 < 50 < 3 x 17 so the length of trimedJobName should be 48
- Assert.assertEquals(48, trimedJobName.getBytes(UTF_8).length);
+ assertEquals(48, trimedJobName.getBytes(UTF_8).length);
// validate whether trimmedJobName by testing reversibility
byte[] trimedJobNameInByte = trimedJobName.getBytes(UTF_8);
String reEncodedTrimedJobName = new String(trimedJobNameInByte, UTF_8);
- Assert.assertArrayEquals(trimedJobNameInByte,
+ assertArrayEquals(trimedJobNameInByte,
reEncodedTrimedJobName.getBytes(UTF_8));
sb.setLength(0);
@@ -231,17 +231,17 @@ public void testJobNameWithMultibyteChars() throws IOException {
jobHistoryFile =
FileNameIndexUtils.getDoneFileName(info, 27);
- Assert.assertTrue(jobHistoryFile.length() <= 255);
+ assertTrue(jobHistoryFile.length() <= 255);
trimedJobName = jobHistoryFile.split(
FileNameIndexUtils.DELIMITER)[3]; // 3 is index of job name
// 6 x 4 < 27 < 6 x 5 so the length of trimedJobName should be 24
- Assert.assertEquals(24, trimedJobName.getBytes(UTF_8).length);
+ assertEquals(24, trimedJobName.getBytes(UTF_8).length);
// validate whether trimmedJobName by testing reversibility
trimedJobNameInByte = trimedJobName.getBytes(UTF_8);
reEncodedTrimedJobName = new String(trimedJobNameInByte, UTF_8);
- Assert.assertArrayEquals(trimedJobNameInByte,
+ assertArrayEquals(trimedJobNameInByte,
reEncodedTrimedJobName.getBytes(UTF_8));
sb.setLength(0);
@@ -256,17 +256,17 @@ public void testJobNameWithMultibyteChars() throws IOException {
jobHistoryFile =
FileNameIndexUtils.getDoneFileName(info, 40);
- Assert.assertTrue(jobHistoryFile.length() <= 255);
+ assertTrue(jobHistoryFile.length() <= 255);
trimedJobName = jobHistoryFile.split(
FileNameIndexUtils.DELIMITER)[3]; // 3 is index of job name
// 9 x 4 < 40 < 9 x 5 so the length of trimedJobName should be 36
- Assert.assertEquals(36, trimedJobName.getBytes(UTF_8).length);
+ assertEquals(36, trimedJobName.getBytes(UTF_8).length);
// validate whether trimmedJobName by testing reversibility
trimedJobNameInByte = trimedJobName.getBytes(UTF_8);
reEncodedTrimedJobName = new String(trimedJobNameInByte, UTF_8);
- Assert.assertArrayEquals(trimedJobNameInByte,
+ assertArrayEquals(trimedJobNameInByte,
reEncodedTrimedJobName.getBytes(UTF_8));
sb.setLength(0);
@@ -281,29 +281,29 @@ public void testJobNameWithMultibyteChars() throws IOException {
jobHistoryFile =
FileNameIndexUtils.getDoneFileName(info, 49);
- Assert.assertTrue(jobHistoryFile.length() <= 255);
+ assertTrue(jobHistoryFile.length() <= 255);
trimedJobName = jobHistoryFile.split(
FileNameIndexUtils.DELIMITER)[3]; // 3 is index of job name
// 12 x 4 < 49 < 12 x 5 so the length of trimedJobName should be 48
- Assert.assertEquals(48, trimedJobName.getBytes(UTF_8).length);
+ assertEquals(48, trimedJobName.getBytes(UTF_8).length);
// validate whether trimmedJobName by testing reversibility
trimedJobNameInByte = trimedJobName.getBytes(UTF_8);
reEncodedTrimedJobName = new String(trimedJobNameInByte, UTF_8);
- Assert.assertArrayEquals(trimedJobNameInByte,
+ assertArrayEquals(trimedJobNameInByte,
reEncodedTrimedJobName.getBytes(UTF_8));
sb.setLength(0);
// Test for the combination of 1 to 4 bytes UTF-8 characters
sb.append('\u732B') // cat in Kanji (encoded into 3 bytes x 3 characters)
- .append("[") // (encoded into 1 byte x 3 characters)
- .append('\u03BB') // small lambda (encoded into 2 bytes x 3 characters)
- .append('/') // (encoded into 1 byte x 3 characters)
- .append('A') // not url-encoded (1 byte x 1 character)
- .append("\ud867\ude49") // flying fish in
- // Kanji (encoded into 4 bytes x 3 characters)
- .append('\u72AC'); // dog in Kanji (encoded into 3 bytes x 3 characters)
+ .append("[") // (encoded into 1 byte x 3 characters)
+ .append('\u03BB') // small lambda (encoded into 2 bytes x 3 characters)
+ .append('/') // (encoded into 1 byte x 3 characters)
+ .append('A') // not url-encoded (1 byte x 1 character)
+ .append("\ud867\ude49") // flying fish in
+ // Kanji (encoded into 4 bytes x 3 characters)
+ .append('\u72AC'); // dog in Kanji (encoded into 3 bytes x 3 characters)
longJobName = sb.toString();
info.setJobName(longJobName);
@@ -311,18 +311,18 @@ public void testJobNameWithMultibyteChars() throws IOException {
jobHistoryFile =
FileNameIndexUtils.getDoneFileName(info, 23);
- Assert.assertTrue(jobHistoryFile.length() <= 255);
+ assertTrue(jobHistoryFile.length() <= 255);
trimedJobName = jobHistoryFile.split(
FileNameIndexUtils.DELIMITER)[3]; // 3 is index of job name
// total size of the first 5 characters = 22
// 23 < total size of the first 6 characters
- Assert.assertEquals(22, trimedJobName.getBytes(UTF_8).length);
+ assertEquals(22, trimedJobName.getBytes(UTF_8).length);
// validate whether trimmedJobName by testing reversibility
trimedJobNameInByte = trimedJobName.getBytes(UTF_8);
reEncodedTrimedJobName = new String(trimedJobNameInByte, UTF_8);
- Assert.assertArrayEquals(trimedJobNameInByte,
+ assertArrayEquals(trimedJobNameInByte,
reEncodedTrimedJobName.getBytes(UTF_8));
}
@@ -341,8 +341,7 @@ public void testUserNamePercentDecoding() throws IOException {
JOB_START_TIME);
JobIndexInfo info = FileNameIndexUtils.getIndexInfo(jobHistoryFile);
- Assert.assertEquals("User name doesn't match",
- USER_NAME_WITH_DELIMITER, info.getUser());
+ assertEquals(USER_NAME_WITH_DELIMITER, info.getUser(), "User name doesn't match");
}
@Test
@@ -362,8 +361,8 @@ public void testJobNamePercentEncoding() throws IOException {
info.setJobStartTime(Long.parseLong(JOB_START_TIME));
String jobHistoryFile = FileNameIndexUtils.getDoneFileName(info);
- Assert.assertTrue("Job name not encoded correctly into job history file",
- jobHistoryFile.contains(JOB_NAME_WITH_DELIMITER_ESCAPE));
+ assertTrue(jobHistoryFile.contains(JOB_NAME_WITH_DELIMITER_ESCAPE),
+ "Job name not encoded correctly into job history file");
}
@Test
@@ -378,11 +377,10 @@ public void testJobNamePercentDecoding() throws IOException {
NUM_REDUCES,
JOB_STATUS,
QUEUE_NAME,
- JOB_START_TIME );
+ JOB_START_TIME);
JobIndexInfo info = FileNameIndexUtils.getIndexInfo(jobHistoryFile);
- Assert.assertEquals("Job name doesn't match",
- JOB_NAME_WITH_DELIMITER, info.getJobName());
+ assertEquals(JOB_NAME_WITH_DELIMITER, info.getJobName(), "Job name doesn't match");
}
@Test
@@ -402,8 +400,8 @@ public void testQueueNamePercentEncoding() throws IOException {
info.setJobStartTime(Long.parseLong(JOB_START_TIME));
String jobHistoryFile = FileNameIndexUtils.getDoneFileName(info);
- Assert.assertTrue("Queue name not encoded correctly into job history file",
- jobHistoryFile.contains(QUEUE_NAME_WITH_DELIMITER_ESCAPE));
+ assertTrue(jobHistoryFile.contains(QUEUE_NAME_WITH_DELIMITER_ESCAPE),
+ "Queue name not encoded correctly into job history file");
}
@Test
@@ -418,15 +416,14 @@ public void testQueueNamePercentDecoding() throws IOException {
NUM_REDUCES,
JOB_STATUS,
QUEUE_NAME_WITH_DELIMITER_ESCAPE,
- JOB_START_TIME );
+ JOB_START_TIME);
JobIndexInfo info = FileNameIndexUtils.getIndexInfo(jobHistoryFile);
- Assert.assertEquals("Queue name doesn't match",
- QUEUE_NAME_WITH_DELIMITER, info.getQueueName());
+ assertEquals(QUEUE_NAME_WITH_DELIMITER, info.getQueueName(), "Queue name doesn't match");
}
@Test
- public void testJobStartTimeBackwardsCompatible() throws IOException{
+ public void testJobStartTimeBackwardsCompatible() throws IOException {
String jobHistoryFile = String.format(OLD_FORMAT_BEFORE_ADD_START_TIME,
JOB_ID,
SUBMIT_TIME,
@@ -436,9 +433,9 @@ public void testJobStartTimeBackwardsCompatible() throws IOException{
NUM_MAPS,
NUM_REDUCES,
JOB_STATUS,
- QUEUE_NAME );
+ QUEUE_NAME);
JobIndexInfo info = FileNameIndexUtils.getIndexInfo(jobHistoryFile);
- Assert.assertEquals(info.getJobStartTime(), info.getSubmitTime());
+ assertEquals(info.getJobStartTime(), info.getSubmitTime());
}
@Test
@@ -462,24 +459,19 @@ public void testJobHistoryFileNameBackwardsCompatible() throws IOException {
JOB_STATUS);
JobIndexInfo info = FileNameIndexUtils.getIndexInfo(jobHistoryFile);
- Assert.assertEquals("Job id incorrect after decoding old history file",
- jobId, info.getJobId());
- Assert.assertEquals("Submit time incorrect after decoding old history file",
- submitTime, info.getSubmitTime());
- Assert.assertEquals("User incorrect after decoding old history file",
- USER_NAME, info.getUser());
- Assert.assertEquals("Job name incorrect after decoding old history file",
- JOB_NAME, info.getJobName());
- Assert.assertEquals("Finish time incorrect after decoding old history file",
- finishTime, info.getFinishTime());
- Assert.assertEquals("Num maps incorrect after decoding old history file",
- numMaps, info.getNumMaps());
- Assert.assertEquals("Num reduces incorrect after decoding old history file",
- numReduces, info.getNumReduces());
- Assert.assertEquals("Job status incorrect after decoding old history file",
- JOB_STATUS, info.getJobStatus());
- Assert.assertNull("Queue name incorrect after decoding old history file",
- info.getQueueName());
+ assertEquals(jobId, info.getJobId(), "Job id incorrect after decoding old history file");
+ assertEquals(submitTime, info.getSubmitTime(),
+ "Submit time incorrect after decoding old history file");
+ assertEquals(USER_NAME, info.getUser(), "User incorrect after decoding old history file");
+ assertEquals(JOB_NAME, info.getJobName(), "Job name incorrect after decoding old history file");
+ assertEquals(finishTime, info.getFinishTime(),
+ "Finish time incorrect after decoding old history file");
+ assertEquals(numMaps, info.getNumMaps(), "Num maps incorrect after decoding old history file");
+ assertEquals(numReduces, info.getNumReduces(),
+ "Num reduces incorrect after decoding old history file");
+ assertEquals(JOB_STATUS, info.getJobStatus(),
+ "Job status incorrect after decoding old history file");
+ assertNull(info.getQueueName(), "Queue name incorrect after decoding old history file");
}
@Test
@@ -503,7 +495,7 @@ public void testTrimJobNameEqualsLimitLength() throws IOException {
jobNameTrimLength);
JobIndexInfo parsedInfo = FileNameIndexUtils.getIndexInfo(jobHistoryFile);
- Assert.assertEquals("Job name did not get trimmed correctly", info
- .getJobName().substring(0, jobNameTrimLength), parsedInfo.getJobName());
+ assertEquals(info.getJobName().substring(0, jobNameTrimLength), parsedInfo.getJobName(),
+ "Job name did not get trimmed correctly");
}
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/jobhistory/TestJobHistoryUtils.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/jobhistory/TestJobHistoryUtils.java
index 447d18aa55..3eaf358d8c 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/jobhistory/TestJobHistoryUtils.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/jobhistory/TestJobHistoryUtils.java
@@ -32,10 +32,10 @@
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
-import org.junit.Assert;
-import org.junit.Test;
+import org.junit.jupiter.api.Test;
import static org.apache.hadoop.mapreduce.v2.jobhistory.JobHistoryUtils.getConfiguredHistoryIntermediateUserDoneDirPermissions;
+import static org.junit.jupiter.api.Assertions.assertEquals;
public class TestJobHistoryUtils {
@@ -47,7 +47,7 @@ public class TestJobHistoryUtils {
@SuppressWarnings("unchecked")
public void testGetHistoryDirsForCleaning() throws IOException {
Path pRoot = new Path(TEST_DIR, "org.apache.hadoop.mapreduce.v2.jobhistory."
- + "TestJobHistoryUtils.testGetHistoryDirsForCleaning");
+ + "TestJobHistoryUtils.testGetHistoryDirsForCleaning");
FileContext fc = FileContext.getFileContext();
Calendar cCal = Calendar.getInstance();
int year = 2013;
@@ -58,68 +58,68 @@ public void testGetHistoryDirsForCleaning() throws IOException {
clearDir(fc, pRoot);
Path pId00 = createPath(fc, pRoot, year, month, day, "000000");
- Path pId01 = createPath(fc, pRoot, year, month, day+1, "000001");
- Path pId02 = createPath(fc, pRoot, year, month, day-1, "000002");
- Path pId03 = createPath(fc, pRoot, year, month+1, day, "000003");
- Path pId04 = createPath(fc, pRoot, year, month+1, day+1, "000004");
- Path pId05 = createPath(fc, pRoot, year, month+1, day-1, "000005");
- Path pId06 = createPath(fc, pRoot, year, month-1, day, "000006");
- Path pId07 = createPath(fc, pRoot, year, month-1, day+1, "000007");
- Path pId08 = createPath(fc, pRoot, year, month-1, day-1, "000008");
- Path pId09 = createPath(fc, pRoot, year+1, month, day, "000009");
- Path pId10 = createPath(fc, pRoot, year+1, month, day+1, "000010");
- Path pId11 = createPath(fc, pRoot, year+1, month, day-1, "000011");
- Path pId12 = createPath(fc, pRoot, year+1, month+1, day, "000012");
- Path pId13 = createPath(fc, pRoot, year+1, month+1, day+1, "000013");
- Path pId14 = createPath(fc, pRoot, year+1, month+1, day-1, "000014");
- Path pId15 = createPath(fc, pRoot, year+1, month-1, day, "000015");
- Path pId16 = createPath(fc, pRoot, year+1, month-1, day+1, "000016");
- Path pId17 = createPath(fc, pRoot, year+1, month-1, day-1, "000017");
- Path pId18 = createPath(fc, pRoot, year-1, month, day, "000018");
- Path pId19 = createPath(fc, pRoot, year-1, month, day+1, "000019");
- Path pId20 = createPath(fc, pRoot, year-1, month, day-1, "000020");
- Path pId21 = createPath(fc, pRoot, year-1, month+1, day, "000021");
- Path pId22 = createPath(fc, pRoot, year-1, month+1, day+1, "000022");
- Path pId23 = createPath(fc, pRoot, year-1, month+1, day-1, "000023");
- Path pId24 = createPath(fc, pRoot, year-1, month-1, day, "000024");
- Path pId25 = createPath(fc, pRoot, year-1, month-1, day+1, "000025");
- Path pId26 = createPath(fc, pRoot, year-1, month-1, day-1, "000026");
+ Path pId01 = createPath(fc, pRoot, year, month, day + 1, "000001");
+ Path pId02 = createPath(fc, pRoot, year, month, day - 1, "000002");
+ Path pId03 = createPath(fc, pRoot, year, month + 1, day, "000003");
+ Path pId04 = createPath(fc, pRoot, year, month + 1, day + 1, "000004");
+ Path pId05 = createPath(fc, pRoot, year, month + 1, day - 1, "000005");
+ Path pId06 = createPath(fc, pRoot, year, month - 1, day, "000006");
+ Path pId07 = createPath(fc, pRoot, year, month - 1, day + 1, "000007");
+ Path pId08 = createPath(fc, pRoot, year, month - 1, day - 1, "000008");
+ Path pId09 = createPath(fc, pRoot, year + 1, month, day, "000009");
+ Path pId10 = createPath(fc, pRoot, year + 1, month, day + 1, "000010");
+ Path pId11 = createPath(fc, pRoot, year + 1, month, day - 1, "000011");
+ Path pId12 = createPath(fc, pRoot, year + 1, month + 1, day, "000012");
+ Path pId13 = createPath(fc, pRoot, year + 1, month + 1, day + 1, "000013");
+ Path pId14 = createPath(fc, pRoot, year + 1, month + 1, day - 1, "000014");
+ Path pId15 = createPath(fc, pRoot, year + 1, month - 1, day, "000015");
+ Path pId16 = createPath(fc, pRoot, year + 1, month - 1, day + 1, "000016");
+ Path pId17 = createPath(fc, pRoot, year + 1, month - 1, day - 1, "000017");
+ Path pId18 = createPath(fc, pRoot, year - 1, month, day, "000018");
+ Path pId19 = createPath(fc, pRoot, year - 1, month, day + 1, "000019");
+ Path pId20 = createPath(fc, pRoot, year - 1, month, day - 1, "000020");
+ Path pId21 = createPath(fc, pRoot, year - 1, month + 1, day, "000021");
+ Path pId22 = createPath(fc, pRoot, year - 1, month + 1, day + 1, "000022");
+ Path pId23 = createPath(fc, pRoot, year - 1, month + 1, day - 1, "000023");
+ Path pId24 = createPath(fc, pRoot, year - 1, month - 1, day, "000024");
+ Path pId25 = createPath(fc, pRoot, year - 1, month - 1, day + 1, "000025");
+ Path pId26 = createPath(fc, pRoot, year - 1, month - 1, day - 1, "000026");
// non-expected names should be ignored without problems
Path pId27 = createPath(fc, pRoot, "foo", "" + month, "" + day, "000027");
Path pId28 = createPath(fc, pRoot, "" + year, "foo", "" + day, "000028");
Path pId29 = createPath(fc, pRoot, "" + year, "" + month, "foo", "000029");
List dirs = JobHistoryUtils
- .getHistoryDirsForCleaning(fc, pRoot, cutoff);
+ .getHistoryDirsForCleaning(fc, pRoot, cutoff);
Collections.sort(dirs);
- Assert.assertEquals(14, dirs.size());
- Assert.assertEquals(pId26.toUri().getPath(),
+ assertEquals(14, dirs.size());
+ assertEquals(pId26.toUri().getPath(),
dirs.get(0).getPath().toUri().getPath());
- Assert.assertEquals(pId24.toUri().getPath(),
+ assertEquals(pId24.toUri().getPath(),
dirs.get(1).getPath().toUri().getPath());
- Assert.assertEquals(pId25.toUri().getPath(),
+ assertEquals(pId25.toUri().getPath(),
dirs.get(2).getPath().toUri().getPath());
- Assert.assertEquals(pId20.toUri().getPath(),
+ assertEquals(pId20.toUri().getPath(),
dirs.get(3).getPath().toUri().getPath());
- Assert.assertEquals(pId18.toUri().getPath(),
+ assertEquals(pId18.toUri().getPath(),
dirs.get(4).getPath().toUri().getPath());
- Assert.assertEquals(pId19.toUri().getPath(),
+ assertEquals(pId19.toUri().getPath(),
dirs.get(5).getPath().toUri().getPath());
- Assert.assertEquals(pId23.toUri().getPath(),
+ assertEquals(pId23.toUri().getPath(),
dirs.get(6).getPath().toUri().getPath());
- Assert.assertEquals(pId21.toUri().getPath(),
+ assertEquals(pId21.toUri().getPath(),
dirs.get(7).getPath().toUri().getPath());
- Assert.assertEquals(pId22.toUri().getPath(),
+ assertEquals(pId22.toUri().getPath(),
dirs.get(8).getPath().toUri().getPath());
- Assert.assertEquals(pId08.toUri().getPath(),
+ assertEquals(pId08.toUri().getPath(),
dirs.get(9).getPath().toUri().getPath());
- Assert.assertEquals(pId06.toUri().getPath(),
+ assertEquals(pId06.toUri().getPath(),
dirs.get(10).getPath().toUri().getPath());
- Assert.assertEquals(pId07.toUri().getPath(),
+ assertEquals(pId07.toUri().getPath(),
dirs.get(11).getPath().toUri().getPath());
- Assert.assertEquals(pId02.toUri().getPath(),
+ assertEquals(pId02.toUri().getPath(),
dirs.get(12).getPath().toUri().getPath());
- Assert.assertEquals(pId00.toUri().getPath(),
+ assertEquals(pId00.toUri().getPath(),
dirs.get(13).getPath().toUri().getPath());
}
@@ -152,15 +152,15 @@ private Path createPath(FileContext fc, Path root, String year, String month,
public void testGetConfiguredHistoryIntermediateUserDoneDirPermissions() {
Configuration conf = new Configuration();
Map parameters = ImmutableMap.of(
- "775", new FsPermission(0775),
- "123", new FsPermission(0773),
- "-rwx", new FsPermission(0770) ,
- "+rwx", new FsPermission(0777)
+ "775", new FsPermission(0775),
+ "123", new FsPermission(0773),
+ "-rwx", new FsPermission(0770),
+ "+rwx", new FsPermission(0777)
);
for (Map.Entry entry : parameters.entrySet()) {
conf.set(JHAdminConfig.MR_HISTORY_INTERMEDIATE_USER_DONE_DIR_PERMISSIONS,
entry.getKey());
- Assert.assertEquals(entry.getValue(),
+ assertEquals(entry.getValue(),
getConfiguredHistoryIntermediateUserDoneDirPermissions(conf));
}
}
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/util/TestMRApps.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/util/TestMRApps.java
index b4952ecc4d..a2201c9ff1 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/util/TestMRApps.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-common/src/test/java/org/apache/hadoop/mapreduce/v2/util/TestMRApps.java
@@ -18,12 +18,13 @@
package org.apache.hadoop.mapreduce.v2.util;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertFalse;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+import static org.junit.jupiter.api.Assertions.assertFalse;
+import static org.junit.jupiter.api.Assertions.assertNotNull;
+import static org.junit.jupiter.api.Assertions.assertNull;
+import static org.junit.jupiter.api.Assertions.assertThrows;
+import static org.junit.jupiter.api.Assertions.assertTrue;
+import static org.junit.jupiter.api.Assertions.fail;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
@@ -58,14 +59,15 @@
import org.apache.hadoop.yarn.api.records.LocalResourceType;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
-import org.junit.AfterClass;
-import org.junit.BeforeClass;
-import org.junit.Test;
+import org.junit.jupiter.api.AfterAll;
+import org.junit.jupiter.api.BeforeAll;
+import org.junit.jupiter.api.Test;
+import org.junit.jupiter.api.Timeout;
public class TestMRApps {
private static File testWorkDir = null;
- @BeforeClass
+ @BeforeAll
public static void setupTestDirs() throws IOException {
testWorkDir = new File("target", TestMRApps.class.getCanonicalName());
delete(testWorkDir);
@@ -73,7 +75,7 @@ public static void setupTestDirs() throws IOException {
testWorkDir = testWorkDir.getAbsoluteFile();
}
- @AfterClass
+ @AfterAll
public static void cleanupTestDirs() throws IOException {
if (testWorkDir != null) {
delete(testWorkDir);
@@ -87,14 +89,16 @@ private static void delete(File dir) throws IOException {
fs.delete(p, true);
}
- @Test (timeout = 120000)
+ @Test
+ @Timeout(120)
public void testJobIDtoString() {
JobId jid = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(JobId.class);
jid.setAppId(ApplicationId.newInstance(0, 0));
assertEquals("job_0_0000", MRApps.toString(jid));
}
- @Test (timeout = 120000)
+ @Test
+ @Timeout(120)
public void testToJobID() {
JobId jid = MRApps.toJobID("job_1_1");
assertEquals(1, jid.getAppId().getClusterTimestamp());
@@ -102,13 +106,17 @@ public void testToJobID() {
assertEquals(1, jid.getId()); // tests against some proto.id and not a job.id field
}
- @Test (timeout = 120000, expected=IllegalArgumentException.class)
+ @Test
+ @Timeout(120)
public void testJobIDShort() {
- MRApps.toJobID("job_0_0_0");
+ assertThrows(IllegalArgumentException.class, () -> {
+ MRApps.toJobID("job_0_0_0");
+ });
}
//TODO_get.set
- @Test (timeout = 120000)
+ @Test
+ @Timeout(120)
public void testTaskIDtoString() {
TaskId tid = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(TaskId.class);
tid.setJobId(RecordFactoryProvider.getRecordFactory(null).newRecordInstance(JobId.class));
@@ -124,7 +132,8 @@ public void testTaskIDtoString() {
assertEquals("task_0_0000_r_000000", MRApps.toString(tid));
}
- @Test (timeout = 120000)
+ @Test
+ @Timeout(120)
public void testToTaskID() {
TaskId tid = MRApps.toTaskID("task_1_2_r_3");
assertEquals(1, tid.getJobId().getAppId().getClusterTimestamp());
@@ -137,28 +146,38 @@ public void testToTaskID() {
assertEquals(TaskType.MAP, tid.getTaskType());
}
- @Test(timeout = 120000, expected=IllegalArgumentException.class)
+ @Test
+ @Timeout(120)
public void testTaskIDShort() {
- MRApps.toTaskID("task_0_0000_m");
+ assertThrows(IllegalArgumentException.class, () -> {
+ MRApps.toTaskID("task_0_0000_m");
+ });
}
- @Test(timeout = 120000, expected=IllegalArgumentException.class)
+ @Test
+ @Timeout(120)
public void testTaskIDBadType() {
- MRApps.toTaskID("task_0_0000_x_000000");
+ assertThrows(IllegalArgumentException.class, () -> {
+ MRApps.toTaskID("task_0_0000_x_000000");
+ });
}
//TODO_get.set
- @Test (timeout = 120000)
+ @Test
+ @Timeout(120)
public void testTaskAttemptIDtoString() {
- TaskAttemptId taid = RecordFactoryProvider.getRecordFactory(null).newRecordInstance(TaskAttemptId.class);
+ TaskAttemptId taid =
+ RecordFactoryProvider.getRecordFactory(null).newRecordInstance(TaskAttemptId.class);
taid.setTaskId(RecordFactoryProvider.getRecordFactory(null).newRecordInstance(TaskId.class));
taid.getTaskId().setTaskType(TaskType.MAP);
- taid.getTaskId().setJobId(RecordFactoryProvider.getRecordFactory(null).newRecordInstance(JobId.class));
+ taid.getTaskId()
+ .setJobId(RecordFactoryProvider.getRecordFactory(null).newRecordInstance(JobId.class));
taid.getTaskId().getJobId().setAppId(ApplicationId.newInstance(0, 0));
assertEquals("attempt_0_0000_m_000000_0", MRApps.toString(taid));
}
- @Test (timeout = 120000)
+ @Test
+ @Timeout(120)
public void testToTaskAttemptID() {
TaskAttemptId taid = MRApps.toTaskAttemptID("attempt_0_1_m_2_3");
assertEquals(0, taid.getTaskId().getJobId().getAppId().getClusterTimestamp());
@@ -168,23 +187,28 @@ public void testToTaskAttemptID() {
assertEquals(3, taid.getId());
}
- @Test(timeout = 120000, expected=IllegalArgumentException.class)
+ @Test
+ @Timeout(120)
public void testTaskAttemptIDShort() {
- MRApps.toTaskAttemptID("attempt_0_0_0_m_0");
+ assertThrows(IllegalArgumentException.class, () -> {
+ MRApps.toTaskAttemptID("attempt_0_0_0_m_0");
+ });
}
- @Test (timeout = 120000)
+ @Test
+ @Timeout(120)
public void testGetJobFileWithUser() {
Configuration conf = new Configuration();
conf.set(MRJobConfig.MR_AM_STAGING_DIR, "/my/path/to/staging");
- String jobFile = MRApps.getJobFile(conf, "dummy-user",
+ String jobFile = MRApps.getJobFile(conf, "dummy-user",
new JobID("dummy-job", 12345));
- assertNotNull("getJobFile results in null.", jobFile);
- assertEquals("jobFile with specified user is not as expected.",
- "/my/path/to/staging/dummy-user/.staging/job_dummy-job_12345/job.xml", jobFile);
+ assertNotNull(jobFile, "getJobFile results in null.");
+ assertEquals("/my/path/to/staging/dummy-user/.staging/job_dummy-job_12345/job.xml", jobFile,
+ "jobFile with specified user is not as expected.");
}
- @Test (timeout = 120000)
+ @Test
+ @Timeout(120)
public void testSetClasspath() throws IOException {
Configuration conf = new Configuration();
conf.setBoolean(MRConfig.MAPREDUCE_APP_SUBMISSION_CROSS_PLATFORM, true);
@@ -192,8 +216,8 @@ public void testSetClasspath() throws IOException {
Map environment = new HashMap();
MRApps.setClasspath(environment, job.getConfiguration());
assertTrue(environment.get("CLASSPATH").startsWith(
- ApplicationConstants.Environment.PWD.$$()
- + ApplicationConstants.CLASS_PATH_SEPARATOR));
+ ApplicationConstants.Environment.PWD.$$()
+ + ApplicationConstants.CLASS_PATH_SEPARATOR));
String yarnAppClasspath = job.getConfiguration().get(
YarnConfiguration.YARN_APPLICATION_CLASSPATH,
StringUtils.join(",",
@@ -201,23 +225,24 @@ public void testSetClasspath() throws IOException {
if (yarnAppClasspath != null) {
yarnAppClasspath =
yarnAppClasspath.replaceAll(",\\s*",
- ApplicationConstants.CLASS_PATH_SEPARATOR).trim();
+ ApplicationConstants.CLASS_PATH_SEPARATOR).trim();
}
assertTrue(environment.get("CLASSPATH").contains(yarnAppClasspath));
- String mrAppClasspath =
+ String mrAppClasspath =
job.getConfiguration().get(
MRJobConfig.MAPREDUCE_APPLICATION_CLASSPATH,
MRJobConfig.DEFAULT_MAPREDUCE_CROSS_PLATFORM_APPLICATION_CLASSPATH);
if (mrAppClasspath != null) {
mrAppClasspath =
mrAppClasspath.replaceAll(",\\s*",
- ApplicationConstants.CLASS_PATH_SEPARATOR).trim();
+ ApplicationConstants.CLASS_PATH_SEPARATOR).trim();
}
assertTrue(environment.get("CLASSPATH").contains(mrAppClasspath));
}
-
- @Test (timeout = 120000)
- public void testSetClasspathWithArchives () throws IOException {
+
+ @Test
+ @Timeout(120)
+ public void testSetClasspathWithArchives() throws IOException {
File testTGZ = new File(testWorkDir, "test.tgz");
FileOutputStream out = new FileOutputStream(testTGZ);
out.write(0);
@@ -227,27 +252,28 @@ public void testSetClasspathWithArchives () throws IOException {
Job job = Job.getInstance(conf);
conf = job.getConfiguration();
String testTGZQualifiedPath = FileSystem.getLocal(conf).makeQualified(new Path(
- testTGZ.getAbsolutePath())).toString();
+ testTGZ.getAbsolutePath())).toString();
conf.set(MRJobConfig.CLASSPATH_ARCHIVES, testTGZQualifiedPath);
conf.set(MRJobConfig.CACHE_ARCHIVES, testTGZQualifiedPath + "#testTGZ");
Map environment = new HashMap();
MRApps.setClasspath(environment, conf);
assertTrue(environment.get("CLASSPATH").startsWith(
- ApplicationConstants.Environment.PWD.$$() + ApplicationConstants.CLASS_PATH_SEPARATOR));
+ ApplicationConstants.Environment.PWD.$$() + ApplicationConstants.CLASS_PATH_SEPARATOR));
String confClasspath = job.getConfiguration().get(
YarnConfiguration.YARN_APPLICATION_CLASSPATH,
StringUtils.join(",",
YarnConfiguration.DEFAULT_YARN_CROSS_PLATFORM_APPLICATION_CLASSPATH));
if (confClasspath != null) {
confClasspath = confClasspath.replaceAll(",\\s*", ApplicationConstants.CLASS_PATH_SEPARATOR)
- .trim();
+ .trim();
}
assertTrue(environment.get("CLASSPATH").contains(confClasspath));
assertTrue(environment.get("CLASSPATH").contains("testTGZ"));
}
- @Test (timeout = 120000)
- public void testSetClasspathWithUserPrecendence() {
+ @Test
+ @Timeout(120)
+ public void testSetClasspathWithUserPrecendence() {
Configuration conf = new Configuration();
conf.setBoolean(MRConfig.MAPREDUCE_APP_SUBMISSION_CROSS_PLATFORM, true);
conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_USER_CLASSPATH_FIRST, true);
@@ -260,13 +286,14 @@ public void testSetClasspathWithUserPrecendence() {
String env_str = env.get("CLASSPATH");
String expectedClasspath = StringUtils.join(ApplicationConstants.CLASS_PATH_SEPARATOR,
Arrays.asList(ApplicationConstants.Environment.PWD.$$(), "job.jar/*",
- "job.jar/classes/", "job.jar/lib/*",
- ApplicationConstants.Environment.PWD.$$() + "/*"));
- assertTrue("MAPREDUCE_JOB_USER_CLASSPATH_FIRST set, but not taking effect!",
- env_str.startsWith(expectedClasspath));
+ "job.jar/classes/", "job.jar/lib/*",
+ ApplicationConstants.Environment.PWD.$$() + "/*"));
+ assertTrue(env_str.startsWith(expectedClasspath),
+ "MAPREDUCE_JOB_USER_CLASSPATH_FIRST set, but not taking effect!");
}
- @Test (timeout = 120000)
+ @Test
+ @Timeout(120)
public void testSetClasspathWithNoUserPrecendence() {
Configuration conf = new Configuration();
conf.setBoolean(MRConfig.MAPREDUCE_APP_SUBMISSION_CROSS_PLATFORM, true);
@@ -280,14 +307,15 @@ public void testSetClasspathWithNoUserPrecendence() {
String env_str = env.get("CLASSPATH");
String expectedClasspath = StringUtils.join(ApplicationConstants.CLASS_PATH_SEPARATOR,
Arrays.asList("job.jar/*", "job.jar/classes/", "job.jar/lib/*",
- ApplicationConstants.Environment.PWD.$$() + "/*"));
- assertTrue("MAPREDUCE_JOB_USER_CLASSPATH_FIRST false, and job.jar is not in"
- + " the classpath!", env_str.contains(expectedClasspath));
- assertFalse("MAPREDUCE_JOB_USER_CLASSPATH_FIRST false, but taking effect!",
- env_str.startsWith(expectedClasspath));
+ ApplicationConstants.Environment.PWD.$$() + "/*"));
+ assertTrue(env_str.contains(expectedClasspath),
+ "MAPREDUCE_JOB_USER_CLASSPATH_FIRST false, and job.jar is not in" + " the classpath!");
+ assertFalse(env_str.startsWith(expectedClasspath),
+ "MAPREDUCE_JOB_USER_CLASSPATH_FIRST false, but taking effect!");
}
-
- @Test (timeout = 120000)
+
+ @Test
+ @Timeout(120)
public void testSetClasspathWithJobClassloader() throws IOException {
Configuration conf = new Configuration();
conf.setBoolean(MRConfig.MAPREDUCE_APP_SUBMISSION_CROSS_PLATFORM, true);
@@ -296,19 +324,18 @@ public void testSetClasspathWithJobClassloader() throws IOException {
MRApps.setClasspath(env, conf);
String cp = env.get("CLASSPATH");
String appCp = env.get("APP_CLASSPATH");
- assertFalse("MAPREDUCE_JOB_CLASSLOADER true, but job.jar is in the"
- + " classpath!", cp.contains("jar" + ApplicationConstants.CLASS_PATH_SEPARATOR + "job"));
- assertFalse("MAPREDUCE_JOB_CLASSLOADER true, but PWD is in the classpath!",
- cp.contains("PWD"));
+ assertFalse(cp.contains("jar" + ApplicationConstants.CLASS_PATH_SEPARATOR + "job"),
+ "MAPREDUCE_JOB_CLASSLOADER true, but job.jar is in the" + " classpath!");
+ assertFalse(cp.contains("PWD"), "MAPREDUCE_JOB_CLASSLOADER true, but PWD is in the classpath!");
String expectedAppClasspath = StringUtils.join(ApplicationConstants.CLASS_PATH_SEPARATOR,
- Arrays.asList(ApplicationConstants.Environment.PWD.$$(), "job.jar/*",
- "job.jar/classes/", "job.jar/lib/*",
- ApplicationConstants.Environment.PWD.$$() + "/*"));
- assertEquals("MAPREDUCE_JOB_CLASSLOADER true, but job.jar is not in the app"
- + " classpath!", expectedAppClasspath, appCp);
+ Arrays.asList(ApplicationConstants.Environment.PWD.$$(), "job.jar/*", "job.jar/classes/",
+ "job.jar/lib/*", ApplicationConstants.Environment.PWD.$$() + "/*"));
+ assertEquals(expectedAppClasspath, appCp,
+ "MAPREDUCE_JOB_CLASSLOADER true, but job.jar is not in the app" + " classpath!");
}
- @Test (timeout = 3000000)
+ @Test
+ @Timeout(3000)
public void testSetClasspathWithFramework() throws IOException {
final String FRAMEWORK_NAME = "some-framework-name";
final String FRAMEWORK_PATH = "some-framework-path#" + FRAMEWORK_NAME;
@@ -320,9 +347,9 @@ public void testSetClasspathWithFramework() throws IOException {
MRApps.setClasspath(env, conf);
fail("Failed to catch framework path set without classpath change");
} catch (IllegalArgumentException e) {
- assertTrue("Unexpected IllegalArgumentException",
- e.getMessage().contains("Could not locate MapReduce framework name '"
- + FRAMEWORK_NAME + "'"));
+ assertTrue(e.getMessage().contains("Could not locate MapReduce framework name '"
+ + FRAMEWORK_NAME + "'"),
+ "Unexpected IllegalArgumentException");
}
env.clear();
@@ -333,48 +360,50 @@ public void testSetClasspathWithFramework() throws IOException {
Arrays.asList("job.jar/*", "job.jar/classes/", "job.jar/lib/*",
ApplicationConstants.Environment.PWD.$$() + "/*"));
String expectedClasspath = StringUtils.join(ApplicationConstants.CLASS_PATH_SEPARATOR,
- Arrays.asList(ApplicationConstants.Environment.PWD.$$(),
- FRAMEWORK_CLASSPATH, stdClasspath));
- assertEquals("Incorrect classpath with framework and no user precedence",
- expectedClasspath, env.get("CLASSPATH"));
+ Arrays.asList(ApplicationConstants.Environment.PWD.$$(), FRAMEWORK_CLASSPATH,
+ stdClasspath));
+ assertEquals(expectedClasspath, env.get("CLASSPATH"),
+ "Incorrect classpath with framework and no user precedence");
env.clear();
conf.setBoolean(MRJobConfig.MAPREDUCE_JOB_USER_CLASSPATH_FIRST, true);
MRApps.setClasspath(env, conf);
expectedClasspath = StringUtils.join(ApplicationConstants.CLASS_PATH_SEPARATOR,
- Arrays.asList(ApplicationConstants.Environment.PWD.$$(),
- stdClasspath, FRAMEWORK_CLASSPATH));
- assertEquals("Incorrect classpath with framework and user precedence",
- expectedClasspath, env.get("CLASSPATH"));
+ Arrays.asList(ApplicationConstants.Environment.PWD.$$(), stdClasspath,
+ FRAMEWORK_CLASSPATH));
+ assertEquals(expectedClasspath, env.get("CLASSPATH"),
+ "Incorrect classpath with framework and user precedence");
}
- @Test (timeout = 30000)
+ @Test
+ @Timeout(30)
public void testSetupDistributedCacheEmpty() throws IOException {
Configuration conf = new Configuration();
Map localResources = new HashMap();
MRApps.setupDistributedCache(conf, localResources);
- assertTrue("Empty Config did not produce an empty list of resources",
- localResources.isEmpty());
+ assertTrue(localResources.isEmpty(),
+ "Empty Config did not produce an empty list of resources");
}
-
+
@SuppressWarnings("deprecation")
- @Test(timeout = 120000)
+ @Test
+ @Timeout(120)
public void testSetupDistributedCacheConflicts() throws Exception {
Configuration conf = new Configuration();
conf.setClass("fs.mockfs.impl", MockFileSystem.class, FileSystem.class);
-
+
URI mockUri = URI.create("mockfs://mock/");
- FileSystem mockFs = ((FilterFileSystem)FileSystem.get(mockUri, conf))
+ FileSystem mockFs = ((FilterFileSystem) FileSystem.get(mockUri, conf))
.getRawFileSystem();
-
+
URI archive = new URI("mockfs://mock/tmp/something.zip#something");
Path archivePath = new Path(archive);
URI file = new URI("mockfs://mock/tmp/something.txt#something");
Path filePath = new Path(file);
-
+
when(mockFs.resolvePath(archivePath)).thenReturn(archivePath);
when(mockFs.resolvePath(filePath)).thenReturn(filePath);
-
+
Job.addCacheArchive(archive, conf);
conf.set(MRJobConfig.CACHE_ARCHIVES_TIMESTAMPS, "10");
conf.set(MRJobConfig.CACHE_ARCHIVES_SIZES, "10");
@@ -383,8 +412,8 @@ public void testSetupDistributedCacheConflicts() throws Exception {
conf.set(MRJobConfig.CACHE_FILE_TIMESTAMPS, "11");
conf.set(MRJobConfig.CACHE_FILES_SIZES, "11");
conf.set(MRJobConfig.CACHE_FILE_VISIBILITIES, "true");
- Map localResources =
- new HashMap();
+ Map localResources =
+ new HashMap();
MRApps.setupDistributedCache(conf, localResources);
assertEquals(1, localResources.size());
@@ -395,32 +424,33 @@ public void testSetupDistributedCacheConflicts() throws Exception {
assertEquals(10l, lr.getTimestamp());
assertEquals(LocalResourceType.ARCHIVE, lr.getType());
}
-
+
@SuppressWarnings("deprecation")
- @Test(timeout = 120000)
+ @Test
+ @Timeout(120)
public void testSetupDistributedCacheConflictsFiles() throws Exception {
Configuration conf = new Configuration();
conf.setClass("fs.mockfs.impl", MockFileSystem.class, FileSystem.class);
-
+
URI mockUri = URI.create("mockfs://mock/");
- FileSystem mockFs = ((FilterFileSystem)FileSystem.get(mockUri, conf))
+ FileSystem mockFs = ((FilterFileSystem) FileSystem.get(mockUri, conf))
.getRawFileSystem();
-
+
URI file = new URI("mockfs://mock/tmp/something.zip#something");
Path filePath = new Path(file);
URI file2 = new URI("mockfs://mock/tmp/something.txt#something");
Path file2Path = new Path(file2);
-
+
when(mockFs.resolvePath(filePath)).thenReturn(filePath);
when(mockFs.resolvePath(file2Path)).thenReturn(file2Path);
-
+
Job.addCacheFile(file, conf);
Job.addCacheFile(file2, conf);
conf.set(MRJobConfig.CACHE_FILE_TIMESTAMPS, "10,11");
conf.set(MRJobConfig.CACHE_FILES_SIZES, "10,11");
conf.set(MRJobConfig.CACHE_FILE_VISIBILITIES, "true,true");
- Map localResources =
- new HashMap();
+ Map localResources =
+ new HashMap();
MRApps.setupDistributedCache(conf, localResources);
assertEquals(1, localResources.size());
@@ -431,25 +461,26 @@ public void testSetupDistributedCacheConflictsFiles() throws Exception {
assertEquals(10l, lr.getTimestamp());
assertEquals(LocalResourceType.FILE, lr.getType());
}
-
+
@SuppressWarnings("deprecation")
- @Test (timeout = 30000)
+ @Test
+ @Timeout(30)
public void testSetupDistributedCache() throws Exception {
Configuration conf = new Configuration();
conf.setClass("fs.mockfs.impl", MockFileSystem.class, FileSystem.class);
-
+
URI mockUri = URI.create("mockfs://mock/");
- FileSystem mockFs = ((FilterFileSystem)FileSystem.get(mockUri, conf))
+ FileSystem mockFs = ((FilterFileSystem) FileSystem.get(mockUri, conf))
.getRawFileSystem();
-
+
URI archive = new URI("mockfs://mock/tmp/something.zip");
Path archivePath = new Path(archive);
URI file = new URI("mockfs://mock/tmp/something.txt#something");
Path filePath = new Path(file);
-
+
when(mockFs.resolvePath(archivePath)).thenReturn(archivePath);
when(mockFs.resolvePath(filePath)).thenReturn(filePath);
-
+
Job.addCacheArchive(archive, conf);
conf.set(MRJobConfig.CACHE_ARCHIVES_TIMESTAMPS, "10");
conf.set(MRJobConfig.CACHE_ARCHIVES_SIZES, "10");
@@ -458,8 +489,8 @@ public void testSetupDistributedCache() throws Exception {
conf.set(MRJobConfig.CACHE_FILE_TIMESTAMPS, "11");
conf.set(MRJobConfig.CACHE_FILES_SIZES, "11");
conf.set(MRJobConfig.CACHE_FILE_VISIBILITIES, "true");
- Map localResources =
- new HashMap();
+ Map localResources =
+ new HashMap();
MRApps.setupDistributedCache(conf, localResources);
assertEquals(2, localResources.size());
LocalResource lr = localResources.get("something.zip");
@@ -519,35 +550,34 @@ public void testTaskStateUI() {
"/org/apache/hadoop/fake/Klass"
};
- private static final String[] DEFAULT_XMLS = new String[] {
- "core-default.xml",
- "mapred-default.xml",
- "hdfs-default.xml",
- "yarn-default.xml"
- };
+ private static final String[] DEFAULT_XMLS =
+ new String[] {"core-default.xml", "mapred-default.xml", "hdfs-default.xml",
+ "yarn-default.xml"};
@Test
public void testSystemClasses() {
final List systemClasses =
Arrays.asList(StringUtils.getTrimmedStrings(
- ApplicationClassLoader.SYSTEM_CLASSES_DEFAULT));
+ ApplicationClassLoader.SYSTEM_CLASSES_DEFAULT));
for (String defaultXml : DEFAULT_XMLS) {
- assertTrue(defaultXml + " must be system resource",
- ApplicationClassLoader.isSystemClass(defaultXml, systemClasses));
+ assertTrue(ApplicationClassLoader.isSystemClass(defaultXml, systemClasses),
+ defaultXml + " must be system resource");
}
for (String klass : SYS_CLASSES) {
- assertTrue(klass + " must be system class",
- ApplicationClassLoader.isSystemClass(klass, systemClasses));
+ assertTrue(ApplicationClassLoader.isSystemClass(klass, systemClasses),
+ klass + " must be system class");
}
- assertFalse("/fake/Klass must not be a system class",
- ApplicationClassLoader.isSystemClass("/fake/Klass", systemClasses));
+ assertFalse(ApplicationClassLoader.isSystemClass("/fake/Klass", systemClasses),
+ "/fake/Klass must not be a system class");
}
- @Test(expected = IllegalArgumentException.class)
+ @Test
public void testInvalidWebappAddress() throws Exception {
- Configuration conf = new Configuration();
- conf.set(JHAdminConfig.MR_HISTORY_WEBAPP_ADDRESS, "19888");
- MRWebAppUtil.getApplicationWebURLOnJHSWithScheme(
- conf, ApplicationId.newInstance(0, 1));
+ assertThrows(IllegalArgumentException.class, () -> {
+ Configuration conf = new Configuration();
+ conf.set(JHAdminConfig.MR_HISTORY_WEBAPP_ADDRESS, "19888");
+ MRWebAppUtil.getApplicationWebURLOnJHSWithScheme(
+ conf, ApplicationId.newInstance(0, 1));
+ });
}
}