MAPREDUCE-4987. TestMRJobs#testDistributedCache fails on Windows due to classpath problems and unexpected behavior of symlinks (Chris Nauroth via bikas)
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1470003 13f79535-47bb-0310-9956-ffa450edef68
commit c1ce3c5ad2
parent edcfd4527c
hadoop-mapreduce-project/CHANGES.txt:

@@ -134,6 +134,10 @@ Trunk (Unreleased)
     MAPREDUCE-4885. Streaming tests have multiple failures on Windows. (Chris
     Nauroth via bikas)
 
+    MAPREDUCE-4987. TestMRJobs#testDistributedCache fails on Windows due to
+    classpath problems and unexpected behavior of symlinks (Chris Nauroth via
+    bikas)
+
   BREAKDOWN OF HADOOP-8562 SUBTASKS
 
     MAPREDUCE-4739. Some MapReduce tests fail to find winutils.
.../src/test/java/org/apache/hadoop/mapreduce/v2/TestMRJobs.java:

@@ -18,11 +18,13 @@
 
 package org.apache.hadoop.mapreduce.v2;
 
+import java.io.BufferedReader;
 import java.io.File;
 import java.io.FileInputStream;
 import java.io.FileNotFoundException;
 import java.io.FileOutputStream;
 import java.io.IOException;
+import java.io.StringReader;
 import java.net.URI;
 import java.security.PrivilegedExceptionAction;
 import java.util.HashMap;
@@ -47,6 +49,7 @@
 import org.apache.hadoop.fs.RemoteIterator;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
+import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.NullWritable;
 import org.apache.hadoop.io.Text;
@@ -71,6 +74,7 @@
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.util.JarFinder;
+import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.yarn.conf.YarnConfiguration;
 import org.junit.AfterClass;
 import org.junit.Assert;
@@ -93,13 +97,6 @@ public class TestMRJobs {
     } catch (IOException io) {
       throw new RuntimeException("problem getting local fs", io);
     }
-    try {
-      dfsCluster = new MiniDFSCluster.Builder(conf).numDataNodes(2)
-        .format(true).racks(null).build();
-      remoteFs = dfsCluster.getFileSystem();
-    } catch (IOException io) {
-      throw new RuntimeException("problem starting mini dfs cluster", io);
-    }
   }
 
   private static Path TEST_ROOT_DIR = new Path("target",
@@ -110,6 +107,13 @@ public class TestMRJobs {
 
   @BeforeClass
   public static void setup() throws IOException {
+    try {
+      dfsCluster = new MiniDFSCluster.Builder(conf).numDataNodes(2)
+        .format(true).racks(null).build();
+      remoteFs = dfsCluster.getFileSystem();
+    } catch (IOException io) {
+      throw new RuntimeException("problem starting mini dfs cluster", io);
+    }
 
     if (!(new File(MiniMRYarnCluster.APPJAR)).exists()) {
       LOG.info("MRAppJar " + MiniMRYarnCluster.APPJAR
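Starting the MiniDFSCluster inside @BeforeClass (above) rather than in a static
initializer surfaces startup failures as test-setup failures instead of opaque
class-loading errors, and it pairs naturally with an @AfterClass teardown. A
minimal sketch of such a teardown, using the hypothetical name tearDown; the
actual teardown in TestMRJobs may differ:

  @AfterClass
  public static void tearDown() {
    // Shut the mini DFS cluster down after the class finishes so later test
    // classes start from a clean slate; dfsCluster is the static field
    // assigned in setup() above.
    if (dfsCluster != null) {
      dfsCluster.shutdown();
      dfsCluster = null;
    }
  }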
@@ -215,7 +219,7 @@ protected void verifyTaskProgress(Job job) throws InterruptedException,
     }
   }
 
-  @Test (timeout = 30000)
+  @Test (timeout = 60000)
   public void testRandomWriter() throws IOException, InterruptedException,
       ClassNotFoundException {
 
@@ -277,7 +281,7 @@ protected void verifyRandomWriterCounters(Job job)
         && counters.findCounter(JobCounter.SLOTS_MILLIS_MAPS).getValue() != 0);
   }
 
-  @Test (timeout = 30000)
+  @Test (timeout = 60000)
   public void testFailingMapper() throws IOException, InterruptedException,
       ClassNotFoundException {
 
@@ -359,7 +363,7 @@ protected Job runFailingMapperJob()
     return job;
   }
 
-  //@Test (timeout = 30000)
+  //@Test (timeout = 60000)
   public void testSleepJobWithSecurityOn() throws IOException,
       InterruptedException, ClassNotFoundException {
 
@@ -467,8 +471,45 @@ public void setup(Context context) throws IOException {
       // Check that the symlink for the Job Jar was created in the cwd and
       // points to the extracted directory
       File jobJarDir = new File("job.jar");
-      Assert.assertTrue(FileUtils.isSymlink(jobJarDir));
-      Assert.assertTrue(jobJarDir.isDirectory());
+      if (Shell.WINDOWS) {
+        Assert.assertTrue(isWindowsSymlinkedDirectory(jobJarDir));
+      } else {
+        Assert.assertTrue(FileUtils.isSymlink(jobJarDir));
+        Assert.assertTrue(jobJarDir.isDirectory());
+      }
+    }
+
+    /**
+     * Used on Windows to determine if the specified file is a symlink that
+     * targets a directory. On most platforms, these checks can be done using
+     * commons-io. On Windows, the commons-io implementation is unreliable and
+     * always returns false. Instead, this method checks the output of the dir
+     * command. After migrating to Java 7, this method can be removed in favor
+     * of the new method java.nio.file.Files.isSymbolicLink, which is expected to
+     * work cross-platform.
+     *
+     * @param file File to check
+     * @return boolean true if the file is a symlink that targets a directory
+     * @throws IOException thrown for any I/O error
+     */
+    private static boolean isWindowsSymlinkedDirectory(File file)
+        throws IOException {
+      String dirOut = Shell.execCommand("cmd", "/c", "dir",
+          file.getAbsoluteFile().getParent());
+      StringReader sr = new StringReader(dirOut);
+      BufferedReader br = new BufferedReader(sr);
+      try {
+        String line;
+        while ((line = br.readLine()) != null) {
+          if (line.contains(file.getName()) && line.contains("<SYMLINKD>")) {
+            return true;
+          }
+        }
+        return false;
+      } finally {
+        IOUtils.closeStream(br);
+        IOUtils.closeStream(sr);
+      }
     }
 
     /**
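The javadoc above notes that java.nio.file.Files.isSymbolicLink can replace this
dir-output-parsing workaround once Hadoop moves to Java 7. A minimal sketch of
that cross-platform check, illustrative only and not part of this commit:

  import java.io.File;
  import java.nio.file.Files;
  import java.nio.file.Path;

  // Returns true if 'file' is a symbolic link whose target is a directory.
  // Files.isSymbolicLink inspects the link itself (it does not follow it),
  // while Files.isDirectory follows the link to test what it points at.
  static boolean isSymlinkedDirectory(File file) {
    Path path = file.toPath();
    return Files.isSymbolicLink(path) && Files.isDirectory(path);
  }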
@@ -542,7 +583,7 @@ public void _testDistributedCache(String jobJarPath) throws Exception {
         trackingUrl.endsWith(jobId.substring(jobId.lastIndexOf("_")) + "/"));
   }
 
-  @Test (timeout = 300000)
+  @Test (timeout = 600000)
   public void testDistributedCache() throws Exception {
     // Test with a local (file:///) Job Jar
     Path localJobJarPath = makeJobJarWithLib(TEST_ROOT_DIR.toUri().toString());