HADOOP-9427. Use JUnit assumptions to skip platform-specific tests. Contributed by Gergely Novák.
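The commit replaces ad-hoc "if (Shell.WINDOWS) return;" guards with JUnit assumptions, so platform-specific tests are reported as skipped instead of passing vacuously. A minimal sketch of the before/after pattern (the test class and body below are hypothetical, for illustration only):

    import org.junit.Test;

    import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;

    public class ExampleTest {  // hypothetical test, not part of this commit
      @Test
      public void testPosixOnlyBehavior() {
        // Before this change: if (Shell.WINDOWS) { return; } -- the test
        // "passed" without running anything. With the assumption, JUnit
        // reports the test as skipped on Windows instead.
        assumeNotWindows();
        // POSIX-specific assertions would follow here.
      }
    }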
This commit is contained in:
parent 55d5993a8e
commit 54fe17a607
@@ -34,6 +34,7 @@
 import org.junit.Test;
 
 import static org.apache.hadoop.fs.FileContextTestHelper.*;
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.fail;
 
@@ -98,10 +99,7 @@ private void cleanupFile(FileContext fc, Path name) throws IOException {
 
   @Test
   public void testCreatePermission() throws IOException {
-    if (Path.WINDOWS) {
-      System.out.println("Cannot run test for Windows");
-      return;
-    }
+    assumeNotWindows();
     String filename = "foo";
     Path f = fileContextTestHelper.getTestRootPath(fc, filename);
     fileContextTestHelper.createFile(fc, filename);
@@ -112,10 +110,7 @@ public void testCreatePermission() throws IOException {
 
   @Test
   public void testSetPermission() throws IOException {
-    if (Path.WINDOWS) {
-      System.out.println("Cannot run test for Windows");
-      return;
-    }
+    assumeNotWindows();
 
     String filename = "foo";
     Path f = fileContextTestHelper.getTestRootPath(fc, filename);
@@ -137,10 +132,7 @@ public void testSetPermission() throws IOException {
 
   @Test
   public void testSetOwner() throws IOException {
-    if (Path.WINDOWS) {
-      System.out.println("Cannot run test for Windows");
-      return;
-    }
+    assumeNotWindows();
 
     String filename = "bar";
     Path f = fileContextTestHelper.getTestRootPath(fc, filename);

@@ -43,12 +43,12 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.util.StringUtils;
 import org.apache.tools.tar.TarEntry;
 import org.apache.tools.tar.TarOutputStream;
 
 
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 import static org.junit.Assert.*;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;
@@ -423,10 +423,8 @@ private void validateAndSetWritablePermissions(
 
   @Test (timeout = 30000)
   public void testFailFullyDelete() throws IOException {
-    if(Shell.WINDOWS) {
-      // windows Dir.setWritable(false) does not work for directories
-      return;
-    }
+    // Windows Dir.setWritable(false) does not work for directories
+    assumeNotWindows();
     LOG.info("Running test to verify failure of fullyDelete()");
     setupDirsAndNonWritablePermissions();
     boolean ret = FileUtil.fullyDelete(new MyFile(del));
@@ -504,10 +502,8 @@ public File[] listFiles() {
 
   @Test (timeout = 30000)
   public void testFailFullyDeleteContents() throws IOException {
-    if(Shell.WINDOWS) {
-      // windows Dir.setWritable(false) does not work for directories
-      return;
-    }
+    // Windows Dir.setWritable(false) does not work for directories
+    assumeNotWindows();
     LOG.info("Running test to verify failure of fullyDeleteContents()");
     setupDirsAndNonWritablePermissions();
     boolean ret = FileUtil.fullyDeleteContents(new MyFile(del));

@@ -18,13 +18,13 @@
 
 package org.apache.hadoop.fs;
 
+import static org.apache.hadoop.test.PlatformAssumptions.assumeWindows;
 import static org.hamcrest.CoreMatchers.is;
 import static org.hamcrest.CoreMatchers.not;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertThat;
 import static org.junit.Assert.assertTrue;
-import static org.junit.Assume.assumeTrue;
 
 import java.io.File;
 import java.io.IOException;
@@ -143,7 +143,7 @@ public void testCopyDirFromLocal() throws Exception {
 
   @Test
   public void testCopyFileFromWindowsLocalPath() throws Exception {
-    assumeTrue(Path.WINDOWS);
+    assumeWindows();
     String windowsTestRootPath = (new File(testRootDir.toUri().getPath()
         .toString())).getAbsolutePath();
     Path testRoot = new Path(windowsTestRootPath, "testPutFile");
@@ -158,7 +158,7 @@ public void testCopyFileFromWindowsLocalPath() throws Exception {
 
   @Test
   public void testCopyDirFromWindowsLocalPath() throws Exception {
-    assumeTrue(Path.WINDOWS);
+    assumeWindows();
     String windowsTestRootPath = (new File(testRootDir.toUri().getPath()
         .toString())).getAbsolutePath();
     Path testRoot = new Path(windowsTestRootPath, "testPutDir");
@@ -485,7 +485,7 @@ public void testMoveDirFromLocalDestExists() throws Exception {
 
   @Test
   public void testMoveFromWindowsLocalPath() throws Exception {
-    assumeTrue(Path.WINDOWS);
+    assumeWindows();
     Path testRoot = new Path(testRootDir, "testPutFile");
     lfs.delete(testRoot, true);
     lfs.mkdirs(testRoot);
@@ -504,7 +504,7 @@ public void testMoveFromWindowsLocalPath() throws Exception {
 
   @Test
   public void testGetWindowsLocalPath() throws Exception {
-    assumeTrue(Path.WINDOWS);
+    assumeWindows();
     String winDstFile = (new File(dstPath.toUri().getPath()
         .toString())).getAbsolutePath();
     shellRun(0, "-get", srcPath.toString(), winDstFile);

@@ -34,8 +34,8 @@
 import org.junit.runners.Parameterized.Parameters;
 import org.junit.Test;
 
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 import static org.junit.Assert.*;
-import static org.junit.Assume.*;
 
 /** This test LocalDirAllocator works correctly;
  * Every test case uses different buffer dirs to
@@ -57,8 +57,6 @@ public class TestLocalDirAllocator {
   final static private LocalDirAllocator dirAllocator =
     new LocalDirAllocator(CONTEXT);
   static LocalFileSystem localFs;
-  final static private boolean isWindows =
-    System.getProperty("os.name").startsWith("Windows");
   final static int SMALL_FILE_SIZE = 100;
   final static private String RELATIVE = "/RELATIVE";
   final static private String ABSOLUTE = "/ABSOLUTE";
@@ -132,7 +130,7 @@ private String buildBufferDir(String dir, int i) {
    */
   @Test (timeout = 30000)
   public void test0() throws Exception {
-    if (isWindows) return;
+    assumeNotWindows();
     String dir0 = buildBufferDir(ROOT, 0);
     String dir1 = buildBufferDir(ROOT, 1);
     try {
@@ -154,7 +152,7 @@ public void test0() throws Exception {
    */
   @Test (timeout = 30000)
   public void testROBufferDirAndRWBufferDir() throws Exception {
-    if (isWindows) return;
+    assumeNotWindows();
     String dir1 = buildBufferDir(ROOT, 1);
     String dir2 = buildBufferDir(ROOT, 2);
     try {
@@ -174,7 +172,7 @@ public void testROBufferDirAndRWBufferDir() throws Exception {
    */
   @Test (timeout = 30000)
   public void testDirsNotExist() throws Exception {
-    if (isWindows) return;
+    assumeNotWindows();
     String dir2 = buildBufferDir(ROOT, 2);
     String dir3 = buildBufferDir(ROOT, 3);
     try {
@@ -200,7 +198,7 @@ public void testDirsNotExist() throws Exception {
    */
   @Test (timeout = 30000)
   public void testRWBufferDirBecomesRO() throws Exception {
-    if (isWindows) return;
+    assumeNotWindows();
     String dir3 = buildBufferDir(ROOT, 3);
     String dir4 = buildBufferDir(ROOT, 4);
     try {
@@ -238,7 +236,7 @@ public void testRWBufferDirBecomesRO() throws Exception {
   static final int TRIALS = 100;
   @Test (timeout = 30000)
   public void testCreateManyFiles() throws Exception {
-    if (isWindows) return;
+    assumeNotWindows();
     String dir5 = buildBufferDir(ROOT, 5);
     String dir6 = buildBufferDir(ROOT, 6);
     try {
@@ -348,7 +346,7 @@ public void testShouldNotthrowNPE() throws Exception {
    */
   @Test (timeout = 30000)
   public void testNoSideEffects() throws IOException {
-    assumeTrue(!isWindows);
+    assumeNotWindows();
     String dir = buildBufferDir(ROOT, 0);
     try {
       conf.set(CONTEXT, dir);
@@ -370,7 +368,7 @@ public void testNoSideEffects() throws IOException {
    */
   @Test (timeout = 30000)
   public void testGetLocalPathToRead() throws IOException {
-    assumeTrue(!isWindows);
+    assumeNotWindows();
     String dir = buildBufferDir(ROOT, 0);
     try {
       conf.set(CONTEXT, dir);
@@ -395,7 +393,7 @@ public void testGetLocalPathToRead() throws IOException {
    */
   @Test (timeout = 30000)
   public void testGetAllLocalPathsToRead() throws IOException {
-    assumeTrue(!isWindows);
+    assumeNotWindows();
 
     String dir0 = buildBufferDir(ROOT, 0);
     String dir1 = buildBufferDir(ROOT, 1);

@@ -21,7 +21,6 @@
 import org.apache.hadoop.fs.FileSystem.Statistics;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.util.StringUtils;
 
 import static org.apache.hadoop.fs.FileSystemTestHelper.*;
@@ -31,8 +30,9 @@
 import java.util.Arrays;
 import java.util.Random;
 
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
+import static org.apache.hadoop.test.PlatformAssumptions.assumeWindows;
 import static org.junit.Assert.*;
-import static org.junit.Assume.assumeTrue;
 import static org.mockito.Mockito.*;
 
 import org.junit.After;
@@ -287,7 +287,7 @@ public void testHasFileDescriptor() throws IOException {
 
   @Test(timeout = 1000)
   public void testListStatusWithColons() throws IOException {
-    assumeTrue(!Shell.WINDOWS);
+    assumeNotWindows();
     File colonFile = new File(TEST_ROOT_DIR, "foo:bar");
     colonFile.mkdirs();
     FileStatus[] stats = fileSys.listStatus(new Path(TEST_ROOT_DIR));
@@ -298,7 +298,7 @@ public void testListStatusWithColons() throws IOException {
 
   @Test
   public void testListStatusReturnConsistentPathOnWindows() throws IOException {
-    assumeTrue(Shell.WINDOWS);
+    assumeWindows();
     String dirNoDriveSpec = TEST_ROOT_DIR;
     if (dirNoDriveSpec.charAt(1) == ':')
       dirNoDriveSpec = dirNoDriveSpec.substring(2);

@@ -26,7 +26,10 @@
 import java.io.*;
 import java.util.*;
 
-import junit.framework.*;
+import org.junit.Test;
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -37,7 +40,7 @@
 /**
  * This class tests the local file system via the FileSystem abstraction.
  */
-public class TestLocalFileSystemPermission extends TestCase {
+public class TestLocalFileSystemPermission {
 
   public static final Logger LOGGER =
     LoggerFactory.getLogger(TestFcLocalFsPermission.class);
@@ -71,11 +74,9 @@ private void cleanup(FileSystem fs, Path name) throws IOException {
     assertTrue(!fs.exists(name));
   }
 
+  @Test
   public void testLocalFSDirsetPermission() throws IOException {
-    if (Path.WINDOWS) {
-      LOGGER.info("Cannot run test for Windows");
-      return;
-    }
+    assumeNotWindows();
     LocalFileSystem localfs = FileSystem.getLocal(new Configuration());
     Configuration conf = localfs.getConf();
     conf.set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, "044");
@@ -124,11 +125,9 @@ public void testLocalFSDirsetPermission() throws IOException {
   }
 
   /** Test LocalFileSystem.setPermission */
+  @Test
   public void testLocalFSsetPermission() throws IOException {
-    if (Path.WINDOWS) {
-      LOGGER.info("Cannot run test for Windows");
-      return;
-    }
+    assumeNotWindows();
     Configuration conf = new Configuration();
     conf.set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, "044");
     LocalFileSystem localfs = FileSystem.getLocal(conf);
@@ -195,6 +194,7 @@ FsPermission getPermission(LocalFileSystem fs, Path p) throws IOException {
   }
 
   /** Test LocalFileSystem.setOwner. */
+  @Test
   public void testLocalFSsetOwner() throws IOException {
     if (Path.WINDOWS) {
       LOGGER.info("Cannot run test for Windows");
@@ -248,6 +248,7 @@ public void testLocalFSsetOwner() throws IOException {
    * 5. For this directory we expect 715 as permission not 755
    * @throws Exception we can throw away all the exception.
    */
+  @Test
   public void testSetUmaskInRealTime() throws Exception {
     if (Path.WINDOWS) {
       LOGGER.info("Cannot run test for Windows");

@@ -31,9 +31,17 @@
 
 import com.google.common.base.Joiner;
 
-import junit.framework.TestCase;
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
+import static org.apache.hadoop.test.PlatformAssumptions.assumeWindows;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
 
-public class TestPath extends TestCase {
+/**
+ * Test Hadoop Filesystem Paths.
+ */
+public class TestPath {
   /**
    * Merge a bunch of Path objects into a sorted semicolon-separated
    * path string.
@@ -242,9 +250,7 @@ public void testDots() {
   /** Test that Windows paths are correctly handled */
   @Test (timeout = 5000)
   public void testWindowsPaths() throws URISyntaxException, IOException {
-    if (!Path.WINDOWS) {
-      return;
-    }
+    assumeWindows();
 
     assertEquals(new Path("c:\\foo\\bar").toString(), "c:/foo/bar");
     assertEquals(new Path("c:/foo/bar").toString(), "c:/foo/bar");
@@ -255,9 +261,7 @@ public void testWindowsPaths() throws URISyntaxException, IOException {
   /** Test invalid paths on Windows are correctly rejected */
   @Test (timeout = 5000)
   public void testInvalidWindowsPaths() throws URISyntaxException, IOException {
-    if (!Path.WINDOWS) {
-      return;
-    }
+    assumeWindows();
 
     String [] invalidPaths = {
         "hdfs:\\\\\\tmp"
@@ -401,7 +405,7 @@ public void testAvroReflect() throws Exception {
   @Test (timeout = 30000)
   public void testGlobEscapeStatus() throws Exception {
     // This test is not meaningful on Windows where * is disallowed in file name.
-    if (Shell.WINDOWS) return;
+    assumeNotWindows();
     FileSystem lfs = FileSystem.getLocal(new Configuration());
     Path testRoot = lfs.makeQualified(
         new Path(GenericTestUtils.getTempPath("testPathGlob")));
@@ -493,7 +497,7 @@ public void testMergePaths() {
 
   @Test (timeout = 30000)
   public void testIsWindowsAbsolutePath() {
-    if (!Shell.WINDOWS) return;
+    assumeWindows();
     assertTrue(Path.isWindowsAbsolutePath("C:\\test", false));
     assertTrue(Path.isWindowsAbsolutePath("C:/test", false));
     assertTrue(Path.isWindowsAbsolutePath("/C:/test", true));

@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.fs;
 
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
@@ -29,7 +30,6 @@
 import java.net.URI;
 import java.net.URISyntaxException;
 
 import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.junit.Test;
@@ -71,37 +71,37 @@ protected URI testURI() {
   @Override
   public void testCreateDanglingLink() throws IOException {
     // Dangling symlinks are not supported on Windows local file system.
-    assumeTrue(!Path.WINDOWS);
+    assumeNotWindows();
     super.testCreateDanglingLink();
   }
 
   @Override
   public void testCreateFileViaDanglingLinkParent() throws IOException {
-    assumeTrue(!Path.WINDOWS);
+    assumeNotWindows();
     super.testCreateFileViaDanglingLinkParent();
   }
 
   @Override
   public void testOpenResolvesLinks() throws IOException {
-    assumeTrue(!Path.WINDOWS);
+    assumeNotWindows();
     super.testOpenResolvesLinks();
   }
 
   @Override
   public void testRecursiveLinks() throws IOException {
-    assumeTrue(!Path.WINDOWS);
+    assumeNotWindows();
     super.testRecursiveLinks();
   }
 
   @Override
   public void testRenameDirToDanglingSymlink() throws IOException {
-    assumeTrue(!Path.WINDOWS);
+    assumeNotWindows();
     super.testRenameDirToDanglingSymlink();
   }
 
   @Override
   public void testStatDanglingLink() throws IOException {
-    assumeTrue(!Path.WINDOWS);
+    assumeNotWindows();
     super.testStatDanglingLink();
   }
 
@@ -126,7 +126,7 @@ public void testDanglingLinkFilePartQual() throws IOException {
   @Test(timeout=1000)
   /** Stat and lstat a dangling link */
   public void testDanglingLink() throws IOException {
-    assumeTrue(!Path.WINDOWS);
+    assumeNotWindows();
     Path fileAbs = new Path(testBaseDir1()+"/file");
     Path fileQual = new Path(testURI().toString(), fileAbs);
     Path link = new Path(testBaseDir1()+"/linkToFile");
@@ -235,7 +235,7 @@ public void testSetTimesSymlinkToDir() throws IOException {
 
   @Override
   public void testSetTimesDanglingLink() throws IOException {
-    assumeTrue(!Path.WINDOWS);
+    assumeNotWindows();
     super.testSetTimesDanglingLink();
   }
 }

@@ -17,12 +17,11 @@
  */
 package org.apache.hadoop.fs;
 
-import org.apache.hadoop.util.Shell;
 import org.junit.BeforeClass;
 
 import java.io.IOException;
 
-import static org.junit.Assume.assumeTrue;
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 
 public class TestSymlinkLocalFSFileContext extends TestSymlinkLocalFS {
 
@@ -34,7 +33,7 @@ public static void testSetup() throws Exception {
 
   @Override
   public void testRenameFileWithDestParentSymlink() throws IOException {
-    assumeTrue(!Shell.WINDOWS);
+    assumeNotWindows();
     super.testRenameFileWithDestParentSymlink();
   }
 }

@@ -22,14 +22,13 @@
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Options.Rename;
-import org.apache.hadoop.util.Shell;
 import org.junit.BeforeClass;
 import org.junit.Ignore;
 import org.junit.Test;
 
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
-import static org.junit.Assume.assumeTrue;
 
 public class TestSymlinkLocalFSFileSystem extends TestSymlinkLocalFS {
 
@@ -64,7 +63,7 @@ public void testAccessFileViaInterSymlinkAbsTarget() throws IOException {}
 
   @Override
   public void testRenameFileWithDestParentSymlink() throws IOException {
-    assumeTrue(!Shell.WINDOWS);
+    assumeNotWindows();
     super.testRenameFileWithDestParentSymlink();
   }
 

@@ -30,7 +30,6 @@
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.hadoop.util.Shell;
 
 import org.apache.sshd.SshServer;
 import org.apache.sshd.common.NamedFactory;
@@ -48,8 +47,8 @@
 import org.junit.Test;
 import org.junit.rules.TestName;
 
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 import static org.junit.Assert.*;
-import static org.junit.Assume.assumeTrue;
 
 public class TestSFTPFileSystem {
 
@@ -99,7 +98,7 @@ public boolean authenticate(String username, String password,
   @BeforeClass
   public static void setUp() throws Exception {
     // skip all tests if running on Windows
-    assumeTrue(!Shell.WINDOWS);
+    assumeNotWindows();
 
     startSshdServer();
 

@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.fs.shell;
 
+import static org.apache.hadoop.test.PlatformAssumptions.assumeWindows;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
@@ -126,9 +127,7 @@ public void testToFile() throws Exception {
 
   @Test (timeout = 5000)
   public void testToFileRawWindowsPaths() throws Exception {
-    if (!Path.WINDOWS) {
-      return;
-    }
+    assumeWindows();
 
     // Can we handle raw Windows paths? The files need not exist for
     // these tests to succeed.
@@ -155,9 +154,7 @@ public void testToFileRawWindowsPaths() throws Exception {
 
   @Test (timeout = 5000)
   public void testInvalidWindowsPath() throws Exception {
-    if (!Path.WINDOWS) {
-      return;
-    }
+    assumeWindows();
 
     // Verify that the following invalid paths are rejected.
     String [] winPaths = {

@@ -42,6 +42,8 @@
 
 import static org.junit.Assume.*;
 import static org.junit.Assert.*;
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
+import static org.apache.hadoop.test.PlatformAssumptions.assumeWindows;
 
 import org.apache.commons.io.FileUtils;
 import org.apache.commons.logging.Log;
@@ -107,9 +109,7 @@ public void testFstat() throws Exception {
    */
   @Test (timeout = 30000)
   public void testMultiThreadedFstat() throws Exception {
-    if (Path.WINDOWS) {
-      return;
-    }
+    assumeNotWindows();
 
     final FileOutputStream fos = new FileOutputStream(
         new File(TEST_DIR, "testfstat"));
@@ -165,9 +165,7 @@ public void testFstatClosedFd() throws Exception {
 
   @Test (timeout = 30000)
   public void testSetFilePointer() throws Exception {
-    if (!Path.WINDOWS) {
-      return;
-    }
+    assumeWindows();
 
     LOG.info("Set a file pointer on Windows");
     try {
@@ -212,9 +210,7 @@ public void testSetFilePointer() throws Exception {
 
   @Test (timeout = 30000)
   public void testCreateFile() throws Exception {
-    if (!Path.WINDOWS) {
-      return;
-    }
+    assumeWindows();
 
     LOG.info("Open a file on Windows with SHARE_DELETE shared mode");
     try {
@@ -255,9 +251,7 @@ public void testCreateFile() throws Exception {
   /** Validate access checks on Windows */
   @Test (timeout = 30000)
   public void testAccess() throws Exception {
-    if (!Path.WINDOWS) {
-      return;
-    }
+    assumeWindows();
 
     File testFile = new File(TEST_DIR, "testfileaccess");
     assertTrue(testFile.createNewFile());
@@ -331,9 +325,7 @@ public void testAccess() throws Exception {
 
   @Test (timeout = 30000)
   public void testOpenMissingWithoutCreate() throws Exception {
-    if (Path.WINDOWS) {
-      return;
-    }
+    assumeNotWindows();
 
     LOG.info("Open a missing file without O_CREAT and it should fail");
     try {
@@ -348,9 +340,7 @@ public void testOpenMissingWithoutCreate() throws Exception {
 
   @Test (timeout = 30000)
   public void testOpenWithCreate() throws Exception {
-    if (Path.WINDOWS) {
-      return;
-    }
+    assumeNotWindows();
 
     LOG.info("Test creating a file with O_CREAT");
     FileDescriptor fd = NativeIO.POSIX.open(
@@ -382,9 +372,7 @@ public void testOpenWithCreate() throws Exception {
    */
   @Test (timeout = 30000)
   public void testFDDoesntLeak() throws IOException {
-    if (Path.WINDOWS) {
-      return;
-    }
+    assumeNotWindows();
 
     for (int i = 0; i < 10000; i++) {
       FileDescriptor fd = NativeIO.POSIX.open(
@@ -403,9 +391,7 @@ public void testFDDoesntLeak() throws IOException {
    */
   @Test (timeout = 30000)
   public void testChmod() throws Exception {
-    if (Path.WINDOWS) {
-      return;
-    }
+    assumeNotWindows();
 
     try {
       NativeIO.POSIX.chmod("/this/file/doesnt/exist", 777);
@@ -428,9 +414,7 @@ public void testChmod() throws Exception {
 
   @Test (timeout = 30000)
   public void testPosixFadvise() throws Exception {
-    if (Path.WINDOWS) {
-      return;
-    }
+    assumeNotWindows();
 
     FileInputStream fis = new FileInputStream("/dev/zero");
     try {
@@ -497,19 +481,13 @@ private void assertPermissions(File f, int expected) throws IOException {
 
   @Test (timeout = 30000)
   public void testGetUserName() throws IOException {
-    if (Path.WINDOWS) {
-      return;
-    }
-
+    assumeNotWindows();
     assertFalse(NativeIO.POSIX.getUserName(0).isEmpty());
   }
 
   @Test (timeout = 30000)
   public void testGetGroupName() throws IOException {
-    if (Path.WINDOWS) {
-      return;
-    }
-
+    assumeNotWindows();
     assertFalse(NativeIO.POSIX.getGroupName(0).isEmpty());
   }
 
@@ -647,8 +625,7 @@ public void testCopyFileUnbuffered() throws Exception {
 
   @Test (timeout=10000)
   public void testNativePosixConsts() {
-    assumeTrue("Native POSIX constants not required for Windows",
-      !Path.WINDOWS);
+    assumeNotWindows("Native POSIX constants not required for Windows");
     assertTrue("Native 0_RDONLY const not set", O_RDONLY >= 0);
     assertTrue("Native 0_WRONLY const not set", O_WRONLY >= 0);
     assertTrue("Native 0_RDWR const not set", O_RDWR >= 0);

@@ -30,15 +30,14 @@
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.util.Time;
 
 import org.junit.Test;
 
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 import static org.hamcrest.CoreMatchers.not;
 import static org.hamcrest.core.Is.is;
 import static org.junit.Assert.*;
-import static org.junit.Assume.assumeTrue;
 
 /**
  * Test host name and IP resolution and caching.
@@ -197,7 +196,7 @@ public void testRDNS() throws Exception {
   */
  @Test (timeout=60000)
  public void testLookupWithHostsFallback() throws Exception {
-    assumeTrue(!Shell.WINDOWS);
+    assumeNotWindows();
    final String oldHostname = changeDnsCachedHostname(DUMMY_HOSTNAME);
 
    try {

@@ -17,9 +17,9 @@
  */
 package org.apache.hadoop.security;
 
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
-import static org.junit.Assume.assumeTrue;
 
 import java.io.File;
 import java.io.FileOutputStream;
@@ -28,7 +28,6 @@
 import java.util.Map;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.security.ShellBasedIdMapping.PassThroughMap;
 import org.apache.hadoop.security.ShellBasedIdMapping.StaticMapping;
 import org.junit.Test;
@@ -87,7 +86,7 @@ public void testStaticMapParsing() throws IOException {
 
   @Test
   public void testStaticMapping() throws IOException {
-    assumeTrue(!Shell.WINDOWS);
+    assumeNotWindows();
     Map<Integer, Integer> uidStaticMap = new PassThroughMap<Integer>();
     Map<Integer, Integer> gidStaticMap = new PassThroughMap<Integer>();
 
@@ -129,7 +128,7 @@ public void testStaticMapping() throws IOException {
   // Test staticMap refreshing
   @Test
   public void testStaticMapUpdate() throws IOException {
-    assumeTrue(!Shell.WINDOWS);
+    assumeNotWindows();
     File tempStaticMapFile = File.createTempFile("nfs-", ".map");
     tempStaticMapFile.delete();
     Configuration conf = new Configuration();
@@ -207,7 +206,7 @@ public void testStaticMapUpdate() throws IOException {
 
   @Test
   public void testDuplicates() throws IOException {
-    assumeTrue(!Shell.WINDOWS);
+    assumeNotWindows();
     String GET_ALL_USERS_CMD = "echo \"root:x:0:0:root:/root:/bin/bash\n"
         + "hdfs:x:11501:10787:Grid Distributed File System:/home/hdfs:/bin/bash\n"
         + "hdfs:x:11502:10788:Grid Distributed File System:/home/hdfs:/bin/bash\n"
@@ -247,7 +246,7 @@ public void testDuplicates() throws IOException {
 
   @Test
   public void testIdOutOfIntegerRange() throws IOException {
-    assumeTrue(!Shell.WINDOWS);
+    assumeNotWindows();
     String GET_ALL_USERS_CMD = "echo \""
         + "nfsnobody:x:4294967294:4294967294:Anonymous NFS User:/var/lib/nfs:/sbin/nologin\n"
        + "nfsnobody1:x:4294967295:4294967295:Anonymous NFS User:/var/lib/nfs1:/sbin/nologin\n"

@@ -0,0 +1,47 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.test;
+
+import org.junit.internal.AssumptionViolatedException;
+
+/**
+ * JUnit assumptions for the environment (OS).
+ */
+public final class PlatformAssumptions {
+  public static final String OS_NAME = System.getProperty("os.name");
+  public static final boolean WINDOWS = OS_NAME.startsWith("Windows");
+
+  private PlatformAssumptions() { }
+
+  public static void assumeNotWindows() {
+    assumeNotWindows("Expected Unix-like platform but got " + OS_NAME);
+  }
+
+  public static void assumeNotWindows(String message) {
+    if (WINDOWS) {
+      throw new AssumptionViolatedException(message);
+    }
+  }
+
+  public static void assumeWindows() {
+    if (!WINDOWS) {
+      throw new AssumptionViolatedException(
+          "Expected Windows platform but got " + OS_NAME);
+    }
+  }
+}

@@ -18,8 +18,8 @@
 
 package org.apache.hadoop.util;
 
+import static org.apache.hadoop.test.PlatformAssumptions.assumeWindows;
 import static org.junit.Assert.*;
-import static org.junit.Assume.assumeTrue;
 import static org.junit.matchers.JUnitMatchers.containsString;
 
 import java.io.File;
@@ -53,7 +53,7 @@ public class TestWinUtils {
   @Before
   public void setUp() throws IOException {
     // Not supported on non-Windows platforms
-    assumeTrue(Shell.WINDOWS);
+    assumeWindows();
     TEST_DIR.mkdirs();
     assertTrue("Failed to create Test directory " + TEST_DIR,
         TEST_DIR.isDirectory() );

@@ -17,10 +17,10 @@
  */
 package org.apache.hadoop.fs;
 
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 import static org.junit.Assert.*;
 
 import java.io.IOException;
 import java.security.PrivilegedExceptionAction;
 import java.util.ArrayList;
 import java.util.UUID;
 import java.util.regex.Pattern;
@@ -495,7 +495,7 @@ public void pTestLiteral() throws IOException {
   public void pTestEscape() throws IOException {
     // Skip the test case on Windows because backslash will be treated as a
     // path separator instead of an escaping character on Windows.
-    org.junit.Assume.assumeTrue(!Path.WINDOWS);
+    assumeNotWindows();
     try {
       String [] files = new String[] {USER_DIR+"/ab\\[c.d"};
       Path[] matchedPath = prepareTesting(USER_DIR+"/ab\\[c.d", files);

@@ -34,6 +34,7 @@
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_SERVER_HTTPS_KEYSTORE_PASSWORD_KEY;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_SERVER_HTTPS_TRUSTSTORE_PASSWORD_KEY;
 import static org.apache.hadoop.test.GenericTestUtils.assertExceptionContains;
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 import static org.hamcrest.CoreMatchers.not;
 import static org.junit.Assert.assertArrayEquals;
 import static org.junit.Assert.assertEquals;
@@ -73,9 +74,7 @@
 import org.apache.hadoop.security.alias.CredentialProviderFactory;
 import org.apache.hadoop.security.alias.JavaKeyStoreProvider;
 import org.apache.hadoop.test.GenericTestUtils;
-import org.apache.hadoop.util.Shell;
 import org.junit.Assert;
-import org.junit.Assume;
 import org.junit.Before;
 import org.junit.Test;
 
@@ -800,7 +799,7 @@ public void testGetNNUris() throws Exception {
   @Test (timeout=15000)
   public void testLocalhostReverseLookup() {
     // 127.0.0.1 -> localhost reverse resolution does not happen on Windows.
-    Assume.assumeTrue(!Shell.WINDOWS);
+    assumeNotWindows();
 
     // Make sure when config FS_DEFAULT_NAME_KEY using IP address,
     // it will automatically convert it to hostname

@@ -43,10 +43,10 @@
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_KEYTAB_FILE_KEY;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_NAMENODE_LAZY_PERSIST_FILE_SCRUB_INTERVAL_SEC;
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_WEB_AUTHENTICATION_KERBEROS_PRINCIPAL_KEY;
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
-import static org.junit.Assume.assumeTrue;
 
 import java.io.File;
 import java.io.IOException;
@@ -451,7 +451,7 @@ public void testBalancerWithPinnedBlocks() throws Exception {
     // This test assumes stick-bit based block pin mechanism available only
     // in Linux/Unix. It can be unblocked on Windows when HDFS-7759 is ready to
     // provide a different mechanism for Windows.
-    assumeTrue(!Path.WINDOWS);
+    assumeNotWindows();
 
     final Configuration conf = new HdfsConfiguration();
     initConf(conf);

@@ -17,9 +17,8 @@
  */
 package org.apache.hadoop.hdfs.server.blockmanagement;
 
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
-import static org.junit.Assume.assumeTrue;
 
 import java.io.Closeable;
 import java.io.IOException;
@@ -70,7 +69,7 @@ public void testBlockInvalidationWhenRBWReplicaMissedInDN()
       throws IOException, InterruptedException {
     // This test cannot pass on Windows due to file locking enforcement.  It will
     // reject the attempt to delete the block file from the RBW folder.
-    assumeTrue(!Path.WINDOWS);
+    assumeNotWindows();
 
     Configuration conf = new HdfsConfiguration();
     conf.setInt(DFSConfigKeys.DFS_REPLICATION_KEY, 2);

@@ -72,6 +72,7 @@
 import org.mockito.stubbing.Answer;
 
 import static org.apache.hadoop.hdfs.DFSConfigKeys.DFS_DATANODE_DATA_DIR_KEY;
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 import static org.hamcrest.CoreMatchers.anyOf;
 import static org.hamcrest.CoreMatchers.containsString;
 import static org.hamcrest.CoreMatchers.not;
@@ -81,7 +82,6 @@
 import static org.junit.Assert.assertThat;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
-import static org.junit.Assume.assumeTrue;
 import static org.mockito.Matchers.any;
 import static org.mockito.Matchers.anyString;
 import static org.mockito.Mockito.doAnswer;
@@ -784,7 +784,7 @@ public void testDirectlyReloadAfterCheckDiskError()
       ReconfigurationException {
     // The test uses DataNodeTestUtils#injectDataDirFailure() to simulate
     // volume failures which is currently not supported on Windows.
-    assumeTrue(!Path.WINDOWS);
+    assumeNotWindows();
 
     startDFSCluster(1, 2);
     createFile(new Path("/test"), 32, (short)2);

@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hdfs.server.datanode;
 
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 import static org.hamcrest.core.Is.is;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
@@ -24,7 +25,6 @@
 import static org.junit.Assert.assertNotNull;
 import static org.junit.Assert.assertThat;
 import static org.junit.Assert.assertTrue;
-import static org.junit.Assume.assumeTrue;
 
 import java.io.File;
 import java.io.IOException;
@@ -219,7 +219,7 @@ public void testFailedVolumeBeingRemovedFromDataNode()
       throws InterruptedException, IOException, TimeoutException {
     // The test uses DataNodeTestUtils#injectDataDirFailure() to simulate
     // volume failures which is currently not supported on Windows.
-    assumeTrue(!Path.WINDOWS);
+    assumeNotWindows();
 
     Path file1 = new Path("/test1");
     DFSTestUtil.createFile(fs, file1, 1024, (short) 2, 1L);
@@ -384,7 +384,7 @@ public void testTolerateVolumeFailuresAfterAddingMoreVolumes()
   public void testUnderReplicationAfterVolFailure() throws Exception {
     // The test uses DataNodeTestUtils#injectDataDirFailure() to simulate
     // volume failures which is currently not supported on Windows.
-    assumeTrue(!Path.WINDOWS);
+    assumeNotWindows();
 
     // Bring up one more datanode
     cluster.startDataNodes(conf, 1, true, null, null);

@@ -19,6 +19,7 @@
 
 import static org.apache.hadoop.test.MetricsAsserts.assertCounter;
 import static org.apache.hadoop.test.MetricsAsserts.getMetrics;
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 import static org.hamcrest.core.Is.is;
 import static org.junit.Assert.assertArrayEquals;
 import static org.junit.Assert.assertEquals;
@@ -26,7 +27,6 @@
 import static org.junit.Assert.assertNull;
 import static org.junit.Assert.assertThat;
 import static org.junit.Assert.assertTrue;
-import static org.junit.Assume.assumeTrue;
 
 import java.io.File;
 import java.util.ArrayList;
@@ -82,7 +82,7 @@ public class TestDataNodeVolumeFailureReporting {
   public void setUp() throws Exception {
     // These tests use DataNodeTestUtils#injectDataDirFailure() to simulate
     // volume failures which is currently not supported on Windows.
-    assumeTrue(!Path.WINDOWS);
+    assumeNotWindows();
     // Allow a single volume failure (there are two volumes)
     initCluster(1, 2, 1);
   }

@@ -17,10 +17,10 @@
  */
 package org.apache.hadoop.hdfs.server.datanode;
 
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
-import static org.junit.Assume.assumeTrue;
 
 import java.io.File;
 import java.io.IOException;
@@ -91,7 +91,7 @@ public void tearDown() throws Exception {
    */
   @Test
   public void testValidVolumesAtStartup() throws Exception {
-    assumeTrue(!System.getProperty("os.name").startsWith("Windows"));
+    assumeNotWindows();
 
     // Make sure no DNs are running.
     cluster.shutdownDataNodes();
@@ -139,7 +139,7 @@ public void testValidVolumesAtStartup() throws Exception {
    */
   @Test
   public void testConfigureMinValidVolumes() throws Exception {
-    assumeTrue(!System.getProperty("os.name").startsWith("Windows"));
+    assumeNotWindows();
 
     // Bring up two additional datanodes that need both of their volumes
    // functioning in order to stay up.
@@ -218,7 +218,7 @@ public void testVolumeAndTolerableConfiguration() throws Exception {
   private void testVolumeConfig(int volumesTolerated, int volumesFailed,
       boolean expectedBPServiceState, boolean manageDfsDirs)
       throws IOException, InterruptedException {
-    assumeTrue(!System.getProperty("os.name").startsWith("Windows"));
+    assumeNotWindows();
     final int dnIndex = 0;
     // Fail the current directory since invalid storage directory perms
     // get fixed up automatically on datanode startup.
@@ -272,7 +272,7 @@ private void prepareDirToFail(File dir) throws IOException,
    */
   @Test
   public void testFailedVolumeOnStartupIsCounted() throws Exception {
-    assumeTrue(!System.getProperty("os.name").startsWith("Windows"));
+    assumeNotWindows();
     final DatanodeManager dm = cluster.getNamesystem().getBlockManager(
         ).getDatanodeManager();
     long origCapacity = DFSTestUtil.getLiveDatanodeCapacity(dm);

@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hdfs.server.datanode;
 
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 import static org.junit.Assume.assumeTrue;
 
 import java.io.File;
@@ -96,7 +97,8 @@ private static Configuration getDefaultConf() {
    */
   @Test(timeout=120000)
   public void testPinning() throws Exception {
-    assumeTrue(NativeCodeLoader.isNativeCodeLoaded() && !Path.WINDOWS);
+    assumeTrue(NativeCodeLoader.isNativeCodeLoaded());
+    assumeNotWindows();
     Configuration conf = getDefaultConf();
     // Set a really long revocation timeout, so that we won't reach it during
     // this test.
@@ -146,7 +148,8 @@ public void testPinning() throws Exception {
    */
   @Test(timeout=120000)
   public void testRevocation() throws Exception {
-    assumeTrue(NativeCodeLoader.isNativeCodeLoaded() && !Path.WINDOWS);
+    assumeTrue(NativeCodeLoader.isNativeCodeLoaded());
+    assumeNotWindows();
     BlockReaderTestUtil.enableHdfsCachingTracing();
     BlockReaderTestUtil.enableShortCircuitShmTracing();
     Configuration conf = getDefaultConf();

@@ -42,6 +42,7 @@
 
 import static org.apache.hadoop.fs.StorageType.DEFAULT;
 import static org.apache.hadoop.fs.StorageType.RAM_DISK;
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 import static org.hamcrest.CoreMatchers.equalTo;
 import static org.hamcrest.core.Is.is;
 import static org.junit.Assert.assertThat;
@@ -61,8 +62,8 @@ public static void init() {
 
   @Before
   public void before() {
-    Assume.assumeThat(NativeCodeLoader.isNativeCodeLoaded() && !Path.WINDOWS,
-        equalTo(true));
+    Assume.assumeTrue(NativeCodeLoader.isNativeCodeLoaded());
+    assumeNotWindows();
     Assume.assumeThat(DomainSocket.getLoadingFailureReason(), equalTo(null));
 
     final long osPageSize = NativeIO.POSIX.getCacheManipulator().getOperatingSystemPageSize();

@@ -17,11 +17,11 @@
  */
 package org.apache.hadoop.hdfs.util;
 
+import static org.apache.hadoop.test.PlatformAssumptions.assumeWindows;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
-import static org.junit.Assume.assumeTrue;
 
 import java.io.File;
 import java.io.FileNotFoundException;
@@ -33,7 +33,6 @@
 import org.apache.hadoop.hdfs.DFSTestUtil;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.test.PathUtils;
-import org.apache.hadoop.util.Shell;
 import org.junit.Before;
 import org.junit.Rule;
 import org.junit.Test;
@@ -129,7 +128,7 @@ public void testFailToFlush() throws IOException {
 
   @Test
   public void testFailToRename() throws IOException {
-    assumeTrue(Shell.WINDOWS);
+    assumeWindows();
     OutputStream fos = null;
     try {
       fos = new AtomicFileOutputStream(DST_FILE);

@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.tracing;
 
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 import static org.junit.Assume.assumeTrue;
 
 import java.io.File;
@@ -24,7 +25,6 @@
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FsTracer;
-import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
@@ -63,7 +63,8 @@ public static void shutdown() throws IOException {
 
   @Test
   public void testShortCircuitTraceHooks() throws IOException {
-    assumeTrue(NativeCodeLoader.isNativeCodeLoaded() && !Path.WINDOWS);
+    assumeTrue(NativeCodeLoader.isNativeCodeLoaded());
+    assumeNotWindows();
     conf = new Configuration();
     conf.set(TraceUtils.DEFAULT_HADOOP_TRACE_PREFIX +
         Tracer.SPAN_RECEIVER_CLASSES_KEY,

@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.mapred;
 
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertNotNull;
@@ -65,7 +66,6 @@
 import org.apache.hadoop.security.SecurityUtil;
 import org.apache.hadoop.security.UserGroupInformation;
 import org.apache.hadoop.security.token.Token;
-import org.apache.hadoop.util.Shell;
 import org.apache.hadoop.yarn.api.ApplicationClientProtocol;
 import org.apache.hadoop.yarn.api.ApplicationConstants;
 import org.apache.hadoop.yarn.api.ApplicationConstants.Environment;
@@ -586,9 +586,7 @@ private void testAMStandardEnv(boolean customLibPath) throws Exception {
     // the Windows behavior is different and this test currently doesn't really
     // apply
     // MAPREDUCE-6588 should revisit this test
-    if (Shell.WINDOWS) {
-      return;
-    }
+    assumeNotWindows();
 
     final String ADMIN_LIB_PATH = "foo";
     final String USER_LIB_PATH = "bar";

@@ -18,7 +18,7 @@
 
 package org.apache.hadoop.fs.azure;
 
-import static org.junit.Assume.assumeTrue;
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 
 import org.apache.hadoop.fs.FSMainOperationsBaseTest;
 import org.apache.hadoop.fs.FileSystem;
@@ -48,7 +48,7 @@ public void testListStatusThrowsExceptionForUnreadableDir() throws Exception {
     System.out
         .println("Skipping testListStatusThrowsExceptionForUnreadableDir since WASB"
             + " doesn't honor directory permissions.");
-    assumeTrue(!Path.WINDOWS);
+    assumeNotWindows();
   }
 
   @Override

@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.fs.azure;
 
+import static org.apache.hadoop.test.PlatformAssumptions.assumeWindows;
 import static org.junit.Assert.assertEquals;
 
 import java.io.File;
@@ -26,7 +27,6 @@
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.util.Shell;
 import org.junit.Assert;
 import org.junit.Test;
 
@@ -38,9 +38,7 @@ public class TestShellDecryptionKeyProvider {
 
   @Test
   public void testScriptPathNotSpecified() throws Exception {
-    if (!Shell.WINDOWS) {
-      return;
-    }
+    assumeWindows();
     ShellDecryptionKeyProvider provider = new ShellDecryptionKeyProvider();
     Configuration conf = new Configuration();
     String account = "testacct";
@@ -58,9 +56,7 @@ public void testScriptPathNotSpecified() throws Exception {
 
   @Test
   public void testValidScript() throws Exception {
-    if (!Shell.WINDOWS) {
-      return;
-    }
+    assumeWindows();
     String expectedResult = "decretedKey";
 
     // Create a simple script which echoes the given key plus the given

@@ -20,10 +20,10 @@
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.util.Shell;
 import org.junit.Assert;
 import org.junit.Test;
 
+import static org.apache.hadoop.test.PlatformAssumptions.assumeWindows;
 import static org.junit.Assert.assertTrue;
 
 public class TestWindowsBasedProcessTree {
@@ -45,10 +45,7 @@ String getAllProcessInfoFromShell() {
   @Test (timeout = 30000)
   @SuppressWarnings("deprecation")
   public void tree() {
-    if( !Shell.WINDOWS) {
-      LOG.info("Platform not Windows. Not testing");
-      return;
-    }
+    assumeWindows();
     assertTrue("WindowsBasedProcessTree should be available on Windows",
       WindowsBasedProcessTree.isAvailable());
     ControlledClock testClock = new ControlledClock();

@@ -31,8 +31,8 @@
 import org.junit.Assert;
 import org.junit.Test;
 
+import static org.apache.hadoop.test.PlatformAssumptions.assumeWindows;
 import static org.junit.Assert.*;
-import static org.junit.Assume.assumeTrue;
 
 @SuppressWarnings("deprecation")
 public class TestContainerExecutor {
@@ -80,8 +80,7 @@ public void testRunCommandwithPriority() throws Exception {
 
   @Test (timeout = 5000)
   public void testRunCommandWithNoResources() {
-    // Windows only test
-    assumeTrue(Shell.WINDOWS);
+    assumeWindows();
     Configuration conf = new Configuration();
     String[] command = containerExecutor.getRunCommand("echo", "group1", null, null,
         conf, Resource.newInstance(1024, 1));
@@ -93,8 +92,7 @@ public void testRunCommandWithNoResources() {
 
   @Test (timeout = 5000)
   public void testRunCommandWithMemoryOnlyResources() {
-    // Windows only test
-    assumeTrue(Shell.WINDOWS);
+    assumeWindows();
     Configuration conf = new Configuration();
     conf.set(YarnConfiguration.NM_WINDOWS_CONTAINER_MEMORY_LIMIT_ENABLED, "true");
     String[] command = containerExecutor.getRunCommand("echo", "group1", null, null,
@@ -107,8 +105,7 @@ public void testRunCommandWithMemoryOnlyResources() {
 
   @Test (timeout = 5000)
   public void testRunCommandWithCpuAndMemoryResources() {
-    // Windows only test
-    assumeTrue(Shell.WINDOWS);
+    assumeWindows();
     int containerCores = 1;
     Configuration conf = new Configuration();
     conf.set(YarnConfiguration.NM_WINDOWS_CONTAINER_CPU_LIMIT_ENABLED, "true");

@@ -18,9 +18,9 @@
 
 package org.apache.hadoop.yarn.server.nodemanager;
 
+import static org.apache.hadoop.test.PlatformAssumptions.assumeNotWindows;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
-import static org.junit.Assume.assumeTrue;
 import static org.mockito.Matchers.any;
 import static org.mockito.Mockito.doAnswer;
 import static org.mockito.Mockito.mock;
@@ -123,7 +123,7 @@ private void setupMockExecutor(String executorPath, Configuration conf)
 
   @Before
   public void setup() throws IOException, ContainerExecutionException {
-    assumeTrue(!Path.WINDOWS);
+    assumeNotWindows();
 
     tmpMockExecutor = System.getProperty("test.build.data") +
         "/tmp-mock-container-executor";

@@ -18,6 +18,7 @@
 
 package org.apache.hadoop.yarn.server.nodemanager.containermanager.launcher;
 
+import static org.apache.hadoop.test.PlatformAssumptions.assumeWindows;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertThat;
 import static org.junit.Assert.fail;
@@ -408,7 +409,7 @@ public void testContainerLaunchStdoutAndStderrDiagnostics() throws IOException {
   public void testPrependDistcache() throws Exception {
 
     // Test is only relevant on Windows
-    Assume.assumeTrue(Shell.WINDOWS);
+    assumeWindows();
 
     ContainerLaunchContext containerLaunchContext =
         recordFactory.newRecordInstance(ContainerLaunchContext.class);
@@ -1129,7 +1130,7 @@ public void testWindowsShellScriptBuilderCommand() throws IOException {
     String callCmd = "@call ";
 
     // Test is only relevant on Windows
-    Assume.assumeTrue(Shell.WINDOWS);
+    assumeWindows();
 
     // The tests are built on assuming 8191 max command line length
     assertEquals(8191, Shell.WINDOWS_MAX_SHELL_LENGTH);
@@ -1177,7 +1178,7 @@ public void testWindowsShellScriptBuilderCommand() throws IOException {
   @Test (timeout = 10000)
   public void testWindowsShellScriptBuilderEnv() throws IOException {
     // Test is only relevant on Windows
-    Assume.assumeTrue(Shell.WINDOWS);
+    assumeWindows();
 
     // The tests are built on assuming 8191 max command line length
     assertEquals(8191, Shell.WINDOWS_MAX_SHELL_LENGTH);
@@ -1202,7 +1203,7 @@ public void testWindowsShellScriptBuilderMkdir() throws IOException {
     String mkDirCmd = "@if not exist \"\" mkdir \"\"";
 
     // Test is only relevant on Windows
-    Assume.assumeTrue(Shell.WINDOWS);
+    assumeWindows();
 
     // The tests are built on assuming 8191 max command line length
     assertEquals(8191, Shell.WINDOWS_MAX_SHELL_LENGTH);
@@ -1225,7 +1226,7 @@ public void testWindowsShellScriptBuilderMkdir() throws IOException {
   @Test (timeout = 10000)
   public void testWindowsShellScriptBuilderLink() throws IOException {
     // Test is only relevant on Windows
-    Assume.assumeTrue(Shell.WINDOWS);
+    assumeWindows();
     String linkCmd = "@" + Shell.getWinUtilsPath() + " symlink \"\" \"\"";
 
     // The tests are built on assuming 8191 max command line length
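A note on the mechanism: the new PlatformAssumptions helper (added above) skips by throwing JUnit's AssumptionViolatedException, which runners report as "skipped" rather than "passed" or "failed"; the assumeNotWindows(String) overload lets a test attach a reason, as testNativePosixConsts does above. A hypothetical caller, sketched for illustration only (this class is not part of the commit):

    import org.junit.Test;

    import static org.apache.hadoop.test.PlatformAssumptions.assumeWindows;

    public class WindowsOnlyExampleTest {
      @Test
      public void testWindowsOnlyBehavior() {
        // On Unix-like platforms this throws AssumptionViolatedException,
        // so the runner marks the test as skipped instead of passed.
        assumeWindows();
        // Windows-specific assertions would follow here.
      }
    }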