From e6cda5819b1f4bbdcb12487260b1e3b787e11879 Mon Sep 17 00:00:00 2001
From: Mingliang Liu
Date: Tue, 14 Mar 2017 12:54:46 -0700
Subject: [PATCH] Revert "HADOOP-14170. FileSystemContractBaseTest is not
cleaning up test directory clearly. Contributed by Mingliang Liu"

This reverts commit b8c69557b7a23ff9c4c0b2c9d595338a08b873f1.
---
.../hadoop/fs/FileSystemContractBaseTest.java | 247 +++++++-----------
.../fs/TestRawLocalFileSystemContract.java | 24 +-
.../fs/s3a/ITestS3AFileSystemContract.java | 39 ++-
3 files changed, 132 insertions(+), 178 deletions(-)
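
The revert replaces the per-suite test base directory added by HADOOP-14170
with the old hard-coded /test paths: relative path strings are once again
qualified against the filesystem working directory instead of
getTestBaseDir(), and the root-directory tests lose their
rootDirTestEnabled() opt-out. Below is a minimal sketch of the difference in
path resolution, built from the makeQualified() calls visible in the diff;
the URI, working directory and class name in the sketch are illustrative
examples only, not values taken from the patch.

import java.net.URI;
import org.apache.hadoop.fs.Path;

/** Illustrative sketch of pre- and post-revert test path resolution. */
public class PathResolutionSketch {
  public static void main(String[] args) {
    URI fsUri = URI.create("hdfs://namenode:8020");   // example URI, not from the patch
    Path workingDir = new Path("/user/alice");        // example working directory

    // Pre-revert (HADOOP-14170): relative test paths were qualified against a
    // per-suite base directory under the working directory.
    Path testBaseDir = new Path(workingDir, "FileSystemContractBaseTest");
    Path before = new Path("testMkdirs").makeQualified(fsUri, testBaseDir);
    // -> hdfs://namenode:8020/user/alice/FileSystemContractBaseTest/testMkdirs

    // Post-revert: the tests go back to shared, hard-coded locations such as
    // /test/hadoop, qualified directly against the working directory.
    Path after = new Path("/test/hadoop").makeQualified(fsUri, workingDir);
    // -> hdfs://namenode:8020/test/hadoop

    System.out.println(before);
    System.out.println(after);
  }
}

Because the reverted tests write under the shared /test and root paths again,
ITestS3AFileSystemContract keeps its own basePath override of path() and
deletes basePath in tearDown(), as shown in the last hunks of this patch.
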
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemContractBaseTest.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemContractBaseTest.java
index 78ba1f928a..6247959182 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemContractBaseTest.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemContractBaseTest.java
@@ -24,9 +24,8 @@
import junit.framework.TestCase;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
-
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.security.AccessControlException;
@@ -46,8 +45,8 @@
*
*/
public abstract class FileSystemContractBaseTest extends TestCase {
- private static final Logger LOG =
- LoggerFactory.getLogger(FileSystemContractBaseTest.class);
+ private static final Log LOG =
+ LogFactory.getLog(FileSystemContractBaseTest.class);
protected final static String TEST_UMASK = "062";
protected FileSystem fs;
@@ -55,46 +54,15 @@ public abstract class FileSystemContractBaseTest extends TestCase {
@Override
protected void tearDown() throws Exception {
- if (fs != null) {
- // some cases use this absolute path
- if (rootDirTestEnabled()) {
- cleanupDir(path("/FileSystemContractBaseTest"));
- }
- // others use this relative path against test base directory
- cleanupDir(getTestBaseDir());
- }
- super.tearDown();
- }
-
- private void cleanupDir(Path p) {
try {
- LOG.info("Deleting " + p);
- fs.delete(p, true);
+ if (fs != null) {
+ fs.delete(path("/test"), true);
+ }
} catch (IOException e) {
- LOG.error("Error deleting test dir: " + p, e);
+ LOG.error("Error deleting /test: " + e, e);
}
}
-
- /**
- * Test base directory for resolving relative test paths.
- *
- * The default value is /user/$USER/FileSystemContractBaseTest. Subclass may
- * set specific test base directory.
- */
- protected Path getTestBaseDir() {
- return new Path(fs.getWorkingDirectory(), "FileSystemContractBaseTest");
- }
-
- /**
- * For absolute path return the fully qualified path while for relative path
- * return the fully qualified path against {@link #getTestBaseDir()}.
- */
- protected final Path path(String pathString) {
- Path p = new Path(pathString).makeQualified(fs.getUri(), getTestBaseDir());
- LOG.info("Resolving {} -> {}", pathString, p);
- return p;
- }
-
+
protected int getBlockSize() {
return 1024;
}
@@ -112,17 +80,6 @@ protected boolean renameSupported() {
return true;
}
- /**
- * Override this if the filesystem does not enable testing root directories.
- *
- * If this returns true, the test will create and delete test directories and
- * files under root directory, which may have side effects, e.g. fail tests
- * with PermissionDenied exceptions.
- */
- protected boolean rootDirTestEnabled() {
- return true;
- }
-
/**
* Override this if the filesystem is not case sensitive
* @return true if the case detection/preservation tests should run
@@ -145,24 +102,24 @@ public void testWorkingDirectory() throws Exception {
Path workDir = path(getDefaultWorkingDirectory());
assertEquals(workDir, fs.getWorkingDirectory());
- fs.setWorkingDirectory(fs.makeQualified(new Path(".")));
+ fs.setWorkingDirectory(path("."));
assertEquals(workDir, fs.getWorkingDirectory());
- fs.setWorkingDirectory(fs.makeQualified(new Path("..")));
+ fs.setWorkingDirectory(path(".."));
assertEquals(workDir.getParent(), fs.getWorkingDirectory());
- Path relativeDir = fs.makeQualified(new Path("testWorkingDirectory"));
+ Path relativeDir = path("hadoop");
fs.setWorkingDirectory(relativeDir);
assertEquals(relativeDir, fs.getWorkingDirectory());
- Path absoluteDir = path("/FileSystemContractBaseTest/testWorkingDirectory");
+ Path absoluteDir = path("/test/hadoop");
fs.setWorkingDirectory(absoluteDir);
assertEquals(absoluteDir, fs.getWorkingDirectory());
}
public void testMkdirs() throws Exception {
- Path testDir = path("testMkdirs");
+ Path testDir = path("/test/hadoop");
assertFalse(fs.exists(testDir));
assertFalse(fs.isFile(testDir));
@@ -188,15 +145,14 @@ public void testMkdirs() throws Exception {
}
public void testMkdirsFailsForSubdirectoryOfExistingFile() throws Exception {
- Path testDir = path("testMkdirsFailsForSubdirectoryOfExistingFile");
+ Path testDir = path("/test/hadoop");
assertFalse(fs.exists(testDir));
assertTrue(fs.mkdirs(testDir));
assertTrue(fs.exists(testDir));
- createFile(path("testMkdirsFailsForSubdirectoryOfExistingFile/file"));
+ createFile(path("/test/hadoop/file"));
- Path testSubDir = path(
- "testMkdirsFailsForSubdirectoryOfExistingFile/file/subdir");
+ Path testSubDir = path("/test/hadoop/file/subdir");
try {
fs.mkdirs(testSubDir);
fail("Should throw IOException.");
@@ -211,8 +167,7 @@ public void testMkdirsFailsForSubdirectoryOfExistingFile() throws Exception {
// file missing execute permission.
}
- Path testDeepSubDir = path(
- "testMkdirsFailsForSubdirectoryOfExistingFile/file/deep/sub/dir");
+ Path testDeepSubDir = path("/test/hadoop/file/deep/sub/dir");
try {
fs.mkdirs(testDeepSubDir);
fail("Should throw IOException.");
@@ -235,7 +190,7 @@ public void testMkdirsWithUmask() throws Exception {
String oldUmask = conf.get(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY);
try {
conf.set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, TEST_UMASK);
- final Path dir = path("newDir");
+ final Path dir = path("/test/newDir");
assertTrue(fs.mkdirs(dir, new FsPermission((short) 0777)));
FileStatus status = fs.getFileStatus(dir);
assertTrue(status.isDirectory());
@@ -268,8 +223,7 @@ private boolean isS3(FileSystem fileSystem) {
public void testGetFileStatusThrowsExceptionForNonExistentFile()
throws Exception {
try {
- fs.getFileStatus(
- path("testGetFileStatusThrowsExceptionForNonExistentFile/file"));
+ fs.getFileStatus(path("/test/hadoop/file"));
fail("Should throw FileNotFoundException");
} catch (FileNotFoundException e) {
// expected
@@ -278,8 +232,7 @@ public void testGetFileStatusThrowsExceptionForNonExistentFile()
public void testListStatusThrowsExceptionForNonExistentFile() throws Exception {
try {
- fs.listStatus(
- path("testListStatusThrowsExceptionForNonExistentFile/file"));
+ fs.listStatus(path("/test/hadoop/file"));
fail("Should throw FileNotFoundException");
} catch (FileNotFoundException fnfe) {
// expected
@@ -287,32 +240,30 @@ public void testListStatusThrowsExceptionForNonExistentFile() throws Exception {
}
public void testListStatus() throws Exception {
- final Path[] testDirs = {
- path("testListStatus/a"),
- path("testListStatus/b"),
- path("testListStatus/c/1")
- };
+ Path[] testDirs = { path("/test/hadoop/a"),
+ path("/test/hadoop/b"),
+ path("/test/hadoop/c/1"), };
assertFalse(fs.exists(testDirs[0]));
for (Path path : testDirs) {
assertTrue(fs.mkdirs(path));
}
- FileStatus[] paths = fs.listStatus(path("."));
+ FileStatus[] paths = fs.listStatus(path("/test"));
assertEquals(1, paths.length);
- assertEquals(path("testListStatus"), paths[0].getPath());
+ assertEquals(path("/test/hadoop"), paths[0].getPath());
- paths = fs.listStatus(path("testListStatus"));
+ paths = fs.listStatus(path("/test/hadoop"));
assertEquals(3, paths.length);
ArrayList list = new ArrayList();
for (FileStatus fileState : paths) {
list.add(fileState.getPath());
}
- assertTrue(list.contains(path("testListStatus/a")));
- assertTrue(list.contains(path("testListStatus/b")));
- assertTrue(list.contains(path("testListStatus/c")));
+ assertTrue(list.contains(path("/test/hadoop/a")));
+ assertTrue(list.contains(path("/test/hadoop/b")));
+ assertTrue(list.contains(path("/test/hadoop/c")));
- paths = fs.listStatus(path("testListStatus/a"));
+ paths = fs.listStatus(path("/test/hadoop/a"));
assertEquals(0, paths.length);
}
@@ -343,12 +294,12 @@ public void testWriteReadAndDeleteTwoBlocks() throws Exception {
* @throws IOException on IO failures
*/
protected void writeReadAndDelete(int len) throws IOException {
- Path path = path("writeReadAndDelete/file");
+ Path path = path("/test/hadoop/file");
writeAndRead(path, data, len, false, true);
}
public void testOverwrite() throws IOException {
- Path path = path("testOverwrite/file");
+ Path path = path("/test/hadoop/file");
fs.mkdirs(path.getParent());
@@ -374,7 +325,7 @@ public void testOverwrite() throws IOException {
}
public void testWriteInNonExistentDirectory() throws IOException {
- Path path = path("testWriteInNonExistentDirectory/file");
+ Path path = path("/test/hadoop/file");
assertFalse("Parent exists", fs.exists(path.getParent()));
createFile(path);
@@ -384,15 +335,15 @@ public void testWriteInNonExistentDirectory() throws IOException {
}
public void testDeleteNonExistentFile() throws IOException {
- Path path = path("testDeleteNonExistentFile/file");
+ Path path = path("/test/hadoop/file");
assertFalse("Path exists: " + path, fs.exists(path));
assertFalse("No deletion", fs.delete(path, true));
}
public void testDeleteRecursively() throws IOException {
- Path dir = path("testDeleteRecursively");
- Path file = path("testDeleteRecursively/file");
- Path subdir = path("testDeleteRecursively/subdir");
+ Path dir = path("/test/hadoop");
+ Path file = path("/test/hadoop/file");
+ Path subdir = path("/test/hadoop/subdir");
createFile(file);
assertTrue("Created subdir", fs.mkdirs(subdir));
@@ -418,7 +369,7 @@ public void testDeleteRecursively() throws IOException {
}
public void testDeleteEmptyDirectory() throws IOException {
- Path dir = path("testDeleteEmptyDirectory");
+ Path dir = path("/test/hadoop");
assertTrue(fs.mkdirs(dir));
assertTrue("Dir exists", fs.exists(dir));
assertTrue("Deleted", fs.delete(dir, false));
@@ -428,26 +379,26 @@ public void testDeleteEmptyDirectory() throws IOException {
public void testRenameNonExistentPath() throws Exception {
if (!renameSupported()) return;
- Path src = path("testRenameNonExistentPath/path");
- Path dst = path("testRenameNonExistentPathNew/newpath");
+ Path src = path("/test/hadoop/path");
+ Path dst = path("/test/new/newpath");
rename(src, dst, false, false, false);
}
public void testRenameFileMoveToNonExistentDirectory() throws Exception {
if (!renameSupported()) return;
- Path src = path("testRenameFileMoveToNonExistentDirectory/file");
+ Path src = path("/test/hadoop/file");
createFile(src);
- Path dst = path("testRenameFileMoveToNonExistentDirectoryNew/newfile");
+ Path dst = path("/test/new/newfile");
rename(src, dst, false, true, false);
}
public void testRenameFileMoveToExistingDirectory() throws Exception {
if (!renameSupported()) return;
- Path src = path("testRenameFileMoveToExistingDirectory/file");
+ Path src = path("/test/hadoop/file");
createFile(src);
- Path dst = path("testRenameFileMoveToExistingDirectoryNew/newfile");
+ Path dst = path("/test/new/newfile");
fs.mkdirs(dst.getParent());
rename(src, dst, true, false, true);
}
@@ -455,9 +406,9 @@ public void testRenameFileMoveToExistingDirectory() throws Exception {
public void testRenameFileAsExistingFile() throws Exception {
if (!renameSupported()) return;
- Path src = path("testRenameFileAsExistingFile/file");
+ Path src = path("/test/hadoop/file");
createFile(src);
- Path dst = path("testRenameFileAsExistingFileNew/newfile");
+ Path dst = path("/test/new/newfile");
createFile(dst);
rename(src, dst, false, true, true);
}
@@ -465,84 +416,83 @@ public void testRenameFileAsExistingFile() throws Exception {
public void testRenameFileAsExistingDirectory() throws Exception {
if (!renameSupported()) return;
- Path src = path("testRenameFileAsExistingDirectory/file");
+ Path src = path("/test/hadoop/file");
createFile(src);
- Path dst = path("testRenameFileAsExistingDirectoryNew/newdir");
+ Path dst = path("/test/new/newdir");
fs.mkdirs(dst);
rename(src, dst, true, false, true);
- assertIsFile(path("testRenameFileAsExistingDirectoryNew/newdir/file"));
+ assertIsFile(path("/test/new/newdir/file"));
}
public void testRenameDirectoryMoveToNonExistentDirectory()
throws Exception {
if (!renameSupported()) return;
- Path src = path("testRenameDirectoryMoveToNonExistentDirectory/dir");
+ Path src = path("/test/hadoop/dir");
fs.mkdirs(src);
- Path dst = path("testRenameDirectoryMoveToNonExistentDirectoryNew/newdir");
+ Path dst = path("/test/new/newdir");
rename(src, dst, false, true, false);
}
public void testRenameDirectoryMoveToExistingDirectory() throws Exception {
if (!renameSupported()) return;
- final String testDir = "testRenameDirectoryMoveToExistingDirectory";
- Path src = path(testDir + "/dir");
+ Path src = path("/test/hadoop/dir");
fs.mkdirs(src);
- createFile(path(testDir + "/dir/file1"));
- createFile(path(testDir + "/dir/subdir/file2"));
+ createFile(path("/test/hadoop/dir/file1"));
+ createFile(path("/test/hadoop/dir/subdir/file2"));
- Path dst = path(testDir + "New/newdir");
+ Path dst = path("/test/new/newdir");
fs.mkdirs(dst.getParent());
rename(src, dst, true, false, true);
assertFalse("Nested file1 exists",
- fs.exists(path(testDir + "/dir/file1")));
+ fs.exists(path("/test/hadoop/dir/file1")));
assertFalse("Nested file2 exists",
- fs.exists(path(testDir + "/dir/subdir/file2")));
+ fs.exists(path("/test/hadoop/dir/subdir/file2")));
assertTrue("Renamed nested file1 exists",
- fs.exists(path(testDir + "New/newdir/file1")));
+ fs.exists(path("/test/new/newdir/file1")));
assertTrue("Renamed nested exists",
- fs.exists(path(testDir + "New/newdir/subdir/file2")));
+ fs.exists(path("/test/new/newdir/subdir/file2")));
}
public void testRenameDirectoryAsExistingFile() throws Exception {
if (!renameSupported()) return;
- Path src = path("testRenameDirectoryAsExistingFile/dir");
+ Path src = path("/test/hadoop/dir");
fs.mkdirs(src);
- Path dst = path("testRenameDirectoryAsExistingFileNew/newfile");
+ Path dst = path("/test/new/newfile");
createFile(dst);
rename(src, dst, false, true, true);
}
public void testRenameDirectoryAsExistingDirectory() throws Exception {
if (!renameSupported()) return;
- final String testDir = "testRenameDirectoryAsExistingDirectory";
- Path src = path(testDir + "/dir");
+
+ Path src = path("/test/hadoop/dir");
fs.mkdirs(src);
- createFile(path(testDir + "/file1"));
- createFile(path(testDir + "/subdir/file2"));
-
- Path dst = path(testDir + "New/newdir");
+ createFile(path("/test/hadoop/dir/file1"));
+ createFile(path("/test/hadoop/dir/subdir/file2"));
+
+ Path dst = path("/test/new/newdir");
fs.mkdirs(dst);
rename(src, dst, true, false, true);
assertTrue("Destination changed",
- fs.exists(path(testDir + "New/newdir/dir")));
+ fs.exists(path("/test/new/newdir/dir")));
assertFalse("Nested file1 exists",
- fs.exists(path(testDir + "/dir/file1")));
+ fs.exists(path("/test/hadoop/dir/file1")));
assertFalse("Nested file2 exists",
- fs.exists(path(testDir + "/dir/subdir/file2")));
+ fs.exists(path("/test/hadoop/dir/subdir/file2")));
assertTrue("Renamed nested file1 exists",
- fs.exists(path(testDir + "New/newdir/dir/file1")));
+ fs.exists(path("/test/new/newdir/dir/file1")));
assertTrue("Renamed nested exists",
- fs.exists(path(testDir + "New/newdir/dir/subdir/file2")));
+ fs.exists(path("/test/new/newdir/dir/subdir/file2")));
}
public void testInputStreamClosedTwice() throws IOException {
//HADOOP-4760 according to Closeable#close() closing already-closed
//streams should have no effect.
- Path src = path("testInputStreamClosedTwice/file");
+ Path src = path("/test/hadoop/file");
createFile(src);
FSDataInputStream in = fs.open(src);
in.close();
@@ -552,13 +502,18 @@ public void testInputStreamClosedTwice() throws IOException {
public void testOutputStreamClosedTwice() throws IOException {
//HADOOP-4760 according to Closeable#close() closing already-closed
//streams should have no effect.
- Path src = path("testOutputStreamClosedTwice/file");
+ Path src = path("/test/hadoop/file");
FSDataOutputStream out = fs.create(src);
out.writeChar('H'); //write some data
out.close();
out.close();
}
-
+
+ protected Path path(String pathString) {
+ return new Path(pathString).makeQualified(fs.getUri(),
+ fs.getWorkingDirectory());
+ }
+
protected void createFile(Path path) throws IOException {
FSDataOutputStream out = fs.create(path);
out.write(data, 0, data.length);
@@ -586,7 +541,7 @@ public void testOverWriteAndRead() throws Exception {
byte[] filedata1 = dataset(blockSize * 2, 'A', 26);
byte[] filedata2 = dataset(blockSize * 2, 'a', 26);
- Path path = path("testOverWriteAndRead/file-overwrite");
+ Path path = path("/test/hadoop/file-overwrite");
writeAndRead(path, filedata1, blockSize, true, false);
writeAndRead(path, filedata2, blockSize, true, false);
writeAndRead(path, filedata1, blockSize * 2, true, false);
@@ -606,7 +561,7 @@ public void testFilesystemIsCaseSensitive() throws Exception {
LOG.info("Skipping test");
return;
}
- String mixedCaseFilename = "testFilesystemIsCaseSensitive";
+ String mixedCaseFilename = "/test/UPPER.TXT";
Path upper = path(mixedCaseFilename);
Path lower = path(StringUtils.toLowerCase(mixedCaseFilename));
assertFalse("File exists" + upper, fs.exists(upper));
@@ -637,7 +592,7 @@ public void testFilesystemIsCaseSensitive() throws Exception {
* @throws Exception on failures
*/
public void testZeroByteFilesAreFiles() throws Exception {
- Path src = path("testZeroByteFilesAreFiles");
+ Path src = path("/test/testZeroByteFilesAreFiles");
//create a zero byte file
FSDataOutputStream out = fs.create(src);
out.close();
@@ -650,7 +605,7 @@ public void testZeroByteFilesAreFiles() throws Exception {
* @throws Exception on failures
*/
public void testMultiByteFilesAreFiles() throws Exception {
- Path src = path("testMultiByteFilesAreFiles");
+ Path src = path("/test/testMultiByteFilesAreFiles");
FSDataOutputStream out = fs.create(src);
out.writeUTF("testMultiByteFilesAreFiles");
out.close();
@@ -674,14 +629,10 @@ public void testRootDirAlwaysExists() throws Exception {
* @throws Exception on failures
*/
public void testRenameRootDirForbidden() throws Exception {
- if (!rootDirTestEnabled()) {
- return;
- }
-
if (!renameSupported()) return;
rename(path("/"),
- path("testRenameRootDirForbidden"),
+ path("/test/newRootDir"),
false, true, false);
}
@@ -693,7 +644,7 @@ public void testRenameRootDirForbidden() throws Exception {
public void testRenameChildDirForbidden() throws Exception {
if (!renameSupported()) return;
LOG.info("testRenameChildDirForbidden");
- Path parentdir = path("testRenameChildDirForbidden");
+ Path parentdir = path("/test/parentdir");
fs.mkdirs(parentdir);
Path childFile = new Path(parentdir, "childfile");
createFile(childFile);
@@ -712,9 +663,9 @@ public void testRenameChildDirForbidden() throws Exception {
*/
public void testRenameToDirWithSamePrefixAllowed() throws Throwable {
if (!renameSupported()) return;
- final Path parentdir = path("testRenameToDirWithSamePrefixAllowed");
+ Path parentdir = path("test/parentdir");
fs.mkdirs(parentdir);
- final Path dest = path("testRenameToDirWithSamePrefixAllowedDest");
+ Path dest = path("test/parentdirdest");
rename(parentdir, dest, true, false, true);
}
@@ -726,7 +677,7 @@ public void testRenameDirToSelf() throws Throwable {
if (!renameSupported()) {
return;
}
- Path parentdir = path("testRenameDirToSelf");
+ Path parentdir = path("test/parentdir");
fs.mkdirs(parentdir);
Path child = new Path(parentdir, "child");
createFile(child);
@@ -745,7 +696,7 @@ public void testMoveDirUnderParent() throws Throwable {
if (!renameSupported()) {
return;
}
- Path testdir = path("testMoveDirUnderParent");
+ Path testdir = path("test/dir");
fs.mkdirs(testdir);
Path parent = testdir.getParent();
//the outcome here is ambiguous, so is not checked
@@ -760,7 +711,7 @@ public void testMoveDirUnderParent() throws Throwable {
*/
public void testRenameFileToSelf() throws Throwable {
if (!renameSupported()) return;
- Path filepath = path("testRenameFileToSelf");
+ Path filepath = path("test/file");
createFile(filepath);
//HDFS expects rename src, src -> true
rename(filepath, filepath, true, true, true);
@@ -774,7 +725,7 @@ public void testRenameFileToSelf() throws Throwable {
*/
public void testMoveFileUnderParent() throws Throwable {
if (!renameSupported()) return;
- Path filepath = path("testMoveFileUnderParent");
+ Path filepath = path("test/file");
createFile(filepath);
//HDFS expects rename src, src -> true
rename(filepath, filepath, true, true, true);
@@ -783,23 +734,15 @@ public void testMoveFileUnderParent() throws Throwable {
}
public void testLSRootDir() throws Throwable {
- if (!rootDirTestEnabled()) {
- return;
- }
-
Path dir = path("/");
- Path child = path("/FileSystemContractBaseTest");
+ Path child = path("/test");
createFile(child);
assertListFilesFinds(dir, child);
}
public void testListStatusRootDir() throws Throwable {
- if (!rootDirTestEnabled()) {
- return;
- }
-
Path dir = path("/");
- Path child = path("/FileSystemContractBaseTest");
+ Path child = path("/test");
createFile(child);
assertListStatusFinds(dir, child);
}
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestRawLocalFileSystemContract.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestRawLocalFileSystemContract.java
index b023c091d3..036fb6af97 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestRawLocalFileSystemContract.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestRawLocalFileSystemContract.java
@@ -33,8 +33,6 @@ public class TestRawLocalFileSystemContract extends FileSystemContractBaseTest {
private static final Logger LOG =
LoggerFactory.getLogger(TestRawLocalFileSystemContract.class);
- private final static Path TEST_BASE_DIR =
- new Path(GenericTestUtils.getTempPath(""));
@Before
public void setUp() throws Exception {
@@ -53,25 +51,21 @@ protected boolean renameSupported() {
return false;
}
- /**
- * Disabling testing root operation.
- *
- * Writing to root directory on the local file system may get permission
- * denied exception, or even worse, delete/overwrite files accidentally.
- */
- @Override
- protected boolean rootDirTestEnabled() {
- return false;
- }
-
@Override
public String getDefaultWorkingDirectory() {
return fs.getWorkingDirectory().toUri().getPath();
}
@Override
- protected Path getTestBaseDir() {
- return TEST_BASE_DIR;
+ protected Path path(String pathString) {
+ // For testWorkingDirectory
+ if (pathString.equals(getDefaultWorkingDirectory()) ||
+ pathString.equals(".") || pathString.equals("..")) {
+ return super.path(pathString);
+ }
+
+ return new Path(GenericTestUtils.getTempPath(pathString)).
+ makeQualified(fs.getUri(), fs.getWorkingDirectory());
}
@Override
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFileSystemContract.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFileSystemContract.java
index 5d47e47feb..f39ec81a79 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFileSystemContract.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFileSystemContract.java
@@ -57,13 +57,30 @@ public void setUp() throws Exception {
fs = S3ATestUtils.createTestFileSystem(conf);
basePath = fs.makeQualified(
- S3ATestUtils.createTestPath(new Path("s3afilesystemcontract")));
+ S3ATestUtils.createTestPath(new Path("/s3afilesystemcontract")));
super.setUp();
}
+ /**
+ * This path explicitly places all absolute paths under the per-test suite
+ * path directory; this allows the test to run in parallel.
+ * @param pathString path string as input
+ * @return a qualified path string.
+ */
+ protected Path path(String pathString) {
+ if (pathString.startsWith("/")) {
+ return fs.makeQualified(new Path(basePath, pathString));
+ } else {
+ return super.path(pathString);
+ }
+ }
+
@Override
- public Path getTestBaseDir() {
- return basePath;
+ protected void tearDown() throws Exception {
+ if (fs != null) {
+ fs.delete(basePath, true);
+ }
+ super.tearDown();
}
@Override
@@ -77,22 +94,22 @@ public void testRenameDirectoryAsExistingDirectory() throws Exception {
return;
}
- Path src = path("testRenameDirectoryAsExisting/dir");
+ Path src = path("/test/hadoop/dir");
fs.mkdirs(src);
- createFile(path("testRenameDirectoryAsExisting/dir/file1"));
- createFile(path("testRenameDirectoryAsExisting/dir/subdir/file2"));
+ createFile(path("/test/hadoop/dir/file1"));
+ createFile(path("/test/hadoop/dir/subdir/file2"));
- Path dst = path("testRenameDirectoryAsExisting/newdir");
+ Path dst = path("/test/new/newdir");
fs.mkdirs(dst);
rename(src, dst, true, false, true);
assertFalse("Nested file1 exists",
- fs.exists(path("testRenameDirectoryAsExisting/dir/file1")));
+ fs.exists(path("/test/hadoop/dir/file1")));
assertFalse("Nested file2 exists",
- fs.exists(path("testRenameDirectoryAsExisting/dir/subdir/file2")));
+ fs.exists(path("/test/hadoop/dir/subdir/file2")));
assertTrue("Renamed nested file1 exists",
- fs.exists(path("testRenameDirectoryAsExisting/newdir/file1")));
+ fs.exists(path("/test/new/newdir/file1")));
assertTrue("Renamed nested exists",
- fs.exists(path("testRenameDirectoryAsExisting/newdir/subdir/file2")));
+ fs.exists(path("/test/new/newdir/subdir/file2")));
}
// @Override