diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemContractBaseTest.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemContractBaseTest.java
index 6247959182..78ba1f928a 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemContractBaseTest.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemContractBaseTest.java
@@ -24,8 +24,9 @@
import junit.framework.TestCase;
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.security.AccessControlException;
@@ -45,8 +46,8 @@
*
*/
public abstract class FileSystemContractBaseTest extends TestCase {
- private static final Log LOG =
- LogFactory.getLog(FileSystemContractBaseTest.class);
+ private static final Logger LOG =
+ LoggerFactory.getLogger(FileSystemContractBaseTest.class);
protected final static String TEST_UMASK = "062";
protected FileSystem fs;
@@ -54,15 +55,46 @@ public abstract class FileSystemContractBaseTest extends TestCase {
@Override
protected void tearDown() throws Exception {
- try {
- if (fs != null) {
- fs.delete(path("/test"), true);
+ if (fs != null) {
+ // some cases use this absolute path
+ if (rootDirTestEnabled()) {
+ cleanupDir(path("/FileSystemContractBaseTest"));
}
+ // others use this relative path against test base directory
+ cleanupDir(getTestBaseDir());
+ }
+ super.tearDown();
+ }
+
+ private void cleanupDir(Path p) {
+ try {
+      LOG.info("Deleting {}", p);
+ fs.delete(p, true);
} catch (IOException e) {
- LOG.error("Error deleting /test: " + e, e);
+      LOG.error("Error deleting test dir: {}", p, e);
}
}
-
+
+ /**
+ * Test base directory for resolving relative test paths.
+ *
+   * The default value is /user/$USER/FileSystemContractBaseTest. Subclasses may
+ * set specific test base directory.
+ */
+ protected Path getTestBaseDir() {
+ return new Path(fs.getWorkingDirectory(), "FileSystemContractBaseTest");
+ }
+
+ /**
+ * For absolute path return the fully qualified path while for relative path
+ * return the fully qualified path against {@link #getTestBaseDir()}.
+ */
+ protected final Path path(String pathString) {
+ Path p = new Path(pathString).makeQualified(fs.getUri(), getTestBaseDir());
+ LOG.info("Resolving {} -> {}", pathString, p);
+ return p;
+ }
+
protected int getBlockSize() {
return 1024;
}
@@ -80,6 +112,17 @@ protected boolean renameSupported() {
return true;
}
+ /**
+ * Override this if the filesystem does not enable testing root directories.
+ *
+ * If this returns true, the test will create and delete test directories and
+ * files under root directory, which may have side effects, e.g. fail tests
+ * with PermissionDenied exceptions.
+ */
+ protected boolean rootDirTestEnabled() {
+ return true;
+ }
+
/**
* Override this if the filesystem is not case sensitive
* @return true if the case detection/preservation tests should run
@@ -102,24 +145,24 @@ public void testWorkingDirectory() throws Exception {
Path workDir = path(getDefaultWorkingDirectory());
assertEquals(workDir, fs.getWorkingDirectory());
- fs.setWorkingDirectory(path("."));
+ fs.setWorkingDirectory(fs.makeQualified(new Path(".")));
assertEquals(workDir, fs.getWorkingDirectory());
- fs.setWorkingDirectory(path(".."));
+ fs.setWorkingDirectory(fs.makeQualified(new Path("..")));
assertEquals(workDir.getParent(), fs.getWorkingDirectory());
- Path relativeDir = path("hadoop");
+ Path relativeDir = fs.makeQualified(new Path("testWorkingDirectory"));
fs.setWorkingDirectory(relativeDir);
assertEquals(relativeDir, fs.getWorkingDirectory());
- Path absoluteDir = path("/test/hadoop");
+ Path absoluteDir = path("/FileSystemContractBaseTest/testWorkingDirectory");
fs.setWorkingDirectory(absoluteDir);
assertEquals(absoluteDir, fs.getWorkingDirectory());
}
public void testMkdirs() throws Exception {
- Path testDir = path("/test/hadoop");
+ Path testDir = path("testMkdirs");
assertFalse(fs.exists(testDir));
assertFalse(fs.isFile(testDir));
@@ -145,14 +188,15 @@ public void testMkdirs() throws Exception {
}
public void testMkdirsFailsForSubdirectoryOfExistingFile() throws Exception {
- Path testDir = path("/test/hadoop");
+ Path testDir = path("testMkdirsFailsForSubdirectoryOfExistingFile");
assertFalse(fs.exists(testDir));
assertTrue(fs.mkdirs(testDir));
assertTrue(fs.exists(testDir));
- createFile(path("/test/hadoop/file"));
+ createFile(path("testMkdirsFailsForSubdirectoryOfExistingFile/file"));
- Path testSubDir = path("/test/hadoop/file/subdir");
+ Path testSubDir = path(
+ "testMkdirsFailsForSubdirectoryOfExistingFile/file/subdir");
try {
fs.mkdirs(testSubDir);
fail("Should throw IOException.");
@@ -167,7 +211,8 @@ public void testMkdirsFailsForSubdirectoryOfExistingFile() throws Exception {
// file missing execute permission.
}
- Path testDeepSubDir = path("/test/hadoop/file/deep/sub/dir");
+ Path testDeepSubDir = path(
+ "testMkdirsFailsForSubdirectoryOfExistingFile/file/deep/sub/dir");
try {
fs.mkdirs(testDeepSubDir);
fail("Should throw IOException.");
@@ -190,7 +235,7 @@ public void testMkdirsWithUmask() throws Exception {
String oldUmask = conf.get(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY);
try {
conf.set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, TEST_UMASK);
- final Path dir = path("/test/newDir");
+ final Path dir = path("newDir");
assertTrue(fs.mkdirs(dir, new FsPermission((short) 0777)));
FileStatus status = fs.getFileStatus(dir);
assertTrue(status.isDirectory());
@@ -223,7 +268,8 @@ private boolean isS3(FileSystem fileSystem) {
public void testGetFileStatusThrowsExceptionForNonExistentFile()
throws Exception {
try {
- fs.getFileStatus(path("/test/hadoop/file"));
+ fs.getFileStatus(
+ path("testGetFileStatusThrowsExceptionForNonExistentFile/file"));
fail("Should throw FileNotFoundException");
} catch (FileNotFoundException e) {
// expected
@@ -232,7 +278,8 @@ public void testGetFileStatusThrowsExceptionForNonExistentFile()
public void testListStatusThrowsExceptionForNonExistentFile() throws Exception {
try {
- fs.listStatus(path("/test/hadoop/file"));
+ fs.listStatus(
+ path("testListStatusThrowsExceptionForNonExistentFile/file"));
fail("Should throw FileNotFoundException");
} catch (FileNotFoundException fnfe) {
// expected
@@ -240,30 +287,32 @@ public void testListStatusThrowsExceptionForNonExistentFile() throws Exception {
}
public void testListStatus() throws Exception {
- Path[] testDirs = { path("/test/hadoop/a"),
- path("/test/hadoop/b"),
- path("/test/hadoop/c/1"), };
+ final Path[] testDirs = {
+ path("testListStatus/a"),
+ path("testListStatus/b"),
+ path("testListStatus/c/1")
+ };
assertFalse(fs.exists(testDirs[0]));
for (Path path : testDirs) {
assertTrue(fs.mkdirs(path));
}
- FileStatus[] paths = fs.listStatus(path("/test"));
+ FileStatus[] paths = fs.listStatus(path("."));
assertEquals(1, paths.length);
- assertEquals(path("/test/hadoop"), paths[0].getPath());
+ assertEquals(path("testListStatus"), paths[0].getPath());
- paths = fs.listStatus(path("/test/hadoop"));
+ paths = fs.listStatus(path("testListStatus"));
assertEquals(3, paths.length);
ArrayList list = new ArrayList();
for (FileStatus fileState : paths) {
list.add(fileState.getPath());
}
- assertTrue(list.contains(path("/test/hadoop/a")));
- assertTrue(list.contains(path("/test/hadoop/b")));
- assertTrue(list.contains(path("/test/hadoop/c")));
+ assertTrue(list.contains(path("testListStatus/a")));
+ assertTrue(list.contains(path("testListStatus/b")));
+ assertTrue(list.contains(path("testListStatus/c")));
- paths = fs.listStatus(path("/test/hadoop/a"));
+ paths = fs.listStatus(path("testListStatus/a"));
assertEquals(0, paths.length);
}
@@ -294,12 +343,12 @@ public void testWriteReadAndDeleteTwoBlocks() throws Exception {
* @throws IOException on IO failures
*/
protected void writeReadAndDelete(int len) throws IOException {
- Path path = path("/test/hadoop/file");
+ Path path = path("writeReadAndDelete/file");
writeAndRead(path, data, len, false, true);
}
public void testOverwrite() throws IOException {
- Path path = path("/test/hadoop/file");
+ Path path = path("testOverwrite/file");
fs.mkdirs(path.getParent());
@@ -325,7 +374,7 @@ public void testOverwrite() throws IOException {
}
public void testWriteInNonExistentDirectory() throws IOException {
- Path path = path("/test/hadoop/file");
+ Path path = path("testWriteInNonExistentDirectory/file");
assertFalse("Parent exists", fs.exists(path.getParent()));
createFile(path);
@@ -335,15 +384,15 @@ public void testWriteInNonExistentDirectory() throws IOException {
}
public void testDeleteNonExistentFile() throws IOException {
- Path path = path("/test/hadoop/file");
+ Path path = path("testDeleteNonExistentFile/file");
assertFalse("Path exists: " + path, fs.exists(path));
assertFalse("No deletion", fs.delete(path, true));
}
public void testDeleteRecursively() throws IOException {
- Path dir = path("/test/hadoop");
- Path file = path("/test/hadoop/file");
- Path subdir = path("/test/hadoop/subdir");
+ Path dir = path("testDeleteRecursively");
+ Path file = path("testDeleteRecursively/file");
+ Path subdir = path("testDeleteRecursively/subdir");
createFile(file);
assertTrue("Created subdir", fs.mkdirs(subdir));
@@ -369,7 +418,7 @@ public void testDeleteRecursively() throws IOException {
}
public void testDeleteEmptyDirectory() throws IOException {
- Path dir = path("/test/hadoop");
+ Path dir = path("testDeleteEmptyDirectory");
assertTrue(fs.mkdirs(dir));
assertTrue("Dir exists", fs.exists(dir));
assertTrue("Deleted", fs.delete(dir, false));
@@ -379,26 +428,26 @@ public void testDeleteEmptyDirectory() throws IOException {
public void testRenameNonExistentPath() throws Exception {
if (!renameSupported()) return;
- Path src = path("/test/hadoop/path");
- Path dst = path("/test/new/newpath");
+ Path src = path("testRenameNonExistentPath/path");
+ Path dst = path("testRenameNonExistentPathNew/newpath");
rename(src, dst, false, false, false);
}
public void testRenameFileMoveToNonExistentDirectory() throws Exception {
if (!renameSupported()) return;
- Path src = path("/test/hadoop/file");
+ Path src = path("testRenameFileMoveToNonExistentDirectory/file");
createFile(src);
- Path dst = path("/test/new/newfile");
+ Path dst = path("testRenameFileMoveToNonExistentDirectoryNew/newfile");
rename(src, dst, false, true, false);
}
public void testRenameFileMoveToExistingDirectory() throws Exception {
if (!renameSupported()) return;
- Path src = path("/test/hadoop/file");
+ Path src = path("testRenameFileMoveToExistingDirectory/file");
createFile(src);
- Path dst = path("/test/new/newfile");
+ Path dst = path("testRenameFileMoveToExistingDirectoryNew/newfile");
fs.mkdirs(dst.getParent());
rename(src, dst, true, false, true);
}
@@ -406,9 +455,9 @@ public void testRenameFileMoveToExistingDirectory() throws Exception {
public void testRenameFileAsExistingFile() throws Exception {
if (!renameSupported()) return;
- Path src = path("/test/hadoop/file");
+ Path src = path("testRenameFileAsExistingFile/file");
createFile(src);
- Path dst = path("/test/new/newfile");
+ Path dst = path("testRenameFileAsExistingFileNew/newfile");
createFile(dst);
rename(src, dst, false, true, true);
}
@@ -416,83 +465,84 @@ public void testRenameFileAsExistingFile() throws Exception {
public void testRenameFileAsExistingDirectory() throws Exception {
if (!renameSupported()) return;
- Path src = path("/test/hadoop/file");
+ Path src = path("testRenameFileAsExistingDirectory/file");
createFile(src);
- Path dst = path("/test/new/newdir");
+ Path dst = path("testRenameFileAsExistingDirectoryNew/newdir");
fs.mkdirs(dst);
rename(src, dst, true, false, true);
- assertIsFile(path("/test/new/newdir/file"));
+ assertIsFile(path("testRenameFileAsExistingDirectoryNew/newdir/file"));
}
public void testRenameDirectoryMoveToNonExistentDirectory()
throws Exception {
if (!renameSupported()) return;
- Path src = path("/test/hadoop/dir");
+ Path src = path("testRenameDirectoryMoveToNonExistentDirectory/dir");
fs.mkdirs(src);
- Path dst = path("/test/new/newdir");
+ Path dst = path("testRenameDirectoryMoveToNonExistentDirectoryNew/newdir");
rename(src, dst, false, true, false);
}
public void testRenameDirectoryMoveToExistingDirectory() throws Exception {
if (!renameSupported()) return;
+ final String testDir = "testRenameDirectoryMoveToExistingDirectory";
- Path src = path("/test/hadoop/dir");
+ Path src = path(testDir + "/dir");
fs.mkdirs(src);
- createFile(path("/test/hadoop/dir/file1"));
- createFile(path("/test/hadoop/dir/subdir/file2"));
+ createFile(path(testDir + "/dir/file1"));
+ createFile(path(testDir + "/dir/subdir/file2"));
- Path dst = path("/test/new/newdir");
+ Path dst = path(testDir + "New/newdir");
fs.mkdirs(dst.getParent());
rename(src, dst, true, false, true);
assertFalse("Nested file1 exists",
- fs.exists(path("/test/hadoop/dir/file1")));
+ fs.exists(path(testDir + "/dir/file1")));
assertFalse("Nested file2 exists",
- fs.exists(path("/test/hadoop/dir/subdir/file2")));
+ fs.exists(path(testDir + "/dir/subdir/file2")));
assertTrue("Renamed nested file1 exists",
- fs.exists(path("/test/new/newdir/file1")));
+ fs.exists(path(testDir + "New/newdir/file1")));
assertTrue("Renamed nested exists",
- fs.exists(path("/test/new/newdir/subdir/file2")));
+ fs.exists(path(testDir + "New/newdir/subdir/file2")));
}
public void testRenameDirectoryAsExistingFile() throws Exception {
if (!renameSupported()) return;
- Path src = path("/test/hadoop/dir");
+ Path src = path("testRenameDirectoryAsExistingFile/dir");
fs.mkdirs(src);
- Path dst = path("/test/new/newfile");
+ Path dst = path("testRenameDirectoryAsExistingFileNew/newfile");
createFile(dst);
rename(src, dst, false, true, true);
}
public void testRenameDirectoryAsExistingDirectory() throws Exception {
if (!renameSupported()) return;
-
- Path src = path("/test/hadoop/dir");
+ final String testDir = "testRenameDirectoryAsExistingDirectory";
+ Path src = path(testDir + "/dir");
fs.mkdirs(src);
- createFile(path("/test/hadoop/dir/file1"));
- createFile(path("/test/hadoop/dir/subdir/file2"));
-
- Path dst = path("/test/new/newdir");
+    createFile(path(testDir + "/dir/file1"));
+    createFile(path(testDir + "/dir/subdir/file2"));
+
+ Path dst = path(testDir + "New/newdir");
fs.mkdirs(dst);
rename(src, dst, true, false, true);
assertTrue("Destination changed",
- fs.exists(path("/test/new/newdir/dir")));
+ fs.exists(path(testDir + "New/newdir/dir")));
assertFalse("Nested file1 exists",
- fs.exists(path("/test/hadoop/dir/file1")));
+ fs.exists(path(testDir + "/dir/file1")));
assertFalse("Nested file2 exists",
- fs.exists(path("/test/hadoop/dir/subdir/file2")));
+ fs.exists(path(testDir + "/dir/subdir/file2")));
assertTrue("Renamed nested file1 exists",
- fs.exists(path("/test/new/newdir/dir/file1")));
+ fs.exists(path(testDir + "New/newdir/dir/file1")));
assertTrue("Renamed nested exists",
- fs.exists(path("/test/new/newdir/dir/subdir/file2")));
+ fs.exists(path(testDir + "New/newdir/dir/subdir/file2")));
}
public void testInputStreamClosedTwice() throws IOException {
//HADOOP-4760 according to Closeable#close() closing already-closed
//streams should have no effect.
- Path src = path("/test/hadoop/file");
+ Path src = path("testInputStreamClosedTwice/file");
createFile(src);
FSDataInputStream in = fs.open(src);
in.close();
@@ -502,18 +552,13 @@ public void testInputStreamClosedTwice() throws IOException {
public void testOutputStreamClosedTwice() throws IOException {
//HADOOP-4760 according to Closeable#close() closing already-closed
//streams should have no effect.
- Path src = path("/test/hadoop/file");
+ Path src = path("testOutputStreamClosedTwice/file");
FSDataOutputStream out = fs.create(src);
out.writeChar('H'); //write some data
out.close();
out.close();
}
-
- protected Path path(String pathString) {
- return new Path(pathString).makeQualified(fs.getUri(),
- fs.getWorkingDirectory());
- }
-
+
protected void createFile(Path path) throws IOException {
FSDataOutputStream out = fs.create(path);
out.write(data, 0, data.length);
@@ -541,7 +586,7 @@ public void testOverWriteAndRead() throws Exception {
byte[] filedata1 = dataset(blockSize * 2, 'A', 26);
byte[] filedata2 = dataset(blockSize * 2, 'a', 26);
- Path path = path("/test/hadoop/file-overwrite");
+ Path path = path("testOverWriteAndRead/file-overwrite");
writeAndRead(path, filedata1, blockSize, true, false);
writeAndRead(path, filedata2, blockSize, true, false);
writeAndRead(path, filedata1, blockSize * 2, true, false);
@@ -561,7 +606,7 @@ public void testFilesystemIsCaseSensitive() throws Exception {
LOG.info("Skipping test");
return;
}
- String mixedCaseFilename = "/test/UPPER.TXT";
+ String mixedCaseFilename = "testFilesystemIsCaseSensitive";
Path upper = path(mixedCaseFilename);
Path lower = path(StringUtils.toLowerCase(mixedCaseFilename));
assertFalse("File exists" + upper, fs.exists(upper));
@@ -592,7 +637,7 @@ public void testFilesystemIsCaseSensitive() throws Exception {
* @throws Exception on failures
*/
public void testZeroByteFilesAreFiles() throws Exception {
- Path src = path("/test/testZeroByteFilesAreFiles");
+ Path src = path("testZeroByteFilesAreFiles");
//create a zero byte file
FSDataOutputStream out = fs.create(src);
out.close();
@@ -605,7 +650,7 @@ public void testZeroByteFilesAreFiles() throws Exception {
* @throws Exception on failures
*/
public void testMultiByteFilesAreFiles() throws Exception {
- Path src = path("/test/testMultiByteFilesAreFiles");
+ Path src = path("testMultiByteFilesAreFiles");
FSDataOutputStream out = fs.create(src);
out.writeUTF("testMultiByteFilesAreFiles");
out.close();
@@ -629,10 +674,14 @@ public void testRootDirAlwaysExists() throws Exception {
* @throws Exception on failures
*/
public void testRenameRootDirForbidden() throws Exception {
+ if (!rootDirTestEnabled()) {
+ return;
+ }
+
if (!renameSupported()) return;
rename(path("/"),
- path("/test/newRootDir"),
+ path("testRenameRootDirForbidden"),
false, true, false);
}
@@ -644,7 +693,7 @@ public void testRenameRootDirForbidden() throws Exception {
public void testRenameChildDirForbidden() throws Exception {
if (!renameSupported()) return;
LOG.info("testRenameChildDirForbidden");
- Path parentdir = path("/test/parentdir");
+ Path parentdir = path("testRenameChildDirForbidden");
fs.mkdirs(parentdir);
Path childFile = new Path(parentdir, "childfile");
createFile(childFile);
@@ -663,9 +712,9 @@ public void testRenameChildDirForbidden() throws Exception {
*/
public void testRenameToDirWithSamePrefixAllowed() throws Throwable {
if (!renameSupported()) return;
- Path parentdir = path("test/parentdir");
+ final Path parentdir = path("testRenameToDirWithSamePrefixAllowed");
fs.mkdirs(parentdir);
- Path dest = path("test/parentdirdest");
+ final Path dest = path("testRenameToDirWithSamePrefixAllowedDest");
rename(parentdir, dest, true, false, true);
}
@@ -677,7 +726,7 @@ public void testRenameDirToSelf() throws Throwable {
if (!renameSupported()) {
return;
}
- Path parentdir = path("test/parentdir");
+ Path parentdir = path("testRenameDirToSelf");
fs.mkdirs(parentdir);
Path child = new Path(parentdir, "child");
createFile(child);
@@ -696,7 +745,7 @@ public void testMoveDirUnderParent() throws Throwable {
if (!renameSupported()) {
return;
}
- Path testdir = path("test/dir");
+ Path testdir = path("testMoveDirUnderParent");
fs.mkdirs(testdir);
Path parent = testdir.getParent();
//the outcome here is ambiguous, so is not checked
@@ -711,7 +760,7 @@ public void testMoveDirUnderParent() throws Throwable {
*/
public void testRenameFileToSelf() throws Throwable {
if (!renameSupported()) return;
- Path filepath = path("test/file");
+ Path filepath = path("testRenameFileToSelf");
createFile(filepath);
//HDFS expects rename src, src -> true
rename(filepath, filepath, true, true, true);
@@ -725,7 +774,7 @@ public void testRenameFileToSelf() throws Throwable {
*/
public void testMoveFileUnderParent() throws Throwable {
if (!renameSupported()) return;
- Path filepath = path("test/file");
+ Path filepath = path("testMoveFileUnderParent");
createFile(filepath);
//HDFS expects rename src, src -> true
rename(filepath, filepath, true, true, true);
@@ -734,15 +783,23 @@ public void testMoveFileUnderParent() throws Throwable {
}
public void testLSRootDir() throws Throwable {
+ if (!rootDirTestEnabled()) {
+ return;
+ }
+
Path dir = path("/");
- Path child = path("/test");
+ Path child = path("/FileSystemContractBaseTest");
createFile(child);
assertListFilesFinds(dir, child);
}
public void testListStatusRootDir() throws Throwable {
+ if (!rootDirTestEnabled()) {
+ return;
+ }
+
Path dir = path("/");
- Path child = path("/test");
+ Path child = path("/FileSystemContractBaseTest");
createFile(child);
assertListStatusFinds(dir, child);
}
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestRawLocalFileSystemContract.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestRawLocalFileSystemContract.java
index 036fb6af97..b023c091d3 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestRawLocalFileSystemContract.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestRawLocalFileSystemContract.java
@@ -33,6 +33,8 @@ public class TestRawLocalFileSystemContract extends FileSystemContractBaseTest {
private static final Logger LOG =
LoggerFactory.getLogger(TestRawLocalFileSystemContract.class);
+ private final static Path TEST_BASE_DIR =
+ new Path(GenericTestUtils.getTempPath(""));
@Before
public void setUp() throws Exception {
@@ -51,21 +53,25 @@ protected boolean renameSupported() {
return false;
}
+ /**
+   * Disables testing of operations on the root directory.
+ *
+ * Writing to root directory on the local file system may get permission
+ * denied exception, or even worse, delete/overwrite files accidentally.
+ */
+ @Override
+ protected boolean rootDirTestEnabled() {
+ return false;
+ }
+
@Override
public String getDefaultWorkingDirectory() {
return fs.getWorkingDirectory().toUri().getPath();
}
@Override
- protected Path path(String pathString) {
- // For testWorkingDirectory
- if (pathString.equals(getDefaultWorkingDirectory()) ||
- pathString.equals(".") || pathString.equals("..")) {
- return super.path(pathString);
- }
-
- return new Path(GenericTestUtils.getTempPath(pathString)).
- makeQualified(fs.getUri(), fs.getWorkingDirectory());
+ protected Path getTestBaseDir() {
+ return TEST_BASE_DIR;
}
@Override
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFileSystemContract.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFileSystemContract.java
index f39ec81a79..5d47e47feb 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFileSystemContract.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFileSystemContract.java
@@ -57,30 +57,13 @@ public void setUp() throws Exception {
fs = S3ATestUtils.createTestFileSystem(conf);
basePath = fs.makeQualified(
- S3ATestUtils.createTestPath(new Path("/s3afilesystemcontract")));
+ S3ATestUtils.createTestPath(new Path("s3afilesystemcontract")));
super.setUp();
}
- /**
- * This path explicitly places all absolute paths under the per-test suite
- * path directory; this allows the test to run in parallel.
- * @param pathString path string as input
- * @return a qualified path string.
- */
- protected Path path(String pathString) {
- if (pathString.startsWith("/")) {
- return fs.makeQualified(new Path(basePath, pathString));
- } else {
- return super.path(pathString);
- }
- }
-
@Override
- protected void tearDown() throws Exception {
- if (fs != null) {
- fs.delete(basePath, true);
- }
- super.tearDown();
+ public Path getTestBaseDir() {
+ return basePath;
}
@Override
@@ -94,22 +77,22 @@ public void testRenameDirectoryAsExistingDirectory() throws Exception {
return;
}
- Path src = path("/test/hadoop/dir");
+ Path src = path("testRenameDirectoryAsExisting/dir");
fs.mkdirs(src);
- createFile(path("/test/hadoop/dir/file1"));
- createFile(path("/test/hadoop/dir/subdir/file2"));
+ createFile(path("testRenameDirectoryAsExisting/dir/file1"));
+ createFile(path("testRenameDirectoryAsExisting/dir/subdir/file2"));
- Path dst = path("/test/new/newdir");
+ Path dst = path("testRenameDirectoryAsExisting/newdir");
fs.mkdirs(dst);
rename(src, dst, true, false, true);
assertFalse("Nested file1 exists",
- fs.exists(path("/test/hadoop/dir/file1")));
+ fs.exists(path("testRenameDirectoryAsExisting/dir/file1")));
assertFalse("Nested file2 exists",
- fs.exists(path("/test/hadoop/dir/subdir/file2")));
+ fs.exists(path("testRenameDirectoryAsExisting/dir/subdir/file2")));
assertTrue("Renamed nested file1 exists",
- fs.exists(path("/test/new/newdir/file1")));
+ fs.exists(path("testRenameDirectoryAsExisting/newdir/file1")));
assertTrue("Renamed nested exists",
- fs.exists(path("/test/new/newdir/subdir/file2")));
+ fs.exists(path("testRenameDirectoryAsExisting/newdir/subdir/file2")));
}
// @Override