From 6a52b5e14495c5b2e0257aec65e61acd43aef309 Mon Sep 17 00:00:00 2001
From: Akira Ajisaka
Date: Thu, 25 May 2017 15:36:14 +0900
Subject: [PATCH] HADOOP-14180. FileSystem contract tests to replace JUnit 3
 with 4. Contributed by Xiaobing Zhou and Mingliang Liu.

---
 .../hadoop/fs/FileSystemContractBaseTest.java | 142 ++++++++++++------
 .../hdfs/TestHDFSFileSystemContract.java      |  12 +-
 .../web/TestWebHdfsFileSystemContract.java    |  18 ++-
 .../oss/TestAliyunOSSFileSystemContract.java  | 130 ++++++++--------
 .../fs/s3a/ITestS3AFileSystemContract.java    |  21 +--
 .../NativeS3FileSystemContractBaseTest.java   |  26 +++-
 .../live/TestAdlFileSystemContractLive.java   |  25 ++-
 ...NativeAzureFileSystemContractEmulator.java |  20 ++-
 ...TestNativeAzureFileSystemContractLive.java |  32 ++--
 ...stNativeAzureFileSystemContractMocked.java |  11 +-
 ...veAzureFileSystemContractPageBlobLive.java |  25 +--
 .../fs/swift/TestSwiftFileSystemContract.java |  16 +-
 12 files changed, 280 insertions(+), 198 deletions(-)

diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemContractBaseTest.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemContractBaseTest.java
index 040e9c8074..92e2135e0b 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemContractBaseTest.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileSystemContractBaseTest.java
@@ -22,8 +22,6 @@
 import java.io.IOException;
 import java.util.ArrayList;
 
-import junit.framework.TestCase;
-
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
@@ -32,6 +30,15 @@
 import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.util.StringUtils;
 
+import static org.junit.Assert.*;
+import static org.junit.Assume.assumeTrue;
+
+import org.junit.After;
+import org.junit.Rule;
+import org.junit.Test;
+import org.junit.rules.ExpectedException;
+import org.junit.rules.Timeout;
+
 /**
  *
  * A collection of tests for the contract of the {@link FileSystem}.
@@ -41,11 +48,11 @@
  *
  *
  * To test a given {@link FileSystem} implementation create a subclass of this
- * test and override {@link #setUp()} to initialize the fs
+ * test and add a @Before method to initialize the fs
  * {@link FileSystem} instance variable.
  *
*/ -public abstract class FileSystemContractBaseTest extends TestCase { +public abstract class FileSystemContractBaseTest { private static final Logger LOG = LoggerFactory.getLogger(FileSystemContractBaseTest.class); @@ -53,8 +60,13 @@ public abstract class FileSystemContractBaseTest extends TestCase { protected FileSystem fs; protected byte[] data = dataset(getBlockSize() * 2, 0, 255); - @Override - protected void tearDown() throws Exception { + @Rule + public Timeout globalTimeout = new Timeout(30000); + @Rule + public ExpectedException thrown = ExpectedException.none(); + + @After + public void tearDown() throws Exception { if (fs != null) { // some cases use this absolute path if (rootDirTestEnabled()) { @@ -63,7 +75,6 @@ protected void tearDown() throws Exception { // others use this relative path against test base directory cleanupDir(getTestBaseDir()); } - super.tearDown(); } private void cleanupDir(Path p) { @@ -131,6 +142,7 @@ protected boolean filesystemIsCaseSensitive() { return true; } + @Test public void testFsStatus() throws Exception { FsStatus fsStatus = fs.getStatus(); assertNotNull(fsStatus); @@ -140,6 +152,7 @@ public void testFsStatus() throws Exception { assertTrue(fsStatus.getCapacity() >= 0); } + @Test public void testWorkingDirectory() throws Exception { Path workDir = path(getDefaultWorkingDirectory()); @@ -160,7 +173,8 @@ public void testWorkingDirectory() throws Exception { assertEquals(absoluteDir, fs.getWorkingDirectory()); } - + + @Test public void testMkdirs() throws Exception { Path testDir = path("testMkdirs"); assertFalse(fs.exists(testDir)); @@ -187,6 +201,7 @@ public void testMkdirs() throws Exception { } + @Test public void testMkdirsFailsForSubdirectoryOfExistingFile() throws Exception { Path testDir = path("testMkdirsFailsForSubdirectoryOfExistingFile"); assertFalse(fs.exists(testDir)); @@ -229,6 +244,7 @@ public void testMkdirsFailsForSubdirectoryOfExistingFile() throws Exception { } + @Test public void testMkdirsWithUmask() throws Exception { if (!isS3(fs)) { Configuration conf = fs.getConf(); @@ -265,6 +281,7 @@ private boolean isS3(FileSystem fileSystem) { return false; } + @Test public void testGetFileStatusThrowsExceptionForNonExistentFile() throws Exception { try { @@ -276,6 +293,7 @@ public void testGetFileStatusThrowsExceptionForNonExistentFile() } } + @Test public void testListStatusThrowsExceptionForNonExistentFile() throws Exception { try { fs.listStatus( @@ -286,6 +304,7 @@ public void testListStatusThrowsExceptionForNonExistentFile() throws Exception { } } + @Test public void testListStatus() throws Exception { final Path[] testDirs = { path("testListStatus/a"), @@ -316,22 +335,27 @@ public void testListStatus() throws Exception { assertEquals(0, paths.length); } + @Test public void testWriteReadAndDeleteEmptyFile() throws Exception { writeReadAndDelete(0); } + @Test public void testWriteReadAndDeleteHalfABlock() throws Exception { writeReadAndDelete(getBlockSize() / 2); } + @Test public void testWriteReadAndDeleteOneBlock() throws Exception { writeReadAndDelete(getBlockSize()); } + @Test public void testWriteReadAndDeleteOneAndAHalfBlocks() throws Exception { writeReadAndDelete(getBlockSize() + (getBlockSize() / 2)); } - + + @Test public void testWriteReadAndDeleteTwoBlocks() throws Exception { writeReadAndDelete(getBlockSize() * 2); } @@ -346,7 +370,8 @@ protected void writeReadAndDelete(int len) throws IOException { Path path = path("writeReadAndDelete/file"); writeAndRead(path, data, len, false, true); } - + + @Test public void 
testOverwrite() throws IOException { Path path = path("testOverwrite/file"); @@ -372,7 +397,8 @@ public void testOverwrite() throws IOException { assertEquals("Length", data.length, fs.getFileStatus(path).getLen()); } - + + @Test public void testWriteInNonExistentDirectory() throws IOException { Path path = path("testWriteInNonExistentDirectory/file"); assertFalse("Parent exists", fs.exists(path.getParent())); @@ -383,12 +409,14 @@ public void testWriteInNonExistentDirectory() throws IOException { assertTrue("Parent exists", fs.exists(path.getParent())); } + @Test public void testDeleteNonExistentFile() throws IOException { Path path = path("testDeleteNonExistentFile/file"); assertFalse("Path exists: " + path, fs.exists(path)); assertFalse("No deletion", fs.delete(path, true)); } - + + @Test public void testDeleteRecursively() throws IOException { Path dir = path("testDeleteRecursively"); Path file = path("testDeleteRecursively/file"); @@ -416,7 +444,8 @@ public void testDeleteRecursively() throws IOException { assertFalse("Dir doesn't exist", fs.exists(dir)); assertFalse("Subdir doesn't exist", fs.exists(subdir)); } - + + @Test public void testDeleteEmptyDirectory() throws IOException { Path dir = path("testDeleteEmptyDirectory"); assertTrue(fs.mkdirs(dir)); @@ -424,17 +453,19 @@ public void testDeleteEmptyDirectory() throws IOException { assertTrue("Deleted", fs.delete(dir, false)); assertFalse("Dir doesn't exist", fs.exists(dir)); } - + + @Test public void testRenameNonExistentPath() throws Exception { - if (!renameSupported()) return; + assumeTrue(renameSupported()); Path src = path("testRenameNonExistentPath/path"); Path dst = path("testRenameNonExistentPathNew/newpath"); rename(src, dst, false, false, false); } + @Test public void testRenameFileMoveToNonExistentDirectory() throws Exception { - if (!renameSupported()) return; + assumeTrue(renameSupported()); Path src = path("testRenameFileMoveToNonExistentDirectory/file"); createFile(src); @@ -442,8 +473,9 @@ public void testRenameFileMoveToNonExistentDirectory() throws Exception { rename(src, dst, false, true, false); } + @Test public void testRenameFileMoveToExistingDirectory() throws Exception { - if (!renameSupported()) return; + assumeTrue(renameSupported()); Path src = path("testRenameFileMoveToExistingDirectory/file"); createFile(src); @@ -452,8 +484,9 @@ public void testRenameFileMoveToExistingDirectory() throws Exception { rename(src, dst, true, false, true); } + @Test public void testRenameFileAsExistingFile() throws Exception { - if (!renameSupported()) return; + assumeTrue(renameSupported()); Path src = path("testRenameFileAsExistingFile/file"); createFile(src); @@ -462,8 +495,9 @@ public void testRenameFileAsExistingFile() throws Exception { rename(src, dst, false, true, true); } + @Test public void testRenameFileAsExistingDirectory() throws Exception { - if (!renameSupported()) return; + assumeTrue(renameSupported()); Path src = path("testRenameFileAsExistingDirectory/file"); createFile(src); @@ -472,19 +506,21 @@ public void testRenameFileAsExistingDirectory() throws Exception { rename(src, dst, true, false, true); assertIsFile(path("testRenameFileAsExistingDirectoryNew/newdir/file")); } - + + @Test public void testRenameDirectoryMoveToNonExistentDirectory() throws Exception { - if (!renameSupported()) return; + assumeTrue(renameSupported()); Path src = path("testRenameDirectoryMoveToNonExistentDirectory/dir"); fs.mkdirs(src); Path dst = path("testRenameDirectoryMoveToNonExistentDirectoryNew/newdir"); rename(src, dst, 
false, true, false); } - + + @Test public void testRenameDirectoryMoveToExistingDirectory() throws Exception { - if (!renameSupported()) return; + assumeTrue(renameSupported()); Path src = path("testRenameDirectoryMoveToExistingDirectory/dir"); fs.mkdirs(src); createFile(path(src + "/file1")); @@ -503,9 +539,10 @@ public void testRenameDirectoryMoveToExistingDirectory() throws Exception { assertTrue("Renamed nested exists", fs.exists(path(dst + "/subdir/file2"))); } - + + @Test public void testRenameDirectoryAsExistingFile() throws Exception { - if (!renameSupported()) return; + assumeTrue(renameSupported()); Path src = path("testRenameDirectoryAsExistingFile/dir"); fs.mkdirs(src); @@ -513,9 +550,10 @@ public void testRenameDirectoryAsExistingFile() throws Exception { createFile(dst); rename(src, dst, false, true, true); } - + + @Test public void testRenameDirectoryAsExistingDirectory() throws Exception { - if (!renameSupported()) return; + assumeTrue(renameSupported()); final Path src = path("testRenameDirectoryAsExistingDirectory/dir"); fs.mkdirs(src); createFile(path(src + "/file1")); @@ -536,6 +574,7 @@ public void testRenameDirectoryAsExistingDirectory() throws Exception { fs.exists(path(dst + "/dir/subdir/file2"))); } + @Test public void testInputStreamClosedTwice() throws IOException { //HADOOP-4760 according to Closeable#close() closing already-closed //streams should have no effect. @@ -545,7 +584,8 @@ public void testInputStreamClosedTwice() throws IOException { in.close(); in.close(); } - + + @Test public void testOutputStreamClosedTwice() throws IOException { //HADOOP-4760 according to Closeable#close() closing already-closed //streams should have no effect. @@ -577,7 +617,7 @@ protected void rename(Path src, Path dst, boolean renameSucceeded, * * @throws Exception on any failure */ - + @Test public void testOverWriteAndRead() throws Exception { int blockSize = getBlockSize(); @@ -598,6 +638,7 @@ public void testOverWriteAndRead() throws Exception { * its lower case version is not there. 
* @throws Exception */ + @Test public void testFilesystemIsCaseSensitive() throws Exception { if (!filesystemIsCaseSensitive()) { LOG.info("Skipping test"); @@ -633,6 +674,7 @@ public void testFilesystemIsCaseSensitive() throws Exception { * directory or symlink * @throws Exception on failures */ + @Test public void testZeroByteFilesAreFiles() throws Exception { Path src = path("testZeroByteFilesAreFiles"); //create a zero byte file @@ -646,6 +688,7 @@ public void testZeroByteFilesAreFiles() throws Exception { * directory or symlink * @throws Exception on failures */ + @Test public void testMultiByteFilesAreFiles() throws Exception { Path src = path("testMultiByteFilesAreFiles"); FSDataOutputStream out = fs.create(src); @@ -658,6 +701,7 @@ public void testMultiByteFilesAreFiles() throws Exception { * Assert that root directory renames are not allowed * @throws Exception on failures */ + @Test public void testRootDirAlwaysExists() throws Exception { //this will throw an exception if the path is not found fs.getFileStatus(path("/")); @@ -670,12 +714,10 @@ public void testRootDirAlwaysExists() throws Exception { * Assert that root directory renames are not allowed * @throws Exception on failures */ + @Test public void testRenameRootDirForbidden() throws Exception { - if (!rootDirTestEnabled()) { - return; - } - - if (!renameSupported()) return; + assumeTrue(rootDirTestEnabled()); + assumeTrue(renameSupported()); rename(path("/"), path("testRenameRootDirForbidden"), @@ -687,8 +729,9 @@ public void testRenameRootDirForbidden() throws Exception { * of itself is forbidden * @throws Exception on failures */ + @Test public void testRenameChildDirForbidden() throws Exception { - if (!renameSupported()) return; + assumeTrue(renameSupported()); LOG.info("testRenameChildDirForbidden"); Path parentdir = path("testRenameChildDirForbidden"); fs.mkdirs(parentdir); @@ -707,8 +750,9 @@ public void testRenameChildDirForbidden() throws Exception { * This a sanity check to make sure that any filesystem's handling of * renames doesn't cause any regressions */ + @Test public void testRenameToDirWithSamePrefixAllowed() throws Throwable { - if (!renameSupported()) return; + assumeTrue(renameSupported()); final Path parentdir = path("testRenameToDirWithSamePrefixAllowed"); fs.mkdirs(parentdir); final Path dest = path("testRenameToDirWithSamePrefixAllowedDest"); @@ -719,10 +763,9 @@ public void testRenameToDirWithSamePrefixAllowed() throws Throwable { * trying to rename a directory onto itself should fail, * preserving everything underneath. */ + @Test public void testRenameDirToSelf() throws Throwable { - if (!renameSupported()) { - return; - } + assumeTrue(renameSupported()); Path parentdir = path("testRenameDirToSelf"); fs.mkdirs(parentdir); Path child = new Path(parentdir, "child"); @@ -738,10 +781,9 @@ public void testRenameDirToSelf() throws Throwable { * a destination path of its original name, which should then fail. 
* The source path and the destination path should still exist afterwards */ + @Test public void testMoveDirUnderParent() throws Throwable { - if (!renameSupported()) { - return; - } + assumeTrue(renameSupported()); Path testdir = path("testMoveDirUnderParent"); fs.mkdirs(testdir); Path parent = testdir.getParent(); @@ -755,8 +797,9 @@ public void testMoveDirUnderParent() throws Throwable { * trying to rename a file onto itself should succeed (it's a no-op) * */ + @Test public void testRenameFileToSelf() throws Throwable { - if (!renameSupported()) return; + assumeTrue(renameSupported()); Path filepath = path("testRenameFileToSelf"); createFile(filepath); //HDFS expects rename src, src -> true @@ -769,8 +812,9 @@ public void testRenameFileToSelf() throws Throwable { * trying to move a file into it's parent dir should succeed * again: no-op */ + @Test public void testMoveFileUnderParent() throws Throwable { - if (!renameSupported()) return; + assumeTrue(renameSupported()); Path filepath = path("testMoveFileUnderParent"); createFile(filepath); //HDFS expects rename src, src -> true @@ -779,10 +823,9 @@ public void testMoveFileUnderParent() throws Throwable { assertIsFile(filepath); } + @Test public void testLSRootDir() throws Throwable { - if (!rootDirTestEnabled()) { - return; - } + assumeTrue(rootDirTestEnabled()); Path dir = path("/"); Path child = path("/FileSystemContractBaseTest"); @@ -790,10 +833,9 @@ public void testLSRootDir() throws Throwable { assertListFilesFinds(dir, child); } + @Test public void testListStatusRootDir() throws Throwable { - if (!rootDirTestEnabled()) { - return; - } + assumeTrue(rootDirTestEnabled()); Path dir = path("/"); Path child = path("/FileSystemContractBaseTest"); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestHDFSFileSystemContract.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestHDFSFileSystemContract.java index c1bf6f2f28..50d1e75ed6 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestHDFSFileSystemContract.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestHDFSFileSystemContract.java @@ -25,14 +25,17 @@ import org.apache.hadoop.fs.FileSystemContractBaseTest; import org.apache.hadoop.fs.Path; import org.apache.hadoop.security.UserGroupInformation; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; public class TestHDFSFileSystemContract extends FileSystemContractBaseTest { private MiniDFSCluster cluster; private String defaultWorkingDirectory; - @Override - protected void setUp() throws Exception { + @Before + public void setUp() throws Exception { Configuration conf = new HdfsConfiguration(); conf.set(CommonConfigurationKeys.FS_PERMISSIONS_UMASK_KEY, FileSystemContractBaseTest.TEST_UMASK); @@ -42,8 +45,8 @@ protected void setUp() throws Exception { UserGroupInformation.getCurrentUser().getShortUserName(); } - @Override - protected void tearDown() throws Exception { + @After + public void tearDown() throws Exception { super.tearDown(); if (cluster != null) { cluster.shutdown(); @@ -56,6 +59,7 @@ protected String getDefaultWorkingDirectory() { return defaultWorkingDirectory; } + @Test public void testAppend() throws IOException { AppendTestUtil.testAppend(fs, new Path("/testAppend/f")); } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java 
b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java index 4854471663..82853526a5 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHdfsFileSystemContract.java @@ -18,6 +18,8 @@ package org.apache.hadoop.hdfs.web; +import static org.junit.Assert.*; + import java.io.BufferedReader; import java.io.FileNotFoundException; import java.io.IOException; @@ -51,6 +53,7 @@ import org.apache.hadoop.security.UserGroupInformation; import org.apache.hadoop.test.GenericTestUtils; import org.junit.Assert; +import org.junit.Before; import org.junit.Test; public class TestWebHdfsFileSystemContract extends FileSystemContractBaseTest { @@ -73,8 +76,8 @@ public class TestWebHdfsFileSystemContract extends FileSystemContractBaseTest { } } - @Override - protected void setUp() throws Exception { + @Before + public void setUp() throws Exception { //get file system as a non-superuser final UserGroupInformation current = UserGroupInformation.getCurrentUser(); ugi = UserGroupInformation.createUserForTesting( @@ -92,7 +95,7 @@ protected String getDefaultWorkingDirectory() { * when calling exist(..) on a path /foo/bar/file * but /foo/bar is indeed a file in HDFS. */ - @Override + @Test public void testMkdirsFailsForSubdirectoryOfExistingFile() throws Exception { Path testDir = path("/test/hadoop"); assertFalse(fs.exists(testDir)); @@ -130,6 +133,7 @@ public void testMkdirsFailsForSubdirectoryOfExistingFile() throws Exception { //the following are new tests (i.e. not over-riding the super class methods) + @Test public void testGetFileBlockLocations() throws IOException { final String f = "/test/testGetFileBlockLocations"; createFile(path(f)); @@ -154,6 +158,7 @@ public void testGetFileBlockLocations() throws IOException { } } + @Test public void testCaseInsensitive() throws IOException { final Path p = new Path("/test/testCaseInsensitive"); final WebHdfsFileSystem webhdfs = (WebHdfsFileSystem)fs; @@ -180,6 +185,7 @@ public void testCaseInsensitive() throws IOException { assertTrue(fs.getFileStatus(p).isDirectory()); } + @Test public void testOpenNonExistFile() throws IOException { final Path p = new Path("/test/testOpenNonExistFile"); //open it as a file, should get FileNotFoundException @@ -191,6 +197,7 @@ public void testOpenNonExistFile() throws IOException { } } + @Test public void testSeek() throws IOException { final Path dir = new Path("/test/testSeek"); assertTrue(fs.mkdirs(dir)); @@ -252,6 +259,7 @@ public void testSeek() throws IOException { } + @Test public void testRootDir() throws IOException { final Path root = new Path("/"); @@ -291,6 +299,7 @@ public void testRootDir() throws IOException { /** * Test get with length parameter greater than actual file length. */ + @Test public void testLengthParamLongerThanFile() throws IOException { WebHdfsFileSystem webhdfs = (WebHdfsFileSystem)fs; Path dir = new Path("/test"); @@ -340,6 +349,7 @@ public void testLengthParamLongerThanFile() throws IOException { * Test get with offset and length parameters that combine to request a length * greater than actual file length. 
*/ + @Test public void testOffsetPlusLengthParamsLongerThanFile() throws IOException { WebHdfsFileSystem webhdfs = (WebHdfsFileSystem)fs; Path dir = new Path("/test"); @@ -386,6 +396,7 @@ public void testOffsetPlusLengthParamsLongerThanFile() throws IOException { } } + @Test public void testResponseCode() throws IOException { final WebHdfsFileSystem webhdfs = (WebHdfsFileSystem)fs; final Path root = new Path("/"); @@ -533,6 +544,7 @@ public void testResponseCode() throws IOException { } } + @Test public void testDatanodeCreateMissingParameter() throws IOException { final WebHdfsFileSystem webhdfs = (WebHdfsFileSystem) fs; final Path testDir = new Path(MessageFormat.format("/test/{0}/{1}", diff --git a/hadoop-tools/hadoop-aliyun/src/test/java/org/apache/hadoop/fs/aliyun/oss/TestAliyunOSSFileSystemContract.java b/hadoop-tools/hadoop-aliyun/src/test/java/org/apache/hadoop/fs/aliyun/oss/TestAliyunOSSFileSystemContract.java index 69ce694ab8..419ddee225 100644 --- a/hadoop-tools/hadoop-aliyun/src/test/java/org/apache/hadoop/fs/aliyun/oss/TestAliyunOSSFileSystemContract.java +++ b/hadoop-tools/hadoop-aliyun/src/test/java/org/apache/hadoop/fs/aliyun/oss/TestAliyunOSSFileSystemContract.java @@ -22,6 +22,10 @@ import org.apache.hadoop.fs.FileAlreadyExistsException; import org.apache.hadoop.fs.FileSystemContractBaseTest; import org.apache.hadoop.fs.Path; +import org.junit.Before; +import org.junit.Test; + +import static org.junit.Assume.*; import org.apache.hadoop.fs.FileStatus; import java.io.FileNotFoundException; @@ -41,11 +45,11 @@ public class TestAliyunOSSFileSystemContract private static Path testRootPath = new Path(AliyunOSSTestUtils.generateUniqueTestPath()); - @Override + @Before public void setUp() throws Exception { Configuration conf = new Configuration(); fs = AliyunOSSTestUtils.createTestFileSystem(conf); - super.setUp(); + assumeNotNull(fs); } @Override @@ -53,12 +57,12 @@ public Path getTestBaseDir() { return testRootPath; } - @Override + @Test public void testMkdirsWithUmask() throws Exception { // not supported } - @Override + @Test public void testRootDirAlwaysExists() throws Exception { //this will throw an exception if the path is not found fs.getFileStatus(super.path("/")); @@ -68,16 +72,15 @@ public void testRootDirAlwaysExists() throws Exception { fs.exists(super.path("/"))); } - @Override + @Test public void testRenameRootDirForbidden() throws Exception { - if (!renameSupported()) { - return; - } + assumeTrue(renameSupported()); rename(super.path("/"), super.path("/test/newRootDir"), false, true, false); } + @Test public void testDeleteSubdir() throws IOException { Path parentDir = this.path("/test/hadoop"); Path file = this.path("/test/hadoop/file"); @@ -102,87 +105,83 @@ protected boolean renameSupported() { return true; } - @Override + @Test public void testRenameNonExistentPath() throws Exception { - if (this.renameSupported()) { - Path src = this.path("/test/hadoop/path"); - Path dst = this.path("/test/new/newpath"); - try { - super.rename(src, dst, false, false, false); - fail("Should throw FileNotFoundException!"); - } catch (FileNotFoundException e) { - // expected - } + assumeTrue(renameSupported()); + Path src = this.path("/test/hadoop/path"); + Path dst = this.path("/test/new/newpath"); + try { + super.rename(src, dst, false, false, false); + fail("Should throw FileNotFoundException!"); + } catch (FileNotFoundException e) { + // expected } } - @Override + @Test public void testRenameFileMoveToNonExistentDirectory() throws Exception { - if (this.renameSupported()) 
{ - Path src = this.path("/test/hadoop/file"); - this.createFile(src); - Path dst = this.path("/test/new/newfile"); - try { - super.rename(src, dst, false, true, false); - fail("Should throw FileNotFoundException!"); - } catch (FileNotFoundException e) { - // expected - } + assumeTrue(renameSupported()); + Path src = this.path("/test/hadoop/file"); + this.createFile(src); + Path dst = this.path("/test/new/newfile"); + try { + super.rename(src, dst, false, true, false); + fail("Should throw FileNotFoundException!"); + } catch (FileNotFoundException e) { + // expected } } - @Override + @Test public void testRenameDirectoryMoveToNonExistentDirectory() throws Exception { - if (this.renameSupported()) { - Path src = this.path("/test/hadoop/dir"); - this.fs.mkdirs(src); - Path dst = this.path("/test/new/newdir"); - try { - super.rename(src, dst, false, true, false); - fail("Should throw FileNotFoundException!"); - } catch (FileNotFoundException e) { - // expected - } + assumeTrue(renameSupported()); + Path src = this.path("/test/hadoop/dir"); + this.fs.mkdirs(src); + Path dst = this.path("/test/new/newdir"); + try { + super.rename(src, dst, false, true, false); + fail("Should throw FileNotFoundException!"); + } catch (FileNotFoundException e) { + // expected } } - @Override + @Test public void testRenameFileMoveToExistingDirectory() throws Exception { super.testRenameFileMoveToExistingDirectory(); } - @Override + @Test public void testRenameFileAsExistingFile() throws Exception { - if (this.renameSupported()) { - Path src = this.path("/test/hadoop/file"); - this.createFile(src); - Path dst = this.path("/test/new/newfile"); - this.createFile(dst); - try { - super.rename(src, dst, false, true, true); - fail("Should throw FileAlreadyExistsException"); - } catch (FileAlreadyExistsException e) { - // expected - } + assumeTrue(renameSupported()); + Path src = this.path("/test/hadoop/file"); + this.createFile(src); + Path dst = this.path("/test/new/newfile"); + this.createFile(dst); + try { + super.rename(src, dst, false, true, true); + fail("Should throw FileAlreadyExistsException"); + } catch (FileAlreadyExistsException e) { + // expected } } - @Override + @Test public void testRenameDirectoryAsExistingFile() throws Exception { - if (this.renameSupported()) { - Path src = this.path("/test/hadoop/dir"); - this.fs.mkdirs(src); - Path dst = this.path("/test/new/newfile"); - this.createFile(dst); - try { - super.rename(src, dst, false, true, true); - fail("Should throw FileAlreadyExistsException"); - } catch (FileAlreadyExistsException e) { - // expected - } + assumeTrue(renameSupported()); + Path src = this.path("/test/hadoop/dir"); + this.fs.mkdirs(src); + Path dst = this.path("/test/new/newfile"); + this.createFile(dst); + try { + super.rename(src, dst, false, true, true); + fail("Should throw FileAlreadyExistsException"); + } catch (FileAlreadyExistsException e) { + // expected } } + @Test public void testGetFileStatusFileAndDirectory() throws Exception { Path filePath = this.path("/test/oss/file1"); this.createFile(filePath); @@ -203,6 +202,7 @@ public void testGetFileStatusFileAndDirectory() throws Exception { } } + @Test public void testMkdirsForExistingFile() throws Exception { Path testFile = this.path("/test/hadoop/file"); assertFalse(this.fs.exists(testFile)); diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFileSystemContract.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFileSystemContract.java index 6fcf4c7d2e..1b49d079b6 
100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFileSystemContract.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AFileSystemContract.java @@ -20,12 +20,15 @@ import org.junit.Before; import org.junit.Rule; +import org.junit.Test; import org.junit.rules.TestName; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystemContractBaseTest; import org.apache.hadoop.fs.Path; +import static org.junit.Assume.*; +import static org.junit.Assert.*; /** * Tests a live S3 system. If your keys and bucket aren't specified, all tests @@ -46,19 +49,19 @@ public class ITestS3AFileSystemContract extends FileSystemContractBaseTest { @Rule public TestName methodName = new TestName(); - @Before - public void nameThread() { + private void nameThread() { Thread.currentThread().setName("JUnit-" + methodName.getMethodName()); } - @Override + @Before public void setUp() throws Exception { + nameThread(); Configuration conf = new Configuration(); fs = S3ATestUtils.createTestFileSystem(conf); + assumeNotNull(fs); basePath = fs.makeQualified( S3ATestUtils.createTestPath(new Path("s3afilesystemcontract"))); - super.setUp(); } @Override @@ -66,16 +69,14 @@ public Path getTestBaseDir() { return basePath; } - @Override + @Test public void testMkdirsWithUmask() throws Exception { // not supported } - @Override + @Test public void testRenameDirectoryAsExistingDirectory() throws Exception { - if (!renameSupported()) { - return; - } + assumeTrue(renameSupported()); Path src = path("testRenameDirectoryAsExisting/dir"); fs.mkdirs(src); @@ -95,7 +96,7 @@ public void testRenameDirectoryAsExistingDirectory() throws Exception { fs.exists(path(dst + "/subdir/file2"))); } -// @Override + @Test public void testMoveDirUnderParent() throws Throwable { // not support because // Fails if dst is a directory that is not empty. 
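
The S3A conversion above, like the Aliyun OSS one earlier, swaps the JUnit 3 habit of silently returning from unsupported cases (and the removed runTest() guards) for JUnit 4 assumptions, so an unconfigured store or unsupported feature is reported as skipped rather than passed. The sketch below shows that idiom in isolation; the class and helper names are hypothetical and not part of this patch.

import static org.junit.Assume.assumeNotNull;
import static org.junit.Assume.assumeTrue;

import org.junit.Before;
import org.junit.Test;

/**
 * Hypothetical example (not part of this patch) of the JUnit 4 assumption
 * idiom the converted store tests use instead of returning early: a failed
 * assumption marks the test as skipped rather than passed.
 */
public class ExampleObjectStoreContractTest {

  /** Stand-in for the FileSystem under test; null when the store is not configured. */
  private Object fs;

  @Before
  public void setUp() {
    fs = createTestFileSystemOrNull();
    // Replaces the JUnit 3 runTest()/null-check guards: when the optional
    // store is not configured, every test in the class is skipped.
    assumeNotNull(fs);
  }

  @Test
  public void testRenameNonExistentPath() {
    // Replaces the JUnit 3 idiom "if (!renameSupported()) return;".
    assumeTrue(renameSupported());
    // ... rename assertions would go here ...
  }

  private Object createTestFileSystemOrNull() {
    // Placeholder: a real subclass would build the store-specific FileSystem
    // from test configuration, returning null when it is not configured.
    return new Object();
  }

  private boolean renameSupported() {
    return true;
  }
}
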
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3native/NativeS3FileSystemContractBaseTest.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3native/NativeS3FileSystemContractBaseTest.java index 261f79b039..a6dc2d3319 100644 --- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3native/NativeS3FileSystemContractBaseTest.java +++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3native/NativeS3FileSystemContractBaseTest.java @@ -28,7 +28,11 @@ import org.apache.hadoop.fs.FileSystemContractBaseTest; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.s3native.NativeS3FileSystem.NativeS3FsInputStream; +import org.junit.After; +import org.junit.Before; +import org.junit.Test; import org.junit.internal.AssumptionViolatedException; +import static org.junit.Assert.*; public abstract class NativeS3FileSystemContractBaseTest extends FileSystemContractBaseTest { @@ -37,8 +41,8 @@ public abstract class NativeS3FileSystemContractBaseTest abstract NativeFileSystemStore getNativeFileSystemStore() throws IOException; - @Override - protected void setUp() throws Exception { + @Before + public void setUp() throws Exception { Configuration conf = new Configuration(); store = getNativeFileSystemStore(); fs = new NativeS3FileSystem(store); @@ -50,17 +54,18 @@ protected void setUp() throws Exception { fs.initialize(URI.create(fsname), conf); } - @Override - protected void tearDown() throws Exception { + @After + public void tearDown() throws Exception { store.purge("test"); - super.tearDown(); } + @Test public void testCanonicalName() throws Exception { assertNull("s3n doesn't support security token and shouldn't have canonical name", fs.getCanonicalServiceName()); } + @Test public void testListStatusForRoot() throws Exception { FileStatus[] paths = fs.listStatus(path("/")); assertEquals("Root directory is not empty; ", 0, paths.length); @@ -73,6 +78,7 @@ public void testListStatusForRoot() throws Exception { assertEquals(path("/test"), paths[0].getPath()); } + @Test public void testNoTrailingBackslashOnBucket() throws Exception { assertTrue(fs.getFileStatus(new Path(fs.getUri().toString())).isDirectory()); } @@ -83,6 +89,7 @@ private void createTestFiles(String base) throws IOException { store.storeEmptyFile(base + "/dir/file3"); } + @Test public void testDirWithDifferentMarkersWorks() throws Exception { for (int i = 0; i <= 3; i++) { @@ -117,6 +124,7 @@ else if (i == 3) { } } + @Test public void testDeleteWithNoMarker() throws Exception { String base = "test/hadoop"; Path path = path("/" + base); @@ -130,6 +138,7 @@ public void testDeleteWithNoMarker() throws Exception { assertEquals(0, fs.listStatus(path).length); } + @Test public void testRenameWithNoMarker() throws Exception { String base = "test/hadoop"; Path dest = path("/test/hadoop2"); @@ -145,11 +154,13 @@ public void testRenameWithNoMarker() throws Exception { assertEquals(2, fs.listStatus(dest).length); } + @Test public void testEmptyFile() throws Exception { store.storeEmptyFile("test/hadoop/file1"); fs.open(path("/test/hadoop/file1")).close(); } - + + @Test public void testBlockSize() throws Exception { Path file = path("/test/hadoop/file"); createFile(file); @@ -162,7 +173,8 @@ public void testBlockSize() throws Exception { assertEquals("Double default block size", newBlockSize, fs.getFileStatus(file).getBlockSize()); } - + + @Test public void testRetryOnIoException() throws Exception { class TestInputStream extends InputStream { boolean shouldThrow = true; diff --git 
a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlFileSystemContractLive.java b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlFileSystemContractLive.java index 9d055f18b2..34c9f79c45 100644 --- a/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlFileSystemContractLive.java +++ b/hadoop-tools/hadoop-azure-datalake/src/test/java/org/apache/hadoop/fs/adl/live/TestAdlFileSystemContractLive.java @@ -22,7 +22,8 @@ import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileSystemContractBaseTest; import org.apache.hadoop.fs.Path; -import org.junit.Assume; +import org.junit.After; +import static org.junit.Assume.*; import org.junit.Before; import java.io.IOException; @@ -33,16 +34,18 @@ public class TestAdlFileSystemContractLive extends FileSystemContractBaseTest { private FileSystem adlStore; - @Override - protected void setUp() throws Exception { + @Before + public void setUp() throws Exception { + skipTestCheck(); adlStore = AdlStorageConfiguration.createStorageConnector(); if (AdlStorageConfiguration.isContractTestEnabled()) { fs = adlStore; } + assumeNotNull(fs); } - @Override - protected void tearDown() throws Exception { + @After + public void tearDown() throws Exception { if (AdlStorageConfiguration.isContractTestEnabled()) { cleanup(); } @@ -53,15 +56,7 @@ private void cleanup() throws IOException { adlStore.delete(new Path("/test"), true); } - @Override - protected void runTest() throws Throwable { - if (AdlStorageConfiguration.isContractTestEnabled()) { - super.runTest(); - } - } - - @Before - public void skipTestCheck() { - Assume.assumeTrue(AdlStorageConfiguration.isContractTestEnabled()); + private void skipTestCheck() { + assumeTrue(AdlStorageConfiguration.isContractTestEnabled()); } } \ No newline at end of file diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/TestNativeAzureFileSystemContractEmulator.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/TestNativeAzureFileSystemContractEmulator.java index b4a71f6178..217ca81550 100644 --- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/TestNativeAzureFileSystemContractEmulator.java +++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/TestNativeAzureFileSystemContractEmulator.java @@ -18,33 +18,31 @@ package org.apache.hadoop.fs.azure; +import static org.junit.Assume.assumeNotNull; + import org.apache.hadoop.fs.FileSystemContractBaseTest; +import org.junit.After; +import org.junit.Before; public class TestNativeAzureFileSystemContractEmulator extends FileSystemContractBaseTest { private AzureBlobStorageTestAccount testAccount; - @Override - protected void setUp() throws Exception { + @Before + public void setUp() throws Exception { testAccount = AzureBlobStorageTestAccount.createForEmulator(); if (testAccount != null) { fs = testAccount.getFileSystem(); } + assumeNotNull(fs); } - @Override - protected void tearDown() throws Exception { + @After + public void tearDown() throws Exception { if (testAccount != null) { testAccount.cleanup(); testAccount = null; fs = null; } } - - @Override - protected void runTest() throws Throwable { - if (testAccount != null) { - super.runTest(); - } - } } diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/TestNativeAzureFileSystemContractLive.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/TestNativeAzureFileSystemContractLive.java 
index 0d7b9ad2c2..b546009426 100644 --- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/TestNativeAzureFileSystemContractLive.java +++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/TestNativeAzureFileSystemContractLive.java @@ -18,23 +18,29 @@ package org.apache.hadoop.fs.azure; +import static org.junit.Assume.assumeNotNull; + import org.apache.hadoop.fs.FileSystemContractBaseTest; +import org.junit.After; +import org.junit.Before; import org.junit.Ignore; +import org.junit.Test; public class TestNativeAzureFileSystemContractLive extends FileSystemContractBaseTest { private AzureBlobStorageTestAccount testAccount; - @Override - protected void setUp() throws Exception { + @Before + public void setUp() throws Exception { testAccount = AzureBlobStorageTestAccount.create(); if (testAccount != null) { fs = testAccount.getFileSystem(); } + assumeNotNull(fs); } - @Override - protected void tearDown() throws Exception { + @After + public void tearDown() throws Exception { if (testAccount != null) { testAccount.cleanup(); testAccount = null; @@ -42,35 +48,33 @@ protected void tearDown() throws Exception { } } - @Override - protected void runTest() throws Throwable { - if (testAccount != null) { - super.runTest(); - } - } - /** * The following tests are failing on Azure and the Azure * file system code needs to be modified to make them pass. * A separate work item has been opened for this. */ @Ignore + @Test public void testMoveFileUnderParent() throws Throwable { } @Ignore + @Test public void testRenameFileToSelf() throws Throwable { } - + @Ignore + @Test public void testRenameChildDirForbidden() throws Exception { } - + @Ignore + @Test public void testMoveDirUnderParent() throws Throwable { } - + @Ignore + @Test public void testRenameDirToSelf() throws Throwable { } } diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/TestNativeAzureFileSystemContractMocked.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/TestNativeAzureFileSystemContractMocked.java index 6d3df256bf..f458bb3db4 100644 --- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/TestNativeAzureFileSystemContractMocked.java +++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/TestNativeAzureFileSystemContractMocked.java @@ -19,13 +19,15 @@ package org.apache.hadoop.fs.azure; import org.apache.hadoop.fs.FileSystemContractBaseTest; +import org.junit.Before; import org.junit.Ignore; +import org.junit.Test; public class TestNativeAzureFileSystemContractMocked extends FileSystemContractBaseTest { - @Override - protected void setUp() throws Exception { + @Before + public void setUp() throws Exception { fs = AzureBlobStorageTestAccount.createMock().getFileSystem(); } @@ -35,22 +37,27 @@ protected void setUp() throws Exception { * A separate work item has been opened for this. 
*/ @Ignore + @Test public void testMoveFileUnderParent() throws Throwable { } @Ignore + @Test public void testRenameFileToSelf() throws Throwable { } @Ignore + @Test public void testRenameChildDirForbidden() throws Exception { } @Ignore + @Test public void testMoveDirUnderParent() throws Throwable { } @Ignore + @Test public void testRenameDirToSelf() throws Throwable { } } diff --git a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/TestNativeAzureFileSystemContractPageBlobLive.java b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/TestNativeAzureFileSystemContractPageBlobLive.java index 3c3b782dac..2a88ad27cf 100644 --- a/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/TestNativeAzureFileSystemContractPageBlobLive.java +++ b/hadoop-tools/hadoop-azure/src/test/java/org/apache/hadoop/fs/azure/TestNativeAzureFileSystemContractPageBlobLive.java @@ -20,7 +20,11 @@ import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystemContractBaseTest; +import org.junit.After; +import static org.junit.Assume.assumeNotNull; +import org.junit.Before; import org.junit.Ignore; +import org.junit.Test; public class TestNativeAzureFileSystemContractPageBlobLive extends FileSystemContractBaseTest { @@ -39,16 +43,17 @@ private AzureBlobStorageTestAccount createTestAccount() return AzureBlobStorageTestAccount.create(conf); } - @Override - protected void setUp() throws Exception { + @Before + public void setUp() throws Exception { testAccount = createTestAccount(); if (testAccount != null) { fs = testAccount.getFileSystem(); } + assumeNotNull(fs); } - @Override - protected void tearDown() throws Exception { + @After + public void tearDown() throws Exception { if (testAccount != null) { testAccount.cleanup(); testAccount = null; @@ -56,35 +61,33 @@ protected void tearDown() throws Exception { } } - @Override - protected void runTest() throws Throwable { - if (testAccount != null) { - super.runTest(); - } - } - /** * The following tests are failing on Azure and the Azure * file system code needs to be modified to make them pass. * A separate work item has been opened for this. 
*/ @Ignore + @Test public void testMoveFileUnderParent() throws Throwable { } @Ignore + @Test public void testRenameFileToSelf() throws Throwable { } @Ignore + @Test public void testRenameChildDirForbidden() throws Exception { } @Ignore + @Test public void testMoveDirUnderParent() throws Throwable { } @Ignore + @Test public void testRenameDirToSelf() throws Throwable { } } diff --git a/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestSwiftFileSystemContract.java b/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestSwiftFileSystemContract.java index 76716b2e50..baeb4f3676 100644 --- a/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestSwiftFileSystemContract.java +++ b/hadoop-tools/hadoop-openstack/src/test/java/org/apache/hadoop/fs/swift/TestSwiftFileSystemContract.java @@ -26,6 +26,10 @@ import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.swift.snative.SwiftNativeFileSystem; import org.apache.hadoop.fs.swift.util.SwiftTestUtils; +import org.junit.Before; +import org.junit.Test; + +import static org.junit.Assert.*; import java.io.IOException; import java.net.URI; @@ -54,8 +58,8 @@ protected boolean filesystemIsCaseSensitive() { return false; } - @Override - protected void setUp() throws Exception { + @Before + public void setUp() throws Exception { final URI uri = getFilesystemURI(); final Configuration conf = new Configuration(); fs = createSwiftFS(); @@ -67,7 +71,6 @@ protected void setUp() throws Exception { fs = null; throw e; } - super.setUp(); } protected URI getFilesystemURI() throws URISyntaxException, IOException { @@ -80,7 +83,7 @@ protected SwiftNativeFileSystem createSwiftFS() throws IOException { return swiftNativeFileSystem; } - @Override + @Test public void testMkdirsFailsForSubdirectoryOfExistingFile() throws Exception { Path testDir = path("/test/hadoop"); assertFalse(fs.exists(testDir)); @@ -114,7 +117,7 @@ public void testMkdirsFailsForSubdirectoryOfExistingFile() throws Exception { } - @Override + @Test public void testWriteReadAndDeleteEmptyFile() throws Exception { try { super.testWriteReadAndDeleteEmptyFile(); @@ -123,11 +126,12 @@ public void testWriteReadAndDeleteEmptyFile() throws Exception { } } - @Override + @Test public void testMkdirsWithUmask() throws Exception { //unsupported } + @Test public void testZeroByteFilesAreFiles() throws Exception { // SwiftTestUtils.unsupported("testZeroByteFilesAreFiles"); }
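
Taken together, the converted subclasses follow the shape the updated base-class Javadoc describes: initialize the protected fs field from a @Before method instead of overriding JUnit 3's setUp(), and inherit the base class's @Test methods, @After cleanup, 30-second Timeout rule and ExpectedException rule. The sketch below assumes a local-filesystem subclass that does not exist in this patch; it is only meant to illustrate that shape.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileSystemContractBaseTest;
import org.junit.Before;

/**
 * Hypothetical subclass (not part of this patch) written against the JUnit 4
 * form of the base class: assign the protected "fs" field in a @Before
 * method; the inherited @Test methods, @After cleanup and Timeout rule do
 * the rest.
 */
public class TestLocalFSContractExample extends FileSystemContractBaseTest {

  @Before
  public void setUp() throws Exception {
    // JUnit 4 replacement for overriding the JUnit 3 setUp(): just assign fs.
    fs = FileSystem.getLocal(new Configuration());
  }

  @Override
  protected String getDefaultWorkingDirectory() {
    // Same override the HDFS and WebHDFS subclasses in this patch provide;
    // for the local filesystem it is the process working directory.
    return fs.getWorkingDirectory().toUri().getPath();
  }
}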