diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FSMainOperationsBaseTest.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FSMainOperationsBaseTest.java
index f0c00c4cde..07f0e81619 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FSMainOperationsBaseTest.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FSMainOperationsBaseTest.java
@@ -102,7 +102,9 @@ public void setUp() throws Exception {
@After
public void tearDown() throws Exception {
- fSys.delete(new Path(getAbsoluteTestRootPath(fSys), new Path("test")), true);
+ if (fSys != null) {
+ fSys.delete(new Path(getAbsoluteTestRootPath(fSys), new Path("test")), true);
+ }
}
@@ -192,7 +194,7 @@ public void testWorkingDirectory() throws Exception {
@Test
public void testWDAbsolute() throws IOException {
- Path absoluteDir = new Path(fSys.getUri() + "/test/existingDir");
+ Path absoluteDir = getTestRootPath(fSys, "test/existingDir");
fSys.mkdirs(absoluteDir);
fSys.setWorkingDirectory(absoluteDir);
Assert.assertEquals(absoluteDir, fSys.getWorkingDirectory());
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextMainOperationsBaseTest.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextMainOperationsBaseTest.java
index 4c90490b09..6897a0d194 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextMainOperationsBaseTest.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FileContextMainOperationsBaseTest.java
@@ -81,6 +81,12 @@ public abstract class FileContextMainOperationsBaseTest {
protected final FileContextTestHelper fileContextTestHelper =
createFileContextHelper();
+ /**
+ * Create the test helper.
+ * Important: this is invoked during the construction of the base class,
+ * so is very brittle.
+ * @return a test helper.
+ */
protected FileContextTestHelper createFileContextHelper() {
return new FileContextTestHelper();
}
@@ -107,7 +113,7 @@ public boolean accept(Path file) {
private static final byte[] data = getFileData(numBlocks,
getDefaultBlockSize());
-
+
@Before
public void setUp() throws Exception {
File testBuildData = GenericTestUtils.getRandomizedTestDir();
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFSMainOperationsLocalFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFSMainOperationsLocalFileSystem.java
index e3932da05c..e53e2b7e01 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFSMainOperationsLocalFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestFSMainOperationsLocalFileSystem.java
@@ -21,10 +21,6 @@
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
-import org.junit.After;
-import org.junit.Assert;
-import org.junit.Before;
-import org.junit.Test;
public class TestFSMainOperationsLocalFileSystem extends FSMainOperationsBaseTest {
@@ -32,12 +28,6 @@ public class TestFSMainOperationsLocalFileSystem extends FSMainOperationsBaseTes
protected FileSystem createFileSystem() throws IOException {
return FileSystem.getLocal(new Configuration());
}
-
- @Override
- @Before
- public void setUp() throws Exception {
- super.setUp();
- }
static Path wd = null;
@Override
@@ -46,19 +36,5 @@ protected Path getDefaultWorkingDirectory() throws IOException {
wd = FileSystem.getLocal(new Configuration()).getWorkingDirectory();
return wd;
}
-
- @Override
- @After
- public void tearDown() throws Exception {
- super.tearDown();
- }
-
- @Test
- @Override
- public void testWDAbsolute() throws IOException {
- Path absoluteDir = getTestRootPath(fSys, "test/existingDir");
- fSys.mkdirs(absoluteDir);
- fSys.setWorkingDirectory(absoluteDir);
- Assert.assertEquals(absoluteDir, fSys.getWorkingDirectory());
- }
+
}
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFSMainOperationsLocalFileSystem.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFSMainOperationsLocalFileSystem.java
index 12687fd8b9..fc0d74b649 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFSMainOperationsLocalFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/viewfs/TestFSMainOperationsLocalFileSystem.java
@@ -53,14 +53,5 @@ public void tearDown() throws Exception {
super.tearDown();
ViewFileSystemTestSetup.tearDown(this, fcTarget);
}
-
- @Test
- @Override
- public void testWDAbsolute() throws IOException {
- Path absoluteDir = getTestRootPath(fSys, "test/existingDir");
- fSys.mkdirs(absoluteDir);
- fSys.setWorkingDirectory(absoluteDir);
- Assert.assertEquals(absoluteDir, fSys.getWorkingDirectory());
- }
}
diff --git a/hadoop-tools/hadoop-aws/pom.xml b/hadoop-tools/hadoop-aws/pom.xml
index 25646c0737..595413edc4 100644
--- a/hadoop-tools/hadoop-aws/pom.xml
+++ b/hadoop-tools/hadoop-aws/pom.xml
@@ -56,6 +56,11 @@
unset
+
+
+ 00
+
+ unset
@@ -115,14 +120,8 @@
${test.build.data}/${surefire.forkNumber}${test.build.dir}/${surefire.forkNumber}${hadoop.tmp.dir}/${surefire.forkNumber}
+ job-${job.id}-fork-000${surefire.forkNumber}
-
-
-
-
-
-
- fork-000${surefire.forkNumber}${fs.s3a.scale.test.enabled}${fs.s3a.scale.test.huge.filesize}
@@ -163,7 +162,7 @@
- fork-000${surefire.forkNumber}
+ job-${job.id}-fork-000${surefire.forkNumber}${fs.s3a.scale.test.enabled}${fs.s3a.scale.test.huge.filesize}
@@ -174,14 +173,14 @@
${test.integration.timeout}${fs.s3a.prefetch.enabled}
+
+ ${root.tests.enabled}
+
-
-
-
-
+
@@ -228,6 +227,9 @@
${fs.s3a.directory.marker.audit}${fs.s3a.prefetch.enabled}
+
+ ${root.tests.enabled}
+ job-${job.id}
@@ -289,6 +291,7 @@
${fs.s3a.directory.marker.audit}${fs.s3a.prefetch.enabled}
+ job-${job.id}${fs.s3a.scale.test.timeout}
diff --git a/hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/testing.md b/hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/testing.md
index 24c4c322ca..45d1c84765 100644
--- a/hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/testing.md
+++ b/hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/testing.md
@@ -43,7 +43,7 @@ is a more specific lie and harder to make. And, if you get caught out: you
lose all credibility with the project.
You don't need to test from a VM within the AWS infrastructure; with the
-`-Dparallel=tests` option the non-scale tests complete in under ten minutes.
+`-Dparallel-tests` option the non-scale tests complete in under twenty minutes.
Because the tests clean up after themselves, they are also designed to be low
cost. It's neither hard nor expensive to run the tests; if you can't,
there's no guarantee your patch works. The reviewers have enough to do, and
@@ -539,12 +539,51 @@ Otherwise, set a large timeout in `fs.s3a.scale.test.timeout`
The tests are executed in an order to only clean up created files after
the end of all the tests. If the tests are interrupted, the test data will remain.
+## Testing through continuous integration
+
+### Parallel CI builds.
+For CI testing of the module, including the integration tests,
+it is generally necessary to support testing multiple PRs simultaneously.
+
+To do this
+1. A job ID must be supplied in the `job.id` property, so each job works on an isolated directory
+ tree. This should be a number or unique string, which will be used within a path element, so
+ must only contain characters valid in an S3/hadoop path element.
+2. Root directory tests need to be disabled by setting `fs.s3a.root.tests.enabled` to
+ `false`, either in the command line to maven or in the XML configurations.
+
+```
+mvn verify -T 1C -Dparallel-tests -DtestsThreadCount=14 -Dscale -Dfs.s3a.root.tests.enabled=false -Djob.id=001
+```
+
+This parallel execution feature is only for isolated builds sharing a single S3 bucket; it does
+not support parallel builds and tests from the same local source tree.
+
+Without the root tests being executed, set up a scheduled job to purge the test bucket of all
+data on a regular basis, to keep costs down.
+The easiest way to do this is to add a lifecycle rule to the bucket that deletes all objects more than a few days old,
+alongside one to abort all pending uploads more than 24h old.
+
+
+### Securing CI builds
+
+It's clearly unsafe to have CI infrastructure testing PRs submitted to the Apache GitHub account
+with AWS credentials -which is why it isn't done by the Yetus-initiated builds.
+
+Anyone doing this privately should:
+* Review incoming patches before triggering the tests.
+* Have a dedicated IAM role with restricted access to the test bucket, any KMS keys used, and the
+ external bucket containing the CSV test file.
+* Have a build process which generates short-lived session credentials for this role.
+* Run the tests in an EC2 VM/container which collects the restricted IAM credentials
+ from the IAM instance/container credentials provider.
+
## Load tests.
-Some are designed to overload AWS services with more
+Some tests are designed to overload AWS services with more
requests per second than an AWS account is permitted.
-The operation of these test maybe observable to other users of the same
+The operation of these tests may be observable to other users of the same
account -especially if they are working in the AWS region to which the
tests are targeted.
@@ -556,6 +595,10 @@ They do not run automatically: they must be explicitly run from the command line
Look in the source for these and reads the Javadocs before executing.
+Note: one fear here was that asking for too many session/role credentials in a short period
+of time would actually lock an account out of a region. It doesn't: it simply triggers
+throttling of STS requests.
+
## Testing against non-AWS S3 Stores.
The S3A filesystem is designed to work with S3 stores which implement
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/ITestS3AContractRootDir.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/ITestS3AContractRootDir.java
index 5335de1b32..cd5c078a9e 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/ITestS3AContractRootDir.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/contract/s3a/ITestS3AContractRootDir.java
@@ -27,6 +27,8 @@
import org.apache.hadoop.fs.contract.AbstractFSContract;
import org.apache.hadoop.fs.s3a.S3AFileSystem;
+import static org.apache.hadoop.fs.s3a.S3ATestUtils.maybeSkipRootTests;
+
/**
* root dir operations against an S3 bucket.
*/
@@ -36,6 +38,12 @@ public class ITestS3AContractRootDir extends
private static final Logger LOG =
LoggerFactory.getLogger(ITestS3AContractRootDir.class);
+ @Override
+ public void setup() throws Exception {
+ super.setup();
+ maybeSkipRootTests(getFileSystem().getConf());
+ }
+
@Override
protected AbstractFSContract createContract(Configuration conf) {
return new S3AContract(conf);
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AConfiguration.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AConfiguration.java
index 73bba9d62c..a3b994054e 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AConfiguration.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AConfiguration.java
@@ -386,8 +386,10 @@ public void shouldBeAbleToSwitchOnS3PathStyleAccessViaConfigProperty()
s3Configuration.pathStyleAccessEnabled());
byte[] file = ContractTestUtils.toAsciiByteArray("test file");
ContractTestUtils.writeAndRead(fs,
- new Path("/path/style/access/testFile"), file, file.length,
- (int) conf.getLongBytes(Constants.FS_S3A_BLOCK_SIZE, file.length), false, true);
+ createTestPath(new Path("/path/style/access/testFile")),
+ file, file.length,
+ (int) conf.getLongBytes(Constants.FS_S3A_BLOCK_SIZE, file.length),
+ false, true);
} catch (final AWSRedirectException e) {
LOG.error("Caught exception: ", e);
// Catch/pass standard path style access behaviour when live bucket
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionSSEC.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionSSEC.java
index 321f831c0a..d22de3b06d 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionSSEC.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionSSEC.java
@@ -20,13 +20,9 @@
import java.io.IOException;
import java.nio.file.AccessDeniedException;
-import java.util.Arrays;
-import java.util.Collection;
import org.assertj.core.api.Assertions;
import org.junit.Test;
-import org.junit.runner.RunWith;
-import org.junit.runners.Parameterized;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
@@ -37,19 +33,14 @@
import static org.apache.hadoop.fs.contract.ContractTestUtils.dataset;
import static org.apache.hadoop.fs.contract.ContractTestUtils.touch;
-import static org.apache.hadoop.fs.s3a.Constants.DIRECTORY_MARKER_POLICY;
-import static org.apache.hadoop.fs.s3a.Constants.DIRECTORY_MARKER_POLICY_DELETE;
-import static org.apache.hadoop.fs.s3a.Constants.DIRECTORY_MARKER_POLICY_KEEP;
import static org.apache.hadoop.fs.s3a.Constants.ETAG_CHECKSUM_ENABLED;
import static org.apache.hadoop.fs.s3a.Constants.S3_ENCRYPTION_ALGORITHM;
import static org.apache.hadoop.fs.s3a.Constants.S3_ENCRYPTION_KEY;
import static org.apache.hadoop.fs.s3a.Constants.SERVER_SIDE_ENCRYPTION_ALGORITHM;
import static org.apache.hadoop.fs.s3a.Constants.SERVER_SIDE_ENCRYPTION_KEY;
-import static org.apache.hadoop.fs.s3a.S3ATestUtils.createTestPath;
-import static org.apache.hadoop.fs.s3a.S3ATestUtils.disableFilesystemCaching;
import static org.apache.hadoop.fs.s3a.S3ATestUtils.getTestBucketName;
+import static org.apache.hadoop.fs.s3a.S3ATestUtils.maybeSkipRootTests;
import static org.apache.hadoop.fs.s3a.S3ATestUtils.removeBaseAndBucketOverrides;
-import static org.apache.hadoop.fs.s3a.S3ATestUtils.skipIfEncryptionTestsDisabled;
import static org.apache.hadoop.test.LambdaTestUtils.intercept;
/**
@@ -60,7 +51,6 @@
* Equally "vexing" has been the optimizations of getFileStatus(), wherein
* LIST comes before HEAD path + /
*/
-@RunWith(Parameterized.class)
public class ITestS3AEncryptionSSEC extends AbstractTestS3AEncryption {
private static final String SERVICE_AMAZON_S3_STATUS_CODE_403
@@ -75,31 +65,11 @@ public class ITestS3AEncryptionSSEC extends AbstractTestS3AEncryption {
= "msdo3VvvZznp66Gth58a91Hxe/UpExMkwU9BHkIjfW8=";
private static final int TEST_FILE_LEN = 2048;
- /**
- * Parameterization.
- */
- @Parameterized.Parameters(name = "{0}")
- public static Collection