diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/AbstractTestS3AEncryption.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/AbstractTestS3AEncryption.java
index c117056c0f..23776f3164 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/AbstractTestS3AEncryption.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/AbstractTestS3AEncryption.java
@@ -19,6 +19,7 @@
 package org.apache.hadoop.fs.s3a;
 
 import java.io.IOException;
+import java.nio.file.AccessDeniedException;
 
 import org.junit.Test;
 
@@ -81,10 +82,20 @@ protected void requireEncryptedFileSystem() {
     skipIfEncryptionTestsDisabled(getFileSystem().getConf());
   }
 
+  /**
+   * Skip the test when running against a bucket with mandatory
+   * encryption which permits only certain encryption methods.
+   * S3 throws an AmazonS3Exception with status 403 (AccessDenied),
+   * which S3AUtils.translateException(...) maps to an AccessDeniedException.
+   */
   @Override
   public void setup() throws Exception {
-    super.setup();
-    requireEncryptedFileSystem();
+    try {
+      super.setup();
+      requireEncryptedFileSystem();
+    } catch (AccessDeniedException e) {
+      skip("Bucket does not allow " + getSSEAlgorithm() + " encryption method");
+    }
   }
 
   /**
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3AHugeFilesSSECDiskBlocks.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3AHugeFilesSSECDiskBlocks.java
index d2e04d928d..3dc64ea3b4 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3AHugeFilesSSECDiskBlocks.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3AHugeFilesSSECDiskBlocks.java
@@ -23,6 +23,9 @@
 import org.apache.hadoop.fs.s3a.S3AEncryptionMethods;
 import org.apache.hadoop.fs.s3a.S3ATestUtils;
 
+import java.nio.file.AccessDeniedException;
+
+import static org.apache.hadoop.fs.contract.ContractTestUtils.skip;
 import static org.apache.hadoop.fs.s3a.Constants.S3_ENCRYPTION_ALGORITHM;
 import static org.apache.hadoop.fs.s3a.Constants.S3_ENCRYPTION_KEY;
 import static org.apache.hadoop.fs.s3a.Constants.SERVER_SIDE_ENCRYPTION_ALGORITHM;
@@ -41,10 +44,20 @@ public class ITestS3AHugeFilesSSECDiskBlocks
   private static final String KEY_1
       = "4niV/jPK5VFRHY+KNb6wtqYd4xXyMgdJ9XQJpcQUVbs=";
 
+  /**
+   * Skip the test when running against a bucket with mandatory
+   * encryption which permits only certain encryption methods.
+   * S3 throws an AmazonS3Exception with status 403 (AccessDenied),
+   * which S3AUtils.translateException(...) maps to an AccessDeniedException.
+   */
   @Override
   public void setup() throws Exception {
-    super.setup();
-    skipIfEncryptionTestsDisabled(getConfiguration());
+    try {
+      super.setup();
+      skipIfEncryptionTestsDisabled(getConfiguration());
+    } catch (AccessDeniedException e) {
+      skip("Bucket does not allow " + S3AEncryptionMethods.SSE_C + " encryption method");
+    }
   }
 
   @SuppressWarnings("deprecation")
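
Reviewer note (not part of the patch): the pattern above relies on ContractTestUtils.skip() aborting setup with a JUnit assumption failure, so the case reports as skipped rather than failed. Below is a minimal standalone sketch of the same try/skip shape; the class and helper names are hypothetical, and it assumes skip() throws AssumptionViolatedException, as Hadoop's ContractTestUtils does.

// Standalone sketch of the skip-on-403 pattern applied by this patch.
// Class and helper names here are hypothetical; only the shape matches.
import java.nio.file.AccessDeniedException;

import org.junit.AssumptionViolatedException;
import org.junit.Before;
import org.junit.Test;

public class MandatoryEncryptionSkipSketch {

  @Before
  public void setup() throws Exception {
    try {
      // Stands in for super.setup() / requireEncryptedFileSystem(), which
      // fail with a translated 403 on a mandatory-encryption bucket.
      openEncryptedFileSystem();
    } catch (AccessDeniedException e) {
      // Mirrors ContractTestUtils.skip(): an assumption failure makes
      // JUnit report the test as skipped instead of failed.
      throw new AssumptionViolatedException(
          "Bucket does not allow this encryption method: " + e);
    }
  }

  // Hypothetical stand-in: always rejects, as a bucket policy would
  // reject an unsupported SSE algorithm with 403 AccessDenied.
  private void openEncryptedFileSystem() throws AccessDeniedException {
    throw new AccessDeniedException("s3a://example-bucket");
  }

  @Test
  public void testEncryptedWrite() {
    // Never reached when setup() aborts with the assumption failure.
  }
}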