HADOOP-14334. S3 SSEC tests to downgrade when running against a mandatory encryption object store (#3870)
Contributed by Monthon Klongklaew
parent 39efbc6b6f
commit b27732c69b
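For context, the SSE-C integration tests touched by this patch only run when the test bucket is configured for client-supplied encryption keys. Below is a minimal sketch of that configuration, built from the S3_ENCRYPTION_ALGORITHM and S3_ENCRYPTION_KEY constants that the patch itself imports; the class name and the placeholder key value are illustrative and not part of the patch.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.s3a.S3AEncryptionMethods;

import static org.apache.hadoop.fs.s3a.Constants.S3_ENCRYPTION_ALGORITHM;
import static org.apache.hadoop.fs.s3a.Constants.S3_ENCRYPTION_KEY;

public class SseCTestConfigSketch {
  /** Build a configuration that asks S3A to use SSE-C against the test bucket. */
  public static Configuration sseCTestConfiguration() {
    Configuration conf = new Configuration();
    // Select the SSE-C algorithm and supply the client-held key; the value here
    // is a placeholder, not a real base64-encoded 256-bit key.
    conf.set(S3_ENCRYPTION_ALGORITHM, S3AEncryptionMethods.SSE_C.getMethod());
    conf.set(S3_ENCRYPTION_KEY, "base64-encoded-256-bit-key");
    return conf;
  }
}

Against a bucket whose policy mandates a different method (for example SSE-KMS only), requests made with a configuration like this are rejected with 403 AccessDenied, which is exactly the case the patch downgrades to a skipped test.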
@@ -19,6 +19,7 @@
 package org.apache.hadoop.fs.s3a;
 
 import java.io.IOException;
+import java.nio.file.AccessDeniedException;
 
 import org.junit.Test;
 
@@ -81,10 +82,20 @@ protected void requireEncryptedFileSystem() {
     skipIfEncryptionTestsDisabled(getFileSystem().getConf());
   }
 
+  /**
+   * Skipping tests when running against mandatory encryption bucket
+   * which allows only certain encryption method.
+   * S3 throw AmazonS3Exception with status 403 AccessDenied
+   * then it is translated into AccessDeniedException by S3AUtils.translateException(...)
+   */
   @Override
   public void setup() throws Exception {
-    super.setup();
-    requireEncryptedFileSystem();
+    try {
+      super.setup();
+      requireEncryptedFileSystem();
+    } catch (AccessDeniedException e) {
+      skip("Bucket does not allow " + getSSEAlgorithm() + " encryption method");
+    }
   }
 
   /**
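The javadoc added above documents the failure path this patch relies on: a bucket that mandates a different encryption method rejects the request with HTTP 403 AccessDenied, and S3AUtils.translateException(...) turns that into java.nio.file.AccessDeniedException. A minimal sketch of exercising that translation follows; it assumes the AWS SDK v1 AmazonS3Exception type used on this branch, and the class name and path are illustrative only.

import java.io.IOException;
import java.nio.file.AccessDeniedException;

import com.amazonaws.services.s3.model.AmazonS3Exception;

import org.apache.hadoop.fs.s3a.S3AUtils;

public class TranslateAccessDeniedSketch {
  public static void main(String[] args) {
    // Simulate the 403 AccessDenied that S3 returns when the bucket policy
    // rejects the encryption headers sent by the client.
    AmazonS3Exception denied = new AmazonS3Exception("Access Denied");
    denied.setStatusCode(403);
    denied.setErrorCode("AccessDenied");

    IOException translated =
        S3AUtils.translateException("PUT", "s3a://bucket/object", denied);

    // HTTP 403 is mapped to java.nio.file.AccessDeniedException, which is what
    // the new setup() methods catch before calling skip().
    System.out.println(translated instanceof AccessDeniedException);
  }
}

The second test class, patched below, gets the same setup() treatment.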
@@ -23,6 +23,9 @@
 import org.apache.hadoop.fs.s3a.S3AEncryptionMethods;
 import org.apache.hadoop.fs.s3a.S3ATestUtils;
 
+import java.nio.file.AccessDeniedException;
+
+import static org.apache.hadoop.fs.contract.ContractTestUtils.skip;
 import static org.apache.hadoop.fs.s3a.Constants.S3_ENCRYPTION_ALGORITHM;
 import static org.apache.hadoop.fs.s3a.Constants.S3_ENCRYPTION_KEY;
 import static org.apache.hadoop.fs.s3a.Constants.SERVER_SIDE_ENCRYPTION_ALGORITHM;
@@ -41,10 +44,20 @@ public class ITestS3AHugeFilesSSECDiskBlocks
   private static final String KEY_1
       = "4niV/jPK5VFRHY+KNb6wtqYd4xXyMgdJ9XQJpcQUVbs=";
 
+  /**
+   * Skipping tests when running against mandatory encryption bucket
+   * which allows only certain encryption method.
+   * S3 throw AmazonS3Exception with status 403 AccessDenied
+   * then it is translated into AccessDeniedException by S3AUtils.translateException(...)
+   */
   @Override
   public void setup() throws Exception {
-    super.setup();
-    skipIfEncryptionTestsDisabled(getConfiguration());
+    try {
+      super.setup();
+      skipIfEncryptionTestsDisabled(getConfiguration());
+    } catch (AccessDeniedException e) {
+      skip("Bucket does not allow " + S3AEncryptionMethods.SSE_C + " encryption method");
+    }
   }
 
   @SuppressWarnings("deprecation")
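The "downgrade" named in the commit title comes from ContractTestUtils.skip(), which aborts the running test through a JUnit assumption, so runs against a mandatory-encryption bucket report these cases as skipped rather than failed. A small sketch of that JUnit 4 mechanism follows; the class and method names are illustrative, not part of the patch.

import org.junit.Assume;
import org.junit.Test;

public class SkipDowngradeSketch {
  @Test
  public void downgradesToSkipped() {
    // A failed assumption aborts the test and the runner reports it as skipped,
    // not failed; this is the behaviour skip() provides to the setup() methods
    // patched above when the bucket rejects the requested encryption method.
    Assume.assumeTrue("Bucket does not allow this encryption method", false);
  }
}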