diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
index 968de79945..a799e883bc 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
@@ -1002,6 +1002,8 @@ public class CommonConfigurationKeysPublic {
"ssl.keystore.pass$",
"fs.s3.*[Ss]ecret.?[Kk]ey",
"fs.s3a.*.server-side-encryption.key",
+ "fs.s3a.encryption.algorithm",
+ "fs.s3a.encryption.key",
"fs.azure\\.account.key.*",
"credential$",
"oauth.*secret",
diff --git a/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml b/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
index e1ddedd540..4d289a71b5 100644
--- a/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
+++ b/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
@@ -676,6 +676,8 @@
ssl.keystore.pass$
fs.s3a.server-side-encryption.key
fs.s3a.*.server-side-encryption.key
+ fs.s3a.encryption.algorithm
+ fs.s3a.encryption.key
fs.s3a.secret.key
fs.s3a.*.secret.key
fs.s3a.session.key
@@ -1578,20 +1580,22 @@
<property>
-  <name>fs.s3a.server-side-encryption-algorithm</name>
-  <description>Specify a server-side encryption algorithm for s3a: file system.
-    Unset by default. It supports the following values: 'AES256' (for SSE-S3),
-    'SSE-KMS' and 'SSE-C'.
+  <name>fs.s3a.encryption.algorithm</name>
+  <description>Specify a server-side encryption or client-side
+    encryption algorithm for s3a: file system. Unset by default. It supports the
+    following values: 'AES256' (for SSE-S3), 'SSE-KMS', 'SSE-C', and 'CSE-KMS'.
  </description>
</property>

<property>
-  <name>fs.s3a.server-side-encryption.key</name>
-  <description>Specific encryption key to use if fs.s3a.server-side-encryption-algorithm
-    has been set to 'SSE-KMS' or 'SSE-C'. In the case of SSE-C, the value of this property
-    should be the Base64 encoded key. If you are using SSE-KMS and leave this property empty,
-    you'll be using your default's S3 KMS key, otherwise you should set this property to
-    the specific KMS key id.
+  <name>fs.s3a.encryption.key</name>
+  <description>Specific encryption key to use if fs.s3a.encryption.algorithm
+    has been set to 'SSE-KMS', 'SSE-C' or 'CSE-KMS'. In the case of SSE-C, the
+    value of this property should be the Base64 encoded key. If you are using
+    SSE-KMS and leave this property empty, the default S3 KMS key of your
+    account and region is used; otherwise set this property to the specific
+    KMS key id. In the case of 'CSE-KMS' this value must be the AWS KMS key ID
+    generated from the AWS console.
  </description>
</property>
diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/Constants.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/Constants.java
index 8bd02919fe..56ea7d0006 100644
--- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/Constants.java
+++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/Constants.java
@@ -421,11 +421,12 @@ private Constants() {
public static final long DEFAULT_PURGE_EXISTING_MULTIPART_AGE = 86400;
/**
- * s3 server-side encryption or s3 client side encryption method, see
+ * s3 server-side encryption, see
* {@link S3AEncryptionMethods} for valid options.
*
* {@value}
*/
+ @Deprecated
public static final String SERVER_SIDE_ENCRYPTION_ALGORITHM =
"fs.s3a.server-side-encryption-algorithm";
@@ -449,9 +450,32 @@ private Constants() {
* May be set within a JCEKS file.
* Value: "{@value}".
*/
+ @Deprecated
public static final String SERVER_SIDE_ENCRYPTION_KEY =
"fs.s3a.server-side-encryption.key";
+ /**
+ * Set the S3 server-side encryption (SSE) or S3 client-side encryption (CSE)
+ * algorithm. Check {@link S3AEncryptionMethods} for valid options.
+ *
+ * Value: {@value}
+ */
+ public static final String S3_ENCRYPTION_ALGORITHM =
+ "fs.s3a.encryption.algorithm";
+
+ /**
+ * Set the S3-SSE or S3-CSE encryption key if required.
+ *
+ * Note:
+ *
+ * - In the case of S3-CSE this value must be set for CSE to work.
+ * - In the case of S3-SSE, see {@link #SERVER_SIDE_ENCRYPTION_KEY}.
+ *
+ * Value: {@value}
+ */
+ public static final String S3_ENCRYPTION_KEY =
+ "fs.s3a.encryption.key";
+
/**
* List of custom Signers. The signer class will be loaded, and the signer
* name will be associated with this signer class in the S3 SDK.
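As a point of reference (not part of the patch), here is a minimal sketch of how a client could drive the two new constants through a Hadoop `Configuration`. The class name is purely illustrative, and the KMS key ARN is the example value used in the documentation below.

```java
import org.apache.hadoop.conf.Configuration;

import static org.apache.hadoop.fs.s3a.Constants.S3_ENCRYPTION_ALGORITHM;
import static org.apache.hadoop.fs.s3a.Constants.S3_ENCRYPTION_KEY;

/** Illustrative only: build an SSE-KMS configuration with the renamed options. */
public final class S3AEncryptionConfigSketch {

  private S3AEncryptionConfigSketch() {
  }

  public static Configuration sseKmsConfiguration() {
    Configuration conf = new Configuration();
    // New property names; the fs.s3a.server-side-encryption-* names above are
    // retained only as deprecated aliases.
    conf.set(S3_ENCRYPTION_ALGORITHM, "SSE-KMS");
    conf.set(S3_ENCRYPTION_KEY,
        "arn:aws:kms:us-west-2:360379543683:key/071a86ff-8881-4ba0-9230-95af6d01ca01");
    return conf;
  }
}
```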
diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/DefaultS3ClientFactory.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/DefaultS3ClientFactory.java
index 2abef630a8..441ae70803 100644
--- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/DefaultS3ClientFactory.java
+++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/DefaultS3ClientFactory.java
@@ -58,8 +58,8 @@
import static org.apache.hadoop.fs.s3a.Constants.AWS_S3_CENTRAL_REGION;
import static org.apache.hadoop.fs.s3a.Constants.EXPERIMENTAL_AWS_INTERNAL_THROTTLING;
import static org.apache.hadoop.fs.s3a.Constants.EXPERIMENTAL_AWS_INTERNAL_THROTTLING_DEFAULT;
-import static org.apache.hadoop.fs.s3a.Constants.SERVER_SIDE_ENCRYPTION_ALGORITHM;
-import static org.apache.hadoop.fs.s3a.Constants.SERVER_SIDE_ENCRYPTION_KEY;
+import static org.apache.hadoop.fs.s3a.Constants.S3_ENCRYPTION_ALGORITHM;
+import static org.apache.hadoop.fs.s3a.Constants.S3_ENCRYPTION_KEY;
import static org.apache.hadoop.fs.s3a.S3AUtils.translateException;
/**
@@ -93,6 +93,9 @@ public class DefaultS3ClientFactory extends Configured
"S3A filesystem client is using"
+ " the SDK region resolution chain.";
+ /** Exactly-once log to note that AWS SDK warnings for CSE are being ignored. */
+ private static final LogExactlyOnce IGNORE_CSE_WARN = new LogExactlyOnce(LOG);
+
/**
* Create the client by preparing the AwsConf configuration
* and then invoking {@code buildAmazonS3Client()}.
@@ -125,7 +128,7 @@ public AmazonS3 createS3Client(
try {
if (S3AEncryptionMethods.getMethod(S3AUtils.
- lookupPassword(conf, SERVER_SIDE_ENCRYPTION_ALGORITHM, null))
+ lookupPassword(conf, S3_ENCRYPTION_ALGORITHM, null))
.equals(S3AEncryptionMethods.CSE_KMS)) {
return buildAmazonS3EncryptionClient(
awsConf,
@@ -162,10 +165,10 @@ protected AmazonS3 buildAmazonS3EncryptionClient(
//CSE-KMS Method
String kmsKeyId = S3AUtils.lookupPassword(conf,
- SERVER_SIDE_ENCRYPTION_KEY, null);
+ S3_ENCRYPTION_KEY, null);
// Check if kmsKeyID is not null
Preconditions.checkArgument(kmsKeyId != null, "CSE-KMS method "
- + "requires KMS key ID. Use " + SERVER_SIDE_ENCRYPTION_KEY
+ + "requires KMS key ID. Use " + S3_ENCRYPTION_KEY
+ " property to set it. ");
EncryptionMaterialsProvider materialsProvider =
@@ -191,6 +194,8 @@ protected AmazonS3 buildAmazonS3EncryptionClient(
}
builder.withCryptoConfiguration(cryptoConfigurationV2);
client = builder.build();
+ IGNORE_CSE_WARN.info("S3 client-side encryption enabled: Ignore S3-CSE "
+ + "Warnings.");
return client;
}
diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java
index fc31cfbe87..bc3e7ea5a5 100644
--- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java
+++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java
@@ -373,7 +373,13 @@ private static void addDeprecatedKeys() {
Configuration.DeprecationDelta[] deltas = {
new Configuration.DeprecationDelta(
FS_S3A_COMMITTER_STAGING_ABORT_PENDING_UPLOADS,
- FS_S3A_COMMITTER_ABORT_PENDING_UPLOADS)
+ FS_S3A_COMMITTER_ABORT_PENDING_UPLOADS),
+ new Configuration.DeprecationDelta(
+ SERVER_SIDE_ENCRYPTION_ALGORITHM,
+ S3_ENCRYPTION_ALGORITHM),
+ new Configuration.DeprecationDelta(
+ SERVER_SIDE_ENCRYPTION_KEY,
+ S3_ENCRYPTION_KEY)
};
if (deltas.length > 0) {
@@ -436,7 +442,8 @@ public void initialize(URI name, Configuration originalConf)
initializeStatisticsBinding();
// If CSE-KMS method is set then CSE is enabled.
isCSEEnabled = S3AUtils.lookupPassword(conf,
- SERVER_SIDE_ENCRYPTION_ALGORITHM, null) != null;
+ Constants.S3_ENCRYPTION_ALGORITHM, "")
+ .equals(S3AEncryptionMethods.CSE_KMS.getMethod());
LOG.debug("Client Side Encryption enabled: {}", isCSEEnabled);
setCSEGauge();
// Username is the current user at the time the FS was instantiated.
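The `DeprecationDelta` entries above are what keep existing deployments working: a configuration that still sets the old property names stays readable through the new ones. A rough sketch of that behaviour, assuming the deltas are registered from a static initializer (as the `addDeprecatedKeys()` hunk above suggests):

```java
import org.apache.hadoop.conf.Configuration;

/** Illustrative only: legacy encryption options resolve via the new names. */
public final class DeprecatedEncryptionKeyDemo {
  public static void main(String[] args) throws Exception {
    // Force class initialization so the deprecation deltas (assumed to be
    // installed by a static initializer in S3AFileSystem) are registered.
    Class.forName("org.apache.hadoop.fs.s3a.S3AFileSystem");
    Configuration conf = new Configuration(false);
    conf.set("fs.s3a.server-side-encryption-algorithm", "SSE-KMS"); // legacy name
    // The same value is visible under the new name; Hadoop logs a
    // deprecation warning for the old one.
    System.out.println(conf.get("fs.s3a.encryption.algorithm")); // SSE-KMS
  }
}
```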
diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AUtils.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AUtils.java
index 16baa3c3b2..5b504a94ee 100644
--- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AUtils.java
+++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AUtils.java
@@ -125,14 +125,14 @@ public final class S3AUtils {
public static final String SSE_C_NO_KEY_ERROR =
S3AEncryptionMethods.SSE_C.getMethod()
+ " is enabled but no encryption key was declared in "
- + SERVER_SIDE_ENCRYPTION_KEY;
+ + Constants.S3_ENCRYPTION_KEY;
/**
* Encryption SSE-S3 is used but the caller also set an encryption key.
*/
public static final String SSE_S3_WITH_KEY_ERROR =
S3AEncryptionMethods.SSE_S3.getMethod()
+ " is enabled but an encryption key was set in "
- + SERVER_SIDE_ENCRYPTION_KEY;
+ + Constants.S3_ENCRYPTION_KEY;
public static final String EOF_MESSAGE_IN_XML_PARSER
= "Failed to sanitize XML document destined for handler class";
@@ -1581,9 +1581,9 @@ static void patchSecurityCredentialProviders(Configuration conf) {
public static String getS3EncryptionKey(String bucket,
Configuration conf) {
try {
- return lookupPassword(bucket, conf, SERVER_SIDE_ENCRYPTION_KEY);
+ return lookupPassword(bucket, conf, Constants.S3_ENCRYPTION_KEY);
} catch (IOException e) {
- LOG.error("Cannot retrieve " + SERVER_SIDE_ENCRYPTION_KEY, e);
+ LOG.error("Cannot retrieve " + Constants.S3_ENCRYPTION_KEY, e);
return "";
}
}
@@ -1603,7 +1603,7 @@ public static S3AEncryptionMethods getEncryptionAlgorithm(String bucket,
Configuration conf) throws IOException {
S3AEncryptionMethods encryptionMethod = S3AEncryptionMethods.getMethod(
lookupPassword(bucket, conf,
- SERVER_SIDE_ENCRYPTION_ALGORITHM));
+ Constants.S3_ENCRYPTION_ALGORITHM));
String encryptionKey = getS3EncryptionKey(bucket, conf);
int encryptionKeyLen =
StringUtils.isBlank(encryptionKey) ? 0 : encryptionKey.length();
diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/s3guard/S3GuardTool.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/s3guard/S3GuardTool.java
index ef47564355..b0b3a8c80e 100644
--- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/s3guard/S3GuardTool.java
+++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/s3guard/S3GuardTool.java
@@ -39,6 +39,7 @@
import java.util.stream.Collectors;
import com.amazonaws.services.s3.model.MultipartUpload;
+
import org.apache.hadoop.thirdparty.com.google.common.annotations.VisibleForTesting;
import org.apache.hadoop.thirdparty.com.google.common.base.Preconditions;
import org.slf4j.Logger;
@@ -54,6 +55,7 @@
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FilterFileSystem;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.fs.s3a.Constants;
import org.apache.hadoop.fs.s3a.MultipartUtils;
import org.apache.hadoop.fs.s3a.S3AFileStatus;
import org.apache.hadoop.fs.s3a.S3AFileSystem;
@@ -1348,7 +1350,7 @@ public int run(String[] args, PrintStream out)
ENDPOINT,
StringUtils.isNotEmpty(endpoint) ? endpoint : "(unset)");
String encryption =
- printOption(out, "\tEncryption", SERVER_SIDE_ENCRYPTION_ALGORITHM,
+ printOption(out, "\tEncryption", Constants.S3_ENCRYPTION_ALGORITHM,
"none");
printOption(out, "\tInput seek policy", INPUT_FADVISE, INPUT_FADV_NORMAL);
printOption(out, "\tChange Detection Source", CHANGE_DETECT_SOURCE,
diff --git a/hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/encryption.md b/hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/encryption.md
index 5fa6a3096b..ccdfeeda7e 100644
--- a/hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/encryption.md
+++ b/hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/encryption.md
@@ -146,12 +146,12 @@ There is no extra cost for storing data with this option.
### Enabling SSE-S3
To write S3-SSE encrypted files, the value of
-`fs.s3a.server-side-encryption-algorithm` must be set to that of
+`fs.s3a.encryption.algorithm` must be set to that of
the encryption mechanism used in `core-site`; currently only `AES256` is supported.
```xml
<property>
-  <name>fs.s3a.server-side-encryption-algorithm</name>
+  <name>fs.s3a.encryption.algorithm</name>
  <value>AES256</value>
</property>
```
@@ -177,7 +177,7 @@ The AWS KMS [can be used encrypt data on S3uploaded data](http://docs.aws.amazon
When uploading data encrypted with SSE-KMS, the sequence is as follows.
-1. The S3A client must declare a specific CMK in the property `fs.s3a.server-side-encryption.key`, or leave
+1. The S3A client must declare a specific CMK in the property `fs.s3a.encryption.key`, or leave
it blank to use the default configured for that region.
1. The S3A client uploads all the data as normal, now including encryption information.
@@ -221,32 +221,32 @@ they can be increased through the AWS console.
### Enabling SSE-KMS
-To enable SSE-KMS, the property `fs.s3a.server-side-encryption-algorithm` must be set to `SSE-KMS` in `core-site`:
+To enable SSE-KMS, the property `fs.s3a.encryption.algorithm` must be set to `SSE-KMS` in `core-site`:
```xml
<property>
-  <name>fs.s3a.server-side-encryption-algorithm</name>
+  <name>fs.s3a.encryption.algorithm</name>
  <value>SSE-KMS</value>
</property>
```
-The ID of the specific key used to encrypt the data should also be set in the property `fs.s3a.server-side-encryption.key`:
+The ID of the specific key used to encrypt the data should also be set in the property `fs.s3a.encryption.key`:
```xml
<property>
-  <name>fs.s3a.server-side-encryption.key</name>
+  <name>fs.s3a.encryption.key</name>
  <value>arn:aws:kms:us-west-2:360379543683:key/071a86ff-8881-4ba0-9230-95af6d01ca01</value>
</property>
```
Organizations may define a default key in the Amazon KMS; if a default key is set,
-then it will be used whenever SSE-KMS encryption is chosen and the value of `fs.s3a.server-side-encryption.key` is empty.
+then it will be used whenever SSE-KMS encryption is chosen and the value of `fs.s3a.encryption.key` is empty.
### the S3A `fs.s3a.encryption.key` key only affects created files
-With SSE-KMS, the S3A client option `fs.s3a.server-side-encryption.key` sets the
+With SSE-KMS, the S3A client option `fs.s3a.encryption.key` sets the
key to be used when new files are created. When reading files, this key,
-and indeed the value of `fs.s3a.server-side-encryption-algorithme` is ignored:
+and indeed the value of `fs.s3a.encryption.algorithm` is ignored:
S3 will attempt to retrieve the key and decrypt the file based on the create-time settings.
This means that
@@ -270,18 +270,18 @@ directory listings do not fail with "Bad Request" errors.
### Enabling SSE-C
-To use SSE-C, the configuration option `fs.s3a.server-side-encryption-algorithm`
+To use SSE-C, the configuration option `fs.s3a.encryption.algorithm`
must be set to `SSE-C`, and a base-64 encoding of the key placed in
-`fs.s3a.server-side-encryption.key`.
+`fs.s3a.encryption.key`.
```xml
<property>
-  <name>fs.s3a.server-side-encryption-algorithm</name>
+  <name>fs.s3a.encryption.algorithm</name>
  <value>SSE-C</value>
</property>

<property>
-  <name>fs.s3a.server-side-encryption.key</name>
+  <name>fs.s3a.encryption.key</name>
  <value>SGVscCwgSSdtIHRyYXBwZWQgaW5zaWRlIGEgYmFzZS02NC1jb2RlYyE=</value>
</property>
```
@@ -290,11 +290,11 @@ All clients must share this same key.
### The `fs.s3a.encryption.key` value is used to read and write data
-With SSE-C, the S3A client option `fs.s3a.server-side-encryption.key` sets the
+With SSE-C, the S3A client option `fs.s3a.encryption.key` sets the
key to be used for both reading *and* writing data.
When reading any file written with SSE-C, the same key must be set
-in the property `fs.s3a.server-side-encryption.key`.
+in the property `fs.s3a.encryption.key`.
This is unlike SSE-S3 and SSE-KMS, where the information needed to
decode data is kept in AWS infrastructure.
@@ -618,8 +618,8 @@ clients where S3-CSE has not been enabled.
- Generate an AWS KMS Key ID from AWS console for your bucket, with same
region as the storage bucket.
- If already created, [view the kms key ID by these steps.](https://docs.aws.amazon.com/kms/latest/developerguide/find-cmk-id-arn.html)
-- Set `fs.s3a.server-side-encryption-algorithm=CSE-KMS`.
-- Set `fs.s3a.server-side-encryption.key=<KMS_KEY_ID>`.
+- Set `fs.s3a.encryption.algorithm=CSE-KMS`.
+- Set `fs.s3a.encryption.key=<KMS_KEY_ID>`.
KMS_KEY_ID:
@@ -634,18 +634,18 @@ For example:
- Alias name: `alias/ExampleAlias`
- Alias ARN: `arn:aws:kms:us-east-2:111122223333:alias/ExampleAlias`
-*Note:* If `fs.s3a.server-side-encryption-algorithm=CSE-KMS` is set,
-`fs.s3a.server-side-encryption.key=<KMS_KEY_ID>` property must be set for
+*Note:* If `fs.s3a.encryption.algorithm=CSE-KMS` is set,
+`fs.s3a.encryption.key=<KMS_KEY_ID>` property must be set for
S3-CSE to work.
```xml
<property>
-  <name>fs.s3a.server-side-encryption-algorithm</name>
+  <name>fs.s3a.encryption.algorithm</name>
  <value>CSE-KMS</value>
</property>

<property>
-  <name>fs.s3a.server-side-encryption.key</name>
+  <name>fs.s3a.encryption.key</name>
  <value>${KMS_KEY_ID}</value>
</property>
```
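To complement the XML above, a hedged Java sketch of the same CSE-KMS setup applied programmatically; the bucket name and file path are hypothetical, and the KMS key ARN is the example ARN used elsewhere in these docs:

```java
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

/** Illustrative only: write one file through S3A with CSE-KMS enabled. */
public final class CseKmsWriteSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    conf.set("fs.s3a.encryption.algorithm", "CSE-KMS");
    // Must be a KMS key ID/ARN in the same region as the bucket.
    conf.set("fs.s3a.encryption.key",
        "arn:aws:kms:us-west-2:360379543683:key/071a86ff-8881-4ba0-9230-95af6d01ca01");
    try (FileSystem fs = FileSystem.newInstance(
        new URI("s3a://example-bucket/"), conf)) {
      try (FSDataOutputStream out = fs.create(new Path("/encrypted/hello.txt"))) {
        out.writeUTF("encrypted on the client before upload");
      }
    }
  }
}
```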
diff --git a/hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/index.md b/hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/index.md
index 8b963c3b77..f4f7144f34 100644
--- a/hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/index.md
+++ b/hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/index.md
@@ -647,7 +647,7 @@ files in local or Hadoop filesystems, and including them in requests.
The S3A configuration options with sensitive data
(`fs.s3a.secret.key`, `fs.s3a.access.key`, `fs.s3a.session.token`
-and `fs.s3a.server-side-encryption.key`) can
+and `fs.s3a.encryption.key`) can
have their data saved to a binary file stored, with the values being read in
when the S3A filesystem URL is used for data access. The reference to this
credential provider then declared in the Hadoop configuration.
@@ -663,8 +663,8 @@ stores.
fs.s3a.access.key
fs.s3a.secret.key
fs.s3a.session.token
-fs.s3a.server-side-encryption.key
-fs.s3a.server-side-encryption-algorithm
+fs.s3a.encryption.key
+fs.s3a.encryption.algorithm
```
The first three are for authentication; the final two for
@@ -969,20 +969,23 @@ options are covered in [Testing](./testing.md).
<property>
-  <name>fs.s3a.server-side-encryption-algorithm</name>
-  <description>Specify a server-side encryption algorithm for s3a: file system.
-    Unset by default. It supports the following values: 'AES256' (for SSE-S3), 'SSE-KMS'
-    and 'SSE-C'
+  <name>fs.s3a.encryption.algorithm</name>
+  <description>Specify a server-side encryption or client-side
+    encryption algorithm for s3a: file system. Unset by default. It supports the
+    following values: 'AES256' (for SSE-S3), 'SSE-KMS', 'SSE-C', and 'CSE-KMS'.
  </description>
</property>

<property>
-  <name>fs.s3a.server-side-encryption.key</name>
-  <description>Specific encryption key to use if fs.s3a.server-side-encryption-algorithm
-    has been set to 'SSE-KMS' or 'SSE-C'. In the case of SSE-C, the value of this property
-    should be the Base64 encoded key. If you are using SSE-KMS and leave this property empty,
-    you'll be using your default's S3 KMS key, otherwise you should set this property to
-    the specific KMS key id.
+  <name>fs.s3a.encryption.key</name>
+  <description>Specific encryption key to use if fs.s3a.encryption.algorithm
+    has been set to 'SSE-KMS', 'SSE-C' or 'CSE-KMS'. In the case of SSE-C, the
+    value of this property should be the Base64 encoded key. If you are using
+    SSE-KMS and leave this property empty, the default S3 KMS key of your
+    account and region is used; otherwise set this property to the specific
+    KMS key id. In the case of 'CSE-KMS' this value must be the AWS KMS key ID
+    generated from the AWS console.
  </description>
</property>
+
@@ -1436,7 +1439,8 @@ Consider a JCEKS file with six keys:
```
fs.s3a.access.key
fs.s3a.secret.key
-fs.s3a.server-side-encryption-algorithm
+fs.s3a.encryption.algorithm
+fs.s3a.encryption.key
fs.s3a.bucket.nightly.access.key
fs.s3a.bucket.nightly.secret.key
fs.s3a.bucket.nightly.session.token
diff --git a/hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/testing.md b/hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/testing.md
index 69f589ba79..e0a90a243c 100644
--- a/hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/testing.md
+++ b/hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/testing.md
@@ -150,7 +150,7 @@ Example:
### Configuring S3a Encryption
For S3a encryption tests to run correctly, the
-`fs.s3a.server-side-encryption.key` must be configured in the s3a contract xml
+`fs.s3a.encryption.key` must be configured in the s3a contract xml
file or `auth-keys.xml` file with a AWS KMS encryption key arn as this value is
different for each AWS KMS. Please note this KMS key should be created in the
same region as your S3 bucket. Otherwise, you may get `KMS.NotFoundException`.
@@ -159,13 +159,13 @@ Example:
```xml
<property>
-  <name>fs.s3a.server-side-encryption.key</name>
+  <name>fs.s3a.encryption.key</name>
  <value>arn:aws:kms:us-west-2:360379543683:key/071a86ff-8881-4ba0-9230-95af6d01ca01</value>
</property>
```
You can also force all the tests to run with a specific SSE encryption method
-by configuring the property `fs.s3a.server-side-encryption-algorithm` in the s3a
+by configuring the property `fs.s3a.encryption.algorithm` in the s3a
contract file.
### Default Encryption
@@ -1466,7 +1466,7 @@ as it may take a couple of SDK updates before it is ready.
1. Do a clean build and rerun all the `hadoop-aws` tests, with and without the `-Ds3guard -Ddynamo` options.
This includes the `-Pscale` set, with a role defined for the assumed role tests.
in `fs.s3a.assumed.role.arn` for testing assumed roles,
- and `fs.s3a.server-side-encryption.key` for encryption, for full coverage.
+ and `fs.s3a.encryption.key` for encryption, for full coverage.
If you can, scale up the scale tests.
1. Run the `ILoadTest*` load tests from your IDE or via maven through
`mvn verify -Dtest=skip -Dit.test=ILoadTest\*` ; look for regressions in performance
@@ -1482,6 +1482,9 @@ as it may take a couple of SDK updates before it is ready.
Examine the `target/dependencies.txt` file to verify that no new
artifacts have unintentionally been declared as dependencies
of the shaded `aws-java-sdk-bundle` artifact.
+1. Run a full AWS test suite with S3 client-side encryption enabled by
+ setting `fs.s3a.encryption.algorithm` to 'CSE-KMS' and the AWS KMS key
+ ID in `fs.s3a.encryption.key`.
### Basic command line regression testing
diff --git a/hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/troubleshooting_s3a.md b/hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/troubleshooting_s3a.md
index 33dd165499..6f55d9effe 100644
--- a/hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/troubleshooting_s3a.md
+++ b/hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/troubleshooting_s3a.md
@@ -1209,12 +1209,12 @@ KMS key ID is required for CSE-KMS to encrypt data, not providing one leads
```
2021-07-07 11:33:04,550 WARN fs.FileSystem: Failed to initialize fileystem
s3a://ap-south-cse/: java.lang.IllegalArgumentException: CSE-KMS
-method requires KMS key ID. Use fs.s3a.server-side-encryption.key property to set it.
--ls: CSE-KMS method requires KMS key ID. Use fs.s3a.server-side-encryption.key property to
+method requires KMS key ID. Use fs.s3a.encryption.key property to set it.
+-ls: CSE-KMS method requires KMS key ID. Use fs.s3a.encryption.key property to
set it.
```
-set `fs.s3a.server-side-encryption.key=<KMS_KEY_ID>` generated through AWS console.
+set `fs.s3a.encryption.key=<KMS_KEY_ID>` generated through AWS console.
### `com.amazonaws.services.kms.model.IncorrectKeyException` The key ID in the request does not identify a CMK that can perform this operation.
@@ -1354,7 +1354,7 @@ work.
### com.amazonaws.services.kms.model.NotFoundException: Invalid keyId
-If the value in `fs.s3a.server-side-encryption.key` property, does not exist
+If the value in the `fs.s3a.encryption.key` property does not exist
/valid in AWS KMS CMK(Customer managed keys), then this error would be seen.
```
@@ -1390,7 +1390,7 @@ Caused by: com.amazonaws.services.kms.model.NotFoundException: Invalid keyId abc
... 49 more
```
-Check if `fs.s3a.server-side-encryption.key` is set correctly and matches the
+Check if `fs.s3a.encryption.key` is set correctly and matches the
same on AWS console.
### com.amazonaws.services.kms.model.AWSKMSException: User: is not authorized to perform : kms :GenerateDataKey on resource:
@@ -1431,7 +1431,7 @@ User: arn:aws:iam::152813717728:user/ is not authorized to perform: kms:Ge
```
The user trying to use the KMS Key ID should have the right permissions to access
-(encrypt/decrypt) using the AWS KMS Key used via `fs.s3a.server-side-encryption.key`.
+(encrypt/decrypt) using the AWS KMS Key used via `fs.s3a.encryption.key`.
If not, then add permission(or IAM role) in "Key users" section by selecting the
AWS-KMS CMK Key on AWS console.
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/AbstractS3AMockTest.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/AbstractS3AMockTest.java
index afc444f20a..5765fe471c 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/AbstractS3AMockTest.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/AbstractS3AMockTest.java
@@ -59,7 +59,7 @@ public void setup() throws Exception {
fs = new S3AFileSystem();
URI uri = URI.create(FS_S3A + "://" + BUCKET);
// unset S3CSE property from config to avoid pathIOE.
- conf.unset(SERVER_SIDE_ENCRYPTION_ALGORITHM);
+ conf.unset(Constants.S3_ENCRYPTION_ALGORITHM);
fs.initialize(uri, conf);
s3 = fs.getAmazonS3ClientForTesting("mocking");
}
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/AbstractTestS3AEncryption.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/AbstractTestS3AEncryption.java
index 7704fa8372..8e3208ce09 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/AbstractTestS3AEncryption.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/AbstractTestS3AEncryption.java
@@ -28,6 +28,8 @@
import org.apache.hadoop.fs.s3a.auth.delegation.EncryptionSecrets;
import static org.apache.hadoop.fs.contract.ContractTestUtils.*;
+import static org.apache.hadoop.fs.s3a.Constants.S3_ENCRYPTION_ALGORITHM;
+import static org.apache.hadoop.fs.s3a.Constants.S3_ENCRYPTION_KEY;
import static org.apache.hadoop.fs.s3a.Constants.SERVER_SIDE_ENCRYPTION_ALGORITHM;
import static org.apache.hadoop.fs.s3a.Constants.SERVER_SIDE_ENCRYPTION_KEY;
import static org.apache.hadoop.fs.s3a.S3ATestUtils.removeBaseAndBucketOverrides;
@@ -52,17 +54,20 @@ protected Configuration createConfiguration() {
/**
* This removes the encryption settings from the
* configuration and then sets the
- * fs.s3a.server-side-encryption-algorithm value to
+ * fs.s3a.encryption.algorithm value to
* be that of {@code getSSEAlgorithm()}.
* Called in {@code createConfiguration()}.
* @param conf configuration to patch.
*/
+ @SuppressWarnings("deprecation")
protected void patchConfigurationEncryptionSettings(
final Configuration conf) {
removeBaseAndBucketOverrides(conf,
+ S3_ENCRYPTION_ALGORITHM,
+ S3_ENCRYPTION_KEY,
SERVER_SIDE_ENCRYPTION_ALGORITHM,
SERVER_SIDE_ENCRYPTION_KEY);
- conf.set(SERVER_SIDE_ENCRYPTION_ALGORITHM,
+ conf.set(S3_ENCRYPTION_ALGORITHM,
getSSEAlgorithm().getMethod());
}
@@ -159,7 +164,7 @@ protected String createFilename(String name) {
protected void assertEncrypted(Path path) throws IOException {
//S3 will return full arn of the key, so specify global arn in properties
String kmsKeyArn = this.getConfiguration().
- getTrimmed(SERVER_SIDE_ENCRYPTION_KEY);
+ getTrimmed(S3_ENCRYPTION_KEY);
S3AEncryptionMethods algorithm = getSSEAlgorithm();
EncryptionTestUtils.assertEncrypted(getFileSystem(),
path,
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/EncryptionTestUtils.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/EncryptionTestUtils.java
index 4fc03e0b2e..4013e9db29 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/EncryptionTestUtils.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/EncryptionTestUtils.java
@@ -27,7 +27,7 @@
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
-import static org.apache.hadoop.fs.s3a.Constants.SERVER_SIDE_ENCRYPTION_KEY;
+import static org.apache.hadoop.fs.s3a.Constants.S3_ENCRYPTION_KEY;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
@@ -51,7 +51,7 @@ private EncryptionTestUtils() {
*/
public static String convertKeyToMd5(FileSystem fs) {
String base64Key = fs.getConf().getTrimmed(
- SERVER_SIDE_ENCRYPTION_KEY
+ S3_ENCRYPTION_KEY
);
byte[] key = Base64.decodeBase64(base64Key);
byte[] md5 = DigestUtils.md5(key);
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AClientSideEncryption.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AClientSideEncryption.java
index 880f032d47..bb052ed3d1 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AClientSideEncryption.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AClientSideEncryption.java
@@ -43,8 +43,8 @@
import static org.apache.hadoop.fs.contract.ContractTestUtils.verifyFileContents;
import static org.apache.hadoop.fs.contract.ContractTestUtils.writeDataset;
import static org.apache.hadoop.fs.s3a.Constants.MULTIPART_MIN_SIZE;
-import static org.apache.hadoop.fs.s3a.Constants.SERVER_SIDE_ENCRYPTION_ALGORITHM;
-import static org.apache.hadoop.fs.s3a.Constants.SERVER_SIDE_ENCRYPTION_KEY;
+import static org.apache.hadoop.fs.s3a.Constants.S3_ENCRYPTION_ALGORITHM;
+import static org.apache.hadoop.fs.s3a.Constants.S3_ENCRYPTION_KEY;
import static org.apache.hadoop.fs.s3a.S3ATestUtils.assume;
import static org.apache.hadoop.fs.s3a.S3ATestUtils.getTestPropertyBool;
import static org.apache.hadoop.test.LambdaTestUtils.intercept;
@@ -203,8 +203,8 @@ public void testEncryptionEnabledAndDisabledFS() throws Exception {
Path encryptedFilePath = path(getMethodName() + "cse");
// Initialize a CSE disabled FS.
- cseDisabledConf.unset(SERVER_SIDE_ENCRYPTION_ALGORITHM);
- cseDisabledConf.unset(SERVER_SIDE_ENCRYPTION_KEY);
+ cseDisabledConf.unset(S3_ENCRYPTION_ALGORITHM);
+ cseDisabledConf.unset(S3_ENCRYPTION_KEY);
cseDisabledFS.initialize(getFileSystem().getUri(),
cseDisabledConf);
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AClientSideEncryptionKms.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AClientSideEncryptionKms.java
index 35f648fd34..085c0f9ee3 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AClientSideEncryptionKms.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AClientSideEncryptionKms.java
@@ -28,8 +28,8 @@
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.s3a.impl.HeaderProcessing;
+import static org.apache.hadoop.fs.s3a.S3ATestUtils.skipIfEncryptionNotSet;
import static org.apache.hadoop.fs.s3a.S3ATestUtils.skipIfEncryptionTestsDisabled;
-import static org.apache.hadoop.fs.s3a.S3ATestUtils.skipIfKmsKeyIdIsNotSet;
/**
* Testing the S3 CSE - KMS method.
@@ -55,7 +55,8 @@ protected Configuration createConfiguration() {
@Override
protected void maybeSkipTest() {
skipIfEncryptionTestsDisabled(getConfiguration());
- skipIfKmsKeyIdIsNotSet(getConfiguration());
+ // skip the test if the algorithm is not CSE-KMS or no KMS key is set.
+ skipIfEncryptionNotSet(getConfiguration(), S3AEncryptionMethods.CSE_KMS);
}
@Override
@@ -71,7 +72,7 @@ protected void assertEncrypted(Path path) throws IOException {
// Assert content encryption algo for KMS, is present in the
// materials description and KMS key ID isn't.
String keyId =
- getConfiguration().get(Constants.SERVER_SIDE_ENCRYPTION_KEY);
+ getConfiguration().get(Constants.S3_ENCRYPTION_KEY);
Assertions.assertThat(processHeader(fsXAttrs,
xAttrPrefix + Headers.MATERIALS_DESCRIPTION))
.describedAs("Materials Description should contain the content "
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionAlgorithmValidation.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionAlgorithmValidation.java
index 40fa0cb719..7e6aeb2eb0 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionAlgorithmValidation.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionAlgorithmValidation.java
@@ -42,23 +42,23 @@ public class ITestS3AEncryptionAlgorithmValidation
public void testEncryptionAlgorithmSetToDES() throws Throwable {
//skip tests if they aren't enabled
assumeEnabled();
- intercept(IOException.class, "Unknown Server Side algorithm DES", () -> {
+ intercept(IOException.class, "Unknown encryption algorithm DES", () -> {
- Configuration conf = super.createConfiguration();
- //DES is an invalid encryption algorithm
- conf.set(Constants.SERVER_SIDE_ENCRYPTION_ALGORITHM, "DES");
- S3AContract contract = (S3AContract) createContract(conf);
- contract.init();
- //extract the test FS
- FileSystem fileSystem = contract.getTestFileSystem();
- assertNotNull("null filesystem", fileSystem);
- URI fsURI = fileSystem.getUri();
- LOG.info("Test filesystem = {} implemented by {}", fsURI, fileSystem);
- assertEquals("wrong filesystem of " + fsURI,
- contract.getScheme(), fsURI.getScheme());
- fileSystem.initialize(fsURI, conf);
- throw new Exception("Do not reach here");
- });
+ Configuration conf = super.createConfiguration();
+ //DES is an invalid encryption algorithm
+ conf.set(Constants.S3_ENCRYPTION_ALGORITHM, "DES");
+ S3AContract contract = (S3AContract) createContract(conf);
+ contract.init();
+ //extract the test FS
+ FileSystem fileSystem = contract.getTestFileSystem();
+ assertNotNull("null filesystem", fileSystem);
+ URI fsURI = fileSystem.getUri();
+ LOG.info("Test filesystem = {} implemented by {}", fsURI, fileSystem);
+ assertEquals("wrong filesystem of " + fsURI,
+ contract.getScheme(), fsURI.getScheme());
+ fileSystem.initialize(fsURI, conf);
+ return fileSystem;
+ });
}
@Test
@@ -67,25 +67,25 @@ public void testEncryptionAlgorithmSSECWithNoEncryptionKey() throws
//skip tests if they aren't enabled
assumeEnabled();
intercept(IllegalArgumentException.class, "The value of property " +
- Constants.SERVER_SIDE_ENCRYPTION_KEY + " must not be null", () -> {
+ Constants.S3_ENCRYPTION_KEY + " must not be null", () -> {
- Configuration conf = super.createConfiguration();
- //SSE-C must be configured with an encryption key
- conf.set(Constants.SERVER_SIDE_ENCRYPTION_ALGORITHM,
- S3AEncryptionMethods.SSE_C.getMethod());
- conf.set(Constants.SERVER_SIDE_ENCRYPTION_KEY, null);
- S3AContract contract = (S3AContract) createContract(conf);
- contract.init();
- //extract the test FS
- FileSystem fileSystem = contract.getTestFileSystem();
- assertNotNull("null filesystem", fileSystem);
- URI fsURI = fileSystem.getUri();
- LOG.info("Test filesystem = {} implemented by {}", fsURI, fileSystem);
- assertEquals("wrong filesystem of " + fsURI,
- contract.getScheme(), fsURI.getScheme());
- fileSystem.initialize(fsURI, conf);
- throw new Exception("Do not reach here");
- });
+ Configuration conf = super.createConfiguration();
+ //SSE-C must be configured with an encryption key
+ conf.set(Constants.S3_ENCRYPTION_ALGORITHM,
+ S3AEncryptionMethods.SSE_C.getMethod());
+ conf.set(Constants.S3_ENCRYPTION_KEY, null);
+ S3AContract contract = (S3AContract) createContract(conf);
+ contract.init();
+ //extract the test FS
+ FileSystem fileSystem = contract.getTestFileSystem();
+ assertNotNull("null filesystem", fileSystem);
+ URI fsURI = fileSystem.getUri();
+ LOG.info("Test filesystem = {} implemented by {}", fsURI, fileSystem);
+ assertEquals("wrong filesystem of " + fsURI,
+ contract.getScheme(), fsURI.getScheme());
+ fileSystem.initialize(fsURI, conf);
+ return fileSystem;
+ });
}
@Test
@@ -93,23 +93,23 @@ public void testEncryptionAlgorithmSSECWithBlankEncryptionKey() throws
Throwable {
intercept(IOException.class, S3AUtils.SSE_C_NO_KEY_ERROR, () -> {
- Configuration conf = super.createConfiguration();
- //SSE-C must be configured with an encryption key
- conf.set(Constants.SERVER_SIDE_ENCRYPTION_ALGORITHM,
- S3AEncryptionMethods.SSE_C.getMethod());
- conf.set(Constants.SERVER_SIDE_ENCRYPTION_KEY, "");
- S3AContract contract = (S3AContract) createContract(conf);
- contract.init();
- //extract the test FS
- FileSystem fileSystem = contract.getTestFileSystem();
- assertNotNull("null filesystem", fileSystem);
- URI fsURI = fileSystem.getUri();
- LOG.info("Test filesystem = {} implemented by {}", fsURI, fileSystem);
- assertEquals("wrong filesystem of " + fsURI,
- contract.getScheme(), fsURI.getScheme());
- fileSystem.initialize(fsURI, conf);
- throw new Exception("Do not reach here");
- });
+ Configuration conf = super.createConfiguration();
+ //SSE-C must be configured with an encryption key
+ conf.set(Constants.S3_ENCRYPTION_ALGORITHM,
+ S3AEncryptionMethods.SSE_C.getMethod());
+ conf.set(Constants.S3_ENCRYPTION_KEY, "");
+ S3AContract contract = (S3AContract) createContract(conf);
+ contract.init();
+ //extract the test FS
+ FileSystem fileSystem = contract.getTestFileSystem();
+ assertNotNull("null filesystem", fileSystem);
+ URI fsURI = fileSystem.getUri();
+ LOG.info("Test filesystem = {} implemented by {}", fsURI, fileSystem);
+ assertEquals("wrong filesystem of " + fsURI,
+ contract.getScheme(), fsURI.getScheme());
+ fileSystem.initialize(fsURI, conf);
+ return fileSystem;
+ });
}
@Test
@@ -119,24 +119,24 @@ public void testEncryptionAlgorithmSSES3WithEncryptionKey() throws
assumeEnabled();
intercept(IOException.class, S3AUtils.SSE_S3_WITH_KEY_ERROR, () -> {
- Configuration conf = super.createConfiguration();
- //SSE-S3 cannot be configured with an encryption key
- conf.set(Constants.SERVER_SIDE_ENCRYPTION_ALGORITHM,
- S3AEncryptionMethods.SSE_S3.getMethod());
- conf.set(Constants.SERVER_SIDE_ENCRYPTION_KEY,
- "4niV/jPK5VFRHY+KNb6wtqYd4xXyMgdJ9XQJpcQUVbs=");
- S3AContract contract = (S3AContract) createContract(conf);
- contract.init();
- //extract the test FS
- FileSystem fileSystem = contract.getTestFileSystem();
- assertNotNull("null filesystem", fileSystem);
- URI fsURI = fileSystem.getUri();
- LOG.info("Test filesystem = {} implemented by {}", fsURI, fileSystem);
- assertEquals("wrong filesystem of " + fsURI,
- contract.getScheme(), fsURI.getScheme());
- fileSystem.initialize(fsURI, conf);
- throw new Exception("Do not reach here");
- });
+ Configuration conf = super.createConfiguration();
+ //SSE-S3 cannot be configured with an encryption key
+ conf.set(Constants.S3_ENCRYPTION_ALGORITHM,
+ S3AEncryptionMethods.SSE_S3.getMethod());
+ conf.set(Constants.S3_ENCRYPTION_KEY,
+ "4niV/jPK5VFRHY+KNb6wtqYd4xXyMgdJ9XQJpcQUVbs=");
+ S3AContract contract = (S3AContract) createContract(conf);
+ contract.init();
+ //extract the test FS
+ FileSystem fileSystem = contract.getTestFileSystem();
+ assertNotNull("null filesystem", fileSystem);
+ URI fsURI = fileSystem.getUri();
+ LOG.info("Test filesystem = {} implemented by {}", fsURI, fileSystem);
+ assertEquals("wrong filesystem of " + fsURI,
+ contract.getScheme(), fsURI.getScheme());
+ fileSystem.initialize(fsURI, conf);
+ return fileSystem;
+ });
}
/**
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionSSEC.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionSSEC.java
index 852b49ac1e..ff46e981ea 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionSSEC.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionSSEC.java
@@ -42,6 +42,8 @@
import static org.apache.hadoop.fs.s3a.Constants.DIRECTORY_MARKER_POLICY_KEEP;
import static org.apache.hadoop.fs.s3a.Constants.ETAG_CHECKSUM_ENABLED;
import static org.apache.hadoop.fs.s3a.Constants.S3_METADATA_STORE_IMPL;
+import static org.apache.hadoop.fs.s3a.Constants.S3_ENCRYPTION_ALGORITHM;
+import static org.apache.hadoop.fs.s3a.Constants.S3_ENCRYPTION_KEY;
import static org.apache.hadoop.fs.s3a.Constants.SERVER_SIDE_ENCRYPTION_ALGORITHM;
import static org.apache.hadoop.fs.s3a.Constants.SERVER_SIDE_ENCRYPTION_KEY;
import static org.apache.hadoop.fs.s3a.S3ATestUtils.*;
@@ -105,6 +107,7 @@ public ITestS3AEncryptionSSEC(final String name,
this.keepMarkers = keepMarkers;
}
+ @SuppressWarnings("deprecation")
@Override
protected Configuration createConfiguration() {
Configuration conf = super.createConfiguration();
@@ -121,15 +124,17 @@ protected Configuration createConfiguration() {
removeBaseAndBucketOverrides(bucketName, conf,
DIRECTORY_MARKER_POLICY,
ETAG_CHECKSUM_ENABLED,
+ S3_ENCRYPTION_ALGORITHM,
+ S3_ENCRYPTION_KEY,
SERVER_SIDE_ENCRYPTION_ALGORITHM,
SERVER_SIDE_ENCRYPTION_KEY);
conf.set(DIRECTORY_MARKER_POLICY,
keepMarkers
? DIRECTORY_MARKER_POLICY_KEEP
: DIRECTORY_MARKER_POLICY_DELETE);
- conf.set(SERVER_SIDE_ENCRYPTION_ALGORITHM,
+ conf.set(S3_ENCRYPTION_ALGORITHM,
getSSEAlgorithm().getMethod());
- conf.set(SERVER_SIDE_ENCRYPTION_KEY, KEY_1);
+ conf.set(S3_ENCRYPTION_KEY, KEY_1);
conf.setBoolean(ETAG_CHECKSUM_ENABLED, true);
return conf;
}
@@ -251,8 +256,8 @@ public void testListEncryptedDir() throws Exception {
fsKeyB.listFiles(pathABC, false);
Configuration conf = this.createConfiguration();
- conf.unset(SERVER_SIDE_ENCRYPTION_ALGORITHM);
- conf.unset(SERVER_SIDE_ENCRYPTION_KEY);
+ conf.unset(S3_ENCRYPTION_ALGORITHM);
+ conf.unset(S3_ENCRYPTION_KEY);
S3AContract contract = (S3AContract) createContract(conf);
contract.init();
@@ -286,8 +291,8 @@ public void testListStatusEncryptedDir() throws Exception {
//Now try it with an unencrypted filesystem.
Configuration conf = createConfiguration();
- conf.unset(SERVER_SIDE_ENCRYPTION_ALGORITHM);
- conf.unset(SERVER_SIDE_ENCRYPTION_KEY);
+ conf.unset(S3_ENCRYPTION_ALGORITHM);
+ conf.unset(S3_ENCRYPTION_KEY);
S3AContract contract = (S3AContract) createContract(conf);
contract.init();
@@ -385,7 +390,7 @@ public void testChecksumRequiresReadAccess() throws Throwable {
private S3AFileSystem createNewFileSystemWithSSECKey(String sseCKey) throws
IOException {
Configuration conf = this.createConfiguration();
- conf.set(SERVER_SIDE_ENCRYPTION_KEY, sseCKey);
+ conf.set(S3_ENCRYPTION_KEY, sseCKey);
S3AContract contract = (S3AContract) createContract(conf);
contract.init();
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionSSEKMSDefaultKey.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionSSEKMSDefaultKey.java
index 855c3b77ac..68ab5bd9e8 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionSSEKMSDefaultKey.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionSSEKMSDefaultKey.java
@@ -40,7 +40,7 @@ public class ITestS3AEncryptionSSEKMSDefaultKey
@Override
protected Configuration createConfiguration() {
Configuration conf = super.createConfiguration();
- conf.set(Constants.SERVER_SIDE_ENCRYPTION_KEY, "");
+ conf.set(Constants.S3_ENCRYPTION_KEY, "");
return conf;
}
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionSSEKMSUserDefinedKey.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionSSEKMSUserDefinedKey.java
index 3a8cf7a11d..c281ae1047 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionSSEKMSUserDefinedKey.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionSSEKMSUserDefinedKey.java
@@ -18,13 +18,11 @@
package org.apache.hadoop.fs.s3a;
-import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
-import static org.apache.hadoop.fs.contract.ContractTestUtils.skip;
-import static org.apache.hadoop.fs.s3a.Constants.SERVER_SIDE_ENCRYPTION_ALGORITHM;
-import static org.apache.hadoop.fs.s3a.Constants.SERVER_SIDE_ENCRYPTION_KEY;
+import static org.apache.hadoop.fs.s3a.Constants.S3_ENCRYPTION_KEY;
import static org.apache.hadoop.fs.s3a.S3AEncryptionMethods.SSE_KMS;
+import static org.apache.hadoop.fs.s3a.S3ATestUtils.skipIfEncryptionNotSet;
/**
* Concrete class that extends {@link AbstractTestS3AEncryption}
@@ -38,15 +36,11 @@ public class ITestS3AEncryptionSSEKMSUserDefinedKey
protected Configuration createConfiguration() {
// get the KMS key for this test.
Configuration c = new Configuration();
- String kmsKey = c.get(SERVER_SIDE_ENCRYPTION_KEY);
- if (StringUtils.isBlank(kmsKey) || !c.get(SERVER_SIDE_ENCRYPTION_ALGORITHM)
- .equals(S3AEncryptionMethods.CSE_KMS.name())) {
- skip(SERVER_SIDE_ENCRYPTION_KEY + " is not set for " +
- SSE_KMS.getMethod() + " or CSE-KMS algorithm is used instead of "
- + "SSE-KMS");
- }
+ String kmsKey = c.get(S3_ENCRYPTION_KEY);
+ // skip the test if the algorithm is not SSE-KMS or no KMS key is set.
+ skipIfEncryptionNotSet(c, getSSEAlgorithm());
Configuration conf = super.createConfiguration();
- conf.set(SERVER_SIDE_ENCRYPTION_KEY, kmsKey);
+ conf.set(S3_ENCRYPTION_KEY, kmsKey);
return conf;
}
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionSSES3.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionSSES3.java
index 33a252a68b..93b8814962 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionSSES3.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionSSES3.java
@@ -32,7 +32,7 @@ protected Configuration createConfiguration() {
S3ATestUtils.disableFilesystemCaching(conf);
//must specify encryption key as empty because SSE-S3 does not allow it,
//nor can it be null.
- conf.set(Constants.SERVER_SIDE_ENCRYPTION_KEY, "");
+ conf.set(Constants.S3_ENCRYPTION_KEY, "");
return conf;
}
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionWithDefaultS3Settings.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionWithDefaultS3Settings.java
index c7a62a39c9..0f4882553e 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionWithDefaultS3Settings.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AEncryptionWithDefaultS3Settings.java
@@ -33,11 +33,12 @@
import static org.apache.hadoop.fs.contract.ContractTestUtils.dataset;
import static org.apache.hadoop.fs.contract.ContractTestUtils.skip;
import static org.apache.hadoop.fs.contract.ContractTestUtils.writeDataset;
-import static org.apache.hadoop.fs.s3a.Constants.SERVER_SIDE_ENCRYPTION_ALGORITHM;
-import static org.apache.hadoop.fs.s3a.Constants.SERVER_SIDE_ENCRYPTION_KEY;
+import static org.apache.hadoop.fs.s3a.Constants.S3_ENCRYPTION_ALGORITHM;
+import static org.apache.hadoop.fs.s3a.Constants.S3_ENCRYPTION_KEY;
import static org.apache.hadoop.fs.s3a.EncryptionTestUtils.AWS_KMS_SSE_ALGORITHM;
import static org.apache.hadoop.fs.s3a.S3AEncryptionMethods.SSE_KMS;
import static org.apache.hadoop.fs.s3a.S3ATestUtils.removeBaseAndBucketOverrides;
+import static org.apache.hadoop.fs.s3a.S3ATestUtils.skipIfEncryptionNotSet;
/**
* Concrete class that extends {@link AbstractTestS3AEncryption}
@@ -56,21 +57,15 @@ public void setup() throws Exception {
// get the KMS key for this test.
S3AFileSystem fs = getFileSystem();
Configuration c = fs.getConf();
- String kmsKey = c.get(SERVER_SIDE_ENCRYPTION_KEY);
- if (StringUtils.isBlank(kmsKey) || !c.get(SERVER_SIDE_ENCRYPTION_ALGORITHM)
- .equals(S3AEncryptionMethods.CSE_KMS.name())) {
- skip(SERVER_SIDE_ENCRYPTION_KEY + " is not set for " +
- SSE_KMS.getMethod() + " or CSE-KMS algorithm is used instead of "
- + "SSE-KMS");
- }
+ skipIfEncryptionNotSet(c, getSSEAlgorithm());
}
@Override
protected void patchConfigurationEncryptionSettings(
final Configuration conf) {
removeBaseAndBucketOverrides(conf,
- SERVER_SIDE_ENCRYPTION_ALGORITHM);
- conf.set(SERVER_SIDE_ENCRYPTION_ALGORITHM,
+ S3_ENCRYPTION_ALGORITHM);
+ conf.set(S3_ENCRYPTION_ALGORITHM,
getSSEAlgorithm().getMethod());
}
@@ -94,7 +89,7 @@ protected S3AEncryptionMethods getSSEAlgorithm() {
protected void assertEncrypted(Path path) throws IOException {
S3AFileSystem fs = getFileSystem();
Configuration c = fs.getConf();
- String kmsKey = c.getTrimmed(SERVER_SIDE_ENCRYPTION_KEY);
+ String kmsKey = c.getTrimmed(S3_ENCRYPTION_KEY);
EncryptionTestUtils.assertEncrypted(fs, path, SSE_KMS, kmsKey);
}
@@ -142,7 +137,7 @@ public void testEncryptionOverRename2() throws Throwable {
// fs2 conf will always use SSE-KMS
Configuration fs2Conf = new Configuration(fs.getConf());
- fs2Conf.set(SERVER_SIDE_ENCRYPTION_ALGORITHM,
+ fs2Conf.set(S3_ENCRYPTION_ALGORITHM,
S3AEncryptionMethods.SSE_KMS.getMethod());
try (FileSystem kmsFS = FileSystem.newInstance(fs.getUri(), fs2Conf)) {
Path targetDir = path("target");
@@ -150,7 +145,7 @@ public void testEncryptionOverRename2() throws Throwable {
ContractTestUtils.rename(kmsFS, src, targetDir);
Path renamedFile = new Path(targetDir, src.getName());
ContractTestUtils.verifyFileContents(fs, renamedFile, data);
- String kmsKey = fs2Conf.getTrimmed(SERVER_SIDE_ENCRYPTION_KEY);
+ String kmsKey = fs2Conf.getTrimmed(S3_ENCRYPTION_KEY);
// we assert that the renamed file has picked up the KMS key of our FS
EncryptionTestUtils.assertEncrypted(fs, renamedFile, SSE_KMS, kmsKey);
}
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AMiscOperations.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AMiscOperations.java
index daefa783ae..b4d2527a46 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AMiscOperations.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/ITestS3AMiscOperations.java
@@ -46,6 +46,8 @@
import static org.apache.hadoop.fs.contract.ContractTestUtils.assertLacksPathCapabilities;
import static org.apache.hadoop.fs.contract.ContractTestUtils.createFile;
import static org.apache.hadoop.fs.contract.ContractTestUtils.touch;
+import static org.apache.hadoop.fs.s3a.Constants.S3_ENCRYPTION_ALGORITHM;
+import static org.apache.hadoop.fs.s3a.Constants.S3_ENCRYPTION_KEY;
import static org.apache.hadoop.fs.s3a.Constants.SERVER_SIDE_ENCRYPTION_ALGORITHM;
import static org.apache.hadoop.fs.s3a.Constants.SERVER_SIDE_ENCRYPTION_KEY;
import static org.apache.hadoop.fs.s3a.S3ATestUtils.removeBaseAndBucketOverrides;
@@ -68,10 +70,13 @@ public void setup() throws Exception {
enableChecksums(true);
}
+ @SuppressWarnings("deprecation")
@Override
protected Configuration createConfiguration() {
final Configuration conf = super.createConfiguration();
removeBaseAndBucketOverrides(conf,
+ S3_ENCRYPTION_ALGORITHM,
+ S3_ENCRYPTION_KEY,
SERVER_SIDE_ENCRYPTION_ALGORITHM,
SERVER_SIDE_ENCRYPTION_KEY);
return conf;
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestUtils.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestUtils.java
index 695fc286ff..707cf356ac 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestUtils.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/S3ATestUtils.java
@@ -248,7 +248,7 @@ public static FileContext createTestFileContext(Configuration conf)
*/
private static void skipIfS3GuardAndS3CSEEnabled(Configuration conf) {
String encryptionMethod =
- conf.getTrimmed(SERVER_SIDE_ENCRYPTION_ALGORITHM, "");
+ conf.getTrimmed(Constants.S3_ENCRYPTION_ALGORITHM, "");
String metaStore = conf.getTrimmed(S3_METADATA_STORE_IMPL, "");
if (encryptionMethod.equals(S3AEncryptionMethods.CSE_KMS.getMethod()) &&
!metaStore.equals(S3GUARD_METASTORE_NULL)) {
@@ -1533,14 +1533,20 @@ public static S3AFileStatus innerGetFileStatus(
}
/**
- * Skip a test if CSE KMS key id is not set.
+ * Skip a test if encryption algorithm or encryption key is not set.
*
* @param configuration configuration to probe.
*/
- public static void skipIfKmsKeyIdIsNotSet(Configuration configuration) {
- if (configuration.get(
- SERVER_SIDE_ENCRYPTION_KEY) == null) {
- skip("AWS KMS key id is not set");
+ public static void skipIfEncryptionNotSet(Configuration configuration,
+ S3AEncryptionMethods s3AEncryptionMethod) {
+ // if S3 encryption algorithm is not set to desired method or AWS encryption
+ // key is not set, then skip.
+ if (!configuration.getTrimmed(S3_ENCRYPTION_ALGORITHM, "")
+ .equals(s3AEncryptionMethod.getMethod())
+ || configuration.get(Constants.S3_ENCRYPTION_KEY) == null) {
+ skip(S3_ENCRYPTION_KEY + " is not set for " + s3AEncryptionMethod
+ .getMethod() + " or " + S3_ENCRYPTION_ALGORITHM + " is not set to "
+ + s3AEncryptionMethod.getMethod());
}
}
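A short, hedged sketch of how a test could use the new `skipIfEncryptionNotSet` helper outside the classes modified in this patch; the wrapper class is hypothetical:

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.s3a.S3AEncryptionMethods;

import static org.apache.hadoop.fs.s3a.S3ATestUtils.skipIfEncryptionNotSet;

/** Illustrative only: guard a KMS-dependent test with the new skip helper. */
public final class EncryptionSkipSketch {

  private EncryptionSkipSketch() {
  }

  /** Call from a test's setup; the test is skipped unless SSE-KMS is configured. */
  public static void requireSseKms(Configuration conf) {
    // Skips unless fs.s3a.encryption.algorithm == SSE-KMS and
    // fs.s3a.encryption.key is set.
    skipIfEncryptionNotSet(conf, S3AEncryptionMethods.SSE_KMS);
  }
}
```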
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestSSEConfiguration.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestSSEConfiguration.java
index 273cf8b7c2..6985fa44c3 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestSSEConfiguration.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestSSEConfiguration.java
@@ -104,10 +104,10 @@ public void testSSEKeyFromCredentialProvider() throws Exception {
// set up conf to have a cred provider
final Configuration conf = confWithProvider();
String key = "provisioned";
- setProviderOption(conf, SERVER_SIDE_ENCRYPTION_KEY, key);
+ setProviderOption(conf, Constants.S3_ENCRYPTION_KEY, key);
// let's set the password in config and ensure that it uses the credential
// provider provisioned value instead.
- conf.set(SERVER_SIDE_ENCRYPTION_KEY, "keyInConfObject");
+ conf.set(Constants.S3_ENCRYPTION_KEY, "keyInConfObject");
String sseKey = getS3EncryptionKey(BUCKET, conf);
assertNotNull("Proxy password should not retrun null.", sseKey);
@@ -178,17 +178,20 @@ private S3AEncryptionMethods getAlgorithm(String algorithm, String key)
* @param key key, may be null
* @return the new config.
*/
+ @SuppressWarnings("deprecation")
private Configuration buildConf(String algorithm, String key) {
Configuration conf = emptyConf();
if (algorithm != null) {
- conf.set(SERVER_SIDE_ENCRYPTION_ALGORITHM, algorithm);
+ conf.set(Constants.S3_ENCRYPTION_ALGORITHM, algorithm);
} else {
conf.unset(SERVER_SIDE_ENCRYPTION_ALGORITHM);
+ conf.unset(Constants.S3_ENCRYPTION_ALGORITHM);
}
if (key != null) {
- conf.set(SERVER_SIDE_ENCRYPTION_KEY, key);
+ conf.set(Constants.S3_ENCRYPTION_KEY, key);
} else {
conf.unset(SERVER_SIDE_ENCRYPTION_KEY);
+ conf.unset(Constants.S3_ENCRYPTION_KEY);
}
return conf;
}
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/auth/delegation/ITestSessionDelegationInFileystem.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/auth/delegation/ITestSessionDelegationInFileystem.java
index 126f8b3c99..b3fc5de492 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/auth/delegation/ITestSessionDelegationInFileystem.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/auth/delegation/ITestSessionDelegationInFileystem.java
@@ -38,6 +38,7 @@
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.contract.ContractTestUtils;
import org.apache.hadoop.fs.s3a.AWSCredentialProviderList;
+import org.apache.hadoop.fs.s3a.Constants;
import org.apache.hadoop.fs.s3a.DefaultS3ClientFactory;
import org.apache.hadoop.fs.s3a.Invoker;
import org.apache.hadoop.fs.s3a.S3AEncryptionMethods;
@@ -135,6 +136,7 @@ public Text getTokenKind() {
return SESSION_TOKEN_KIND;
}
+ @SuppressWarnings("deprecation")
@Override
protected Configuration createConfiguration() {
Configuration conf = super.createConfiguration();
@@ -142,11 +144,13 @@ protected Configuration createConfiguration() {
assumeSessionTestsEnabled(conf);
disableFilesystemCaching(conf);
String s3EncryptionMethod =
- conf.getTrimmed(SERVER_SIDE_ENCRYPTION_ALGORITHM,
+ conf.getTrimmed(Constants.S3_ENCRYPTION_ALGORITHM,
S3AEncryptionMethods.SSE_KMS.getMethod());
- String s3EncryptionKey = conf.getTrimmed(SERVER_SIDE_ENCRYPTION_KEY, "");
+ String s3EncryptionKey = conf.getTrimmed(Constants.S3_ENCRYPTION_KEY, "");
removeBaseAndBucketOverrides(conf,
DELEGATION_TOKEN_BINDING,
+ Constants.S3_ENCRYPTION_ALGORITHM,
+ Constants.S3_ENCRYPTION_KEY,
SERVER_SIDE_ENCRYPTION_ALGORITHM,
SERVER_SIDE_ENCRYPTION_KEY);
conf.set(HADOOP_SECURITY_AUTHENTICATION,
@@ -155,9 +159,9 @@ protected Configuration createConfiguration() {
conf.set(AWS_CREDENTIALS_PROVIDER, " ");
// switch to CSE-KMS(if specified) else SSE-KMS.
if (conf.getBoolean(KEY_ENCRYPTION_TESTS, true)) {
- conf.set(SERVER_SIDE_ENCRYPTION_ALGORITHM, s3EncryptionMethod);
+ conf.set(Constants.S3_ENCRYPTION_ALGORITHM, s3EncryptionMethod);
// KMS key ID a must if CSE-KMS is being tested.
- conf.set(SERVER_SIDE_ENCRYPTION_KEY, s3EncryptionKey);
+ conf.set(Constants.S3_ENCRYPTION_KEY, s3EncryptionKey);
}
// set the YARN RM up for YARN tests.
conf.set(YarnConfiguration.RM_PRINCIPAL, YARN_RM);
@@ -310,6 +314,7 @@ protected Credentials createDelegationTokens() throws IOException {
* Create a FS with a delegated token, verify it works as a filesystem,
* and that you can pick up the same DT from that FS too.
*/
+ @SuppressWarnings("deprecation")
@Test
public void testDelegatedFileSystem() throws Throwable {
describe("Delegation tokens can be passed to a new filesystem;"
@@ -348,6 +353,8 @@ public void testDelegatedFileSystem() throws Throwable {
    // this is to better simulate a remote deployment.
removeBaseAndBucketOverrides(bucket, conf,
ACCESS_KEY, SECRET_KEY, SESSION_TOKEN,
+ Constants.S3_ENCRYPTION_ALGORITHM,
+ Constants.S3_ENCRYPTION_KEY,
SERVER_SIDE_ENCRYPTION_ALGORITHM,
SERVER_SIDE_ENCRYPTION_KEY,
DELEGATION_TOKEN_ROLE_ARN,
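
Because removeBaseAndBucketOverrides() now strips both the old and the new property names, a per-bucket override such as fs.s3a.bucket.<name>.encryption.algorithm can no longer win over the algorithm the test re-applies. A rough sketch of pointing this suite at CSE-KMS instead of its SSE-KMS default follows, assuming the test configuration is an ordinary Hadoop Configuration; the KMS key id passed in is a placeholder supplied by the caller, not part of this patch.

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.s3a.Constants;
    import org.apache.hadoop.fs.s3a.S3AEncryptionMethods;

    public final class CseKmsTestConfigSketch {
      private CseKmsTestConfigSketch() {
      }

      /** Apply CSE-KMS settings to a test configuration. */
      public static Configuration withCseKms(Configuration conf, String kmsKeyId) {
        // Select client-side encryption; the test keeps SSE-KMS when this is unset.
        conf.set(Constants.S3_ENCRYPTION_ALGORITHM,
            S3AEncryptionMethods.CSE_KMS.getMethod());
        // CSE-KMS requires an explicit AWS KMS key id.
        conf.set(Constants.S3_ENCRYPTION_KEY, kmsKeyId);
        return conf;
      }
    }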
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/fileContext/ITestS3AFileContextStatistics.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/fileContext/ITestS3AFileContextStatistics.java
index 20ffd0f20d..ca8ce15996 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/fileContext/ITestS3AFileContextStatistics.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/fileContext/ITestS3AFileContextStatistics.java
@@ -32,8 +32,8 @@
import org.junit.Assert;
import org.junit.Before;
-import static org.apache.hadoop.fs.s3a.Constants.SERVER_SIDE_ENCRYPTION_ALGORITHM;
-import static org.apache.hadoop.fs.s3a.Constants.SERVER_SIDE_ENCRYPTION_KEY;
+import static org.apache.hadoop.fs.s3a.Constants.S3_ENCRYPTION_ALGORITHM;
+import static org.apache.hadoop.fs.s3a.Constants.S3_ENCRYPTION_KEY;
import static org.apache.hadoop.fs.s3a.S3ATestConstants.KMS_KEY_GENERATION_REQUEST_PARAMS_BYTES_WRITTEN;
import static org.apache.hadoop.fs.s3a.impl.InternalConstants.CSE_PADDING_LENGTH;
@@ -86,9 +86,9 @@ protected void verifyReadBytes(FileSystem.Statistics stats) {
protected void verifyWrittenBytes(FileSystem.Statistics stats) {
//No extra bytes are written
long expectedBlockSize = blockSize;
- if (conf.get(SERVER_SIDE_ENCRYPTION_ALGORITHM, "")
+ if (conf.get(S3_ENCRYPTION_ALGORITHM, "")
.equals(S3AEncryptionMethods.CSE_KMS.getMethod())) {
- String keyId = conf.get(SERVER_SIDE_ENCRYPTION_KEY, "");
+ String keyId = conf.get(S3_ENCRYPTION_KEY, "");
// Adding padding length and KMS key generation bytes written.
expectedBlockSize += CSE_PADDING_LENGTH + keyId.getBytes().length +
KMS_KEY_GENERATION_REQUEST_PARAMS_BYTES_WRITTEN;
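
Under CSE-KMS the write statistics count more than the plaintext block: a fixed client-side-encryption padding, the key id bytes attributed to the KMS request, and a constant charged for the GenerateDataKey call. A small sketch of the same accounting is shown below, keeping the constants symbolic since their concrete values live in InternalConstants and S3ATestConstants.

    import static org.apache.hadoop.fs.s3a.S3ATestConstants.KMS_KEY_GENERATION_REQUEST_PARAMS_BYTES_WRITTEN;
    import static org.apache.hadoop.fs.s3a.impl.InternalConstants.CSE_PADDING_LENGTH;

    final class CseExpectedBytesSketch {
      /** Bytes expected in the write statistics for one block under CSE-KMS. */
      static long expectedBytes(long blockSize, String kmsKeyId) {
        return blockSize
            + CSE_PADDING_LENGTH                               // client-side encryption padding
            + kmsKeyId.getBytes().length                       // key id bytes in the KMS request
            + KMS_KEY_GENERATION_REQUEST_PARAMS_BYTES_WRITTEN; // fixed GenerateDataKey overhead
      }
    }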
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/AbstractSTestS3AHugeFiles.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/AbstractSTestS3AHugeFiles.java
index 3e00917a2d..2dd92bbfc8 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/AbstractSTestS3AHugeFiles.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/AbstractSTestS3AHugeFiles.java
@@ -38,6 +38,7 @@
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.contract.ContractTestUtils;
+import org.apache.hadoop.fs.s3a.Constants;
import org.apache.hadoop.fs.s3a.S3AFileSystem;
import org.apache.hadoop.fs.s3a.S3ATestUtils;
import org.apache.hadoop.fs.s3a.Statistic;
@@ -402,7 +403,7 @@ public void test_030_postCreationAssertions() throws Throwable {
public void test_040_PositionedReadHugeFile() throws Throwable {
assumeHugeFileExists();
final String encryption = getConf().getTrimmed(
- SERVER_SIDE_ENCRYPTION_ALGORITHM);
+ Constants.S3_ENCRYPTION_ALGORITHM);
boolean encrypted = encryption != null;
if (encrypted) {
LOG.info("File is encrypted with algorithm {}", encryption);
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3AHugeFilesEncryption.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3AHugeFilesEncryption.java
index be51b2fa09..9325feb784 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3AHugeFilesEncryption.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3AHugeFilesEncryption.java
@@ -20,22 +20,19 @@
import java.io.IOException;
-import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.s3a.Constants;
import org.apache.hadoop.fs.s3a.EncryptionTestUtils;
-import org.apache.hadoop.fs.s3a.S3AEncryptionMethods;
import org.apache.hadoop.fs.s3a.S3AFileSystem;
-import static org.apache.hadoop.fs.contract.ContractTestUtils.skip;
-import static org.apache.hadoop.fs.s3a.Constants.SERVER_SIDE_ENCRYPTION_ALGORITHM;
-import static org.apache.hadoop.fs.s3a.Constants.SERVER_SIDE_ENCRYPTION_KEY;
+import static org.apache.hadoop.fs.s3a.Constants.S3_ENCRYPTION_KEY;
import static org.apache.hadoop.fs.s3a.S3AEncryptionMethods.SSE_KMS;
+import static org.apache.hadoop.fs.s3a.S3ATestUtils.skipIfEncryptionNotSet;
/**
* Class to test SSE_KMS encryption settings for huge files.
- * Tests will only run if value of {@link Constants#SERVER_SIDE_ENCRYPTION_KEY}
+ * Tests will only run if value of {@link Constants#S3_ENCRYPTION_KEY}
* is set in the configuration. The testing bucket must be configured with this
 * same key, otherwise the tests may fail.
*/
@@ -44,13 +41,7 @@ public class ITestS3AHugeFilesEncryption extends AbstractSTestS3AHugeFiles {
@Override
public void setup() throws Exception {
Configuration c = new Configuration();
- String kmsKey = c.get(SERVER_SIDE_ENCRYPTION_KEY);
- if (StringUtils.isBlank(kmsKey) || !c.get(SERVER_SIDE_ENCRYPTION_ALGORITHM)
- .equals(S3AEncryptionMethods.CSE_KMS.name())) {
- skip(SERVER_SIDE_ENCRYPTION_KEY + " is not set for " +
- SSE_KMS.getMethod() + " or CSE-KMS algorithm is used instead of "
- + "SSE-KMS");
- }
+ skipIfEncryptionNotSet(c, SSE_KMS);
super.setup();
}
@@ -61,19 +52,19 @@ protected String getBlockOutputBufferName() {
/**
 * @param fileSystem the filesystem under test (unused; the key is read from a fresh Configuration)
- * @return true if {@link Constants#SERVER_SIDE_ENCRYPTION_KEY} is set
+ * @return true if {@link Constants#S3_ENCRYPTION_KEY} is set
* in the config.
*/
@Override
protected boolean isEncrypted(S3AFileSystem fileSystem) {
Configuration c = new Configuration();
- return c.get(SERVER_SIDE_ENCRYPTION_KEY) != null;
+ return c.get(S3_ENCRYPTION_KEY) != null;
}
@Override
protected void assertEncrypted(Path hugeFile) throws IOException {
Configuration c = new Configuration();
- String kmsKey = c.get(SERVER_SIDE_ENCRYPTION_KEY);
+ String kmsKey = c.get(S3_ENCRYPTION_KEY);
EncryptionTestUtils.assertEncrypted(getFileSystem(), hugeFile,
SSE_KMS, kmsKey);
}
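
In setup() above, the hand-rolled skip logic is folded into the shared S3ATestUtils.skipIfEncryptionNotSet(c, SSE_KMS) helper. A rough sketch of what such a guard typically checks appears below, assuming JUnit 4's Assume; this is an illustration, not the actual S3ATestUtils implementation.

    import org.apache.commons.lang3.StringUtils;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.s3a.Constants;
    import org.apache.hadoop.fs.s3a.S3AEncryptionMethods;
    import org.junit.Assume;

    final class EncryptionGuardSketch {
      /** Skip the calling test unless the given algorithm and a key are configured. */
      static void skipUnlessConfigured(Configuration conf,
          S3AEncryptionMethods method) {
        String algorithm = conf.getTrimmed(Constants.S3_ENCRYPTION_ALGORITHM, "");
        String key = conf.getTrimmed(Constants.S3_ENCRYPTION_KEY, "");
        Assume.assumeTrue(
            "Test requires " + method.getMethod() + " and a configured key",
            method.getMethod().equals(algorithm) && StringUtils.isNotBlank(key));
      }
    }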
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3AHugeFilesSSECDiskBlocks.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3AHugeFilesSSECDiskBlocks.java
index a1c5c3f4b6..d2e04d928d 100644
--- a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3AHugeFilesSSECDiskBlocks.java
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/scale/ITestS3AHugeFilesSSECDiskBlocks.java
@@ -23,6 +23,8 @@
import org.apache.hadoop.fs.s3a.S3AEncryptionMethods;
import org.apache.hadoop.fs.s3a.S3ATestUtils;
+import static org.apache.hadoop.fs.s3a.Constants.S3_ENCRYPTION_ALGORITHM;
+import static org.apache.hadoop.fs.s3a.Constants.S3_ENCRYPTION_KEY;
import static org.apache.hadoop.fs.s3a.Constants.SERVER_SIDE_ENCRYPTION_ALGORITHM;
import static org.apache.hadoop.fs.s3a.Constants.SERVER_SIDE_ENCRYPTION_KEY;
import static org.apache.hadoop.fs.s3a.S3ATestUtils.removeBaseAndBucketOverrides;
@@ -45,15 +47,17 @@ public void setup() throws Exception {
skipIfEncryptionTestsDisabled(getConfiguration());
}
+ @SuppressWarnings("deprecation")
@Override
protected Configuration createScaleConfiguration() {
Configuration conf = super.createScaleConfiguration();
- removeBaseAndBucketOverrides(conf, SERVER_SIDE_ENCRYPTION_KEY,
- SERVER_SIDE_ENCRYPTION_ALGORITHM);
+ removeBaseAndBucketOverrides(conf, S3_ENCRYPTION_KEY,
+ S3_ENCRYPTION_ALGORITHM, SERVER_SIDE_ENCRYPTION_ALGORITHM,
+ SERVER_SIDE_ENCRYPTION_KEY);
S3ATestUtils.disableFilesystemCaching(conf);
- conf.set(Constants.SERVER_SIDE_ENCRYPTION_ALGORITHM,
+ conf.set(Constants.S3_ENCRYPTION_ALGORITHM,
getSSEAlgorithm().getMethod());
- conf.set(Constants.SERVER_SIDE_ENCRYPTION_KEY, KEY_1);
+ conf.set(Constants.S3_ENCRYPTION_KEY, KEY_1);
return conf;
}
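
For SSE-C, the value wired into fs.s3a.encryption.key (KEY_1 in this test) must be a Base64-encoded 256-bit key supplied by the client on every request. Below is a minimal, JDK-only sketch of producing such a key for a test configuration; how the printed value is stored (auth-keys.xml, a credential provider, etc.) is a deployment choice and not part of this patch.

    import java.security.NoSuchAlgorithmException;
    import java.util.Base64;
    import javax.crypto.KeyGenerator;

    public final class SseCKeySketch {
      /** Generate a Base64-encoded 256-bit AES key suitable for SSE-C testing. */
      public static String newSseCKey() throws NoSuchAlgorithmException {
        KeyGenerator generator = KeyGenerator.getInstance("AES");
        generator.init(256);
        return Base64.getEncoder()
            .encodeToString(generator.generateKey().getEncoded());
      }

      public static void main(String[] args) throws NoSuchAlgorithmException {
        // Use the printed value as fs.s3a.encryption.key when
        // fs.s3a.encryption.algorithm is set to SSE-C.
        System.out.println(newSseCKey());
      }
    }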