diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index 14cd75a8ca..16002d5307 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -1050,6 +1050,8 @@ Release 2.7.0 - UNRELEASED
HADOOP-11674. oneByteBuf in CryptoInputStream and CryptoOutputStream
should be non static. (Sean Busbey via yliu)
+ HADOOP-11670. Regression: s3a auth setup broken. (Adam Budde via stevel)
+
Release 2.6.1 - UNRELEASED
INCOMPATIBLE CHANGES
diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/Constants.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/Constants.java
index e7462dc11f..3486dfbedf 100644
--- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/Constants.java
+++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/Constants.java
@@ -18,8 +18,12 @@
package org.apache.hadoop.fs.s3a;
-
public class Constants {
+  // s3 access key
+  public static final String ACCESS_KEY = "fs.s3a.access.key";
+
+  // s3 secret key
+  public static final String SECRET_KEY = "fs.s3a.secret.key";
  // number of simultaneous connections to s3
  public static final String MAXIMUM_CONNECTIONS = "fs.s3a.connection.maximum";
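For reference, a minimal sketch (not part of the patch; the class name here is invented for illustration) of how the two new constants are resolved through Hadoop's `Configuration` API, mirroring what `S3AFileSystem.initialize()` does further down in this change:

    import org.apache.hadoop.conf.Configuration;
    import static org.apache.hadoop.fs.s3a.Constants.*;

    // Illustrative only: resolve the renamed credential properties with a
    // null default, meaning "fall back to the other credential providers".
    public class S3aKeyLookupSketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        String accessKey = conf.get(ACCESS_KEY, null);   // fs.s3a.access.key
        String secretKey = conf.get(SECRET_KEY, null);   // fs.s3a.secret.key
        // Never log the actual values; only report whether they are set.
        System.out.println("access key set: " + (accessKey != null)
            + ", secret key set: " + (secretKey != null));
      }
    }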
diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java
index 1a30d6f340..91a606cf1f 100644
--- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java
+++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java
@@ -32,8 +32,6 @@
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
-import org.apache.hadoop.fs.s3.S3Credentials;
-
import com.amazonaws.AmazonClientException;
import com.amazonaws.AmazonServiceException;
import com.amazonaws.ClientConfiguration;
@@ -159,12 +157,22 @@ public void initialize(URI name, Configuration conf) throws IOException {
      this.getWorkingDirectory());
    // Try to get our credentials or just connect anonymously
-    S3Credentials s3Credentials = new S3Credentials();
-    s3Credentials.initialize(name, conf);
+    String accessKey = conf.get(ACCESS_KEY, null);
+    String secretKey = conf.get(SECRET_KEY, null);
+
+    String userInfo = name.getUserInfo();
+    if (userInfo != null) {
+      int index = userInfo.indexOf(':');
+      if (index != -1) {
+        accessKey = userInfo.substring(0, index);
+        secretKey = userInfo.substring(index + 1);
+      } else {
+        accessKey = userInfo;
+      }
+    }
    AWSCredentialsProviderChain credentials = new AWSCredentialsProviderChain(
-        new BasicAWSCredentialsProvider(s3Credentials.getAccessKey(),
-            s3Credentials.getSecretAccessKey()),
+        new BasicAWSCredentialsProvider(accessKey, secretKey),
        new InstanceProfileCredentialsProvider(),
        new AnonymousAWSCredentialsProvider()
    );
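The userInfo parsing added above can be exercised on its own. Below is a self-contained sketch (class name, bucket, and key values are hypothetical, not part of the patch) showing how credentials embedded in a URI of the form s3a://access:secret@bucket/path take precedence over the configured keys:

    import java.net.URI;

    // Standalone illustration of the logic added to S3AFileSystem.initialize():
    // credentials in the URI's user-info component override whatever
    // fs.s3a.access.key / fs.s3a.secret.key resolve to from the Configuration.
    public class S3aUriCredentialsSketch {
      public static void main(String[] args) {
        URI name = URI.create("s3a://MYACCESSKEY:MYSECRETKEY@mybucket/data");
        String accessKey = null;   // stands in for conf.get(ACCESS_KEY, null)
        String secretKey = null;   // stands in for conf.get(SECRET_KEY, null)

        String userInfo = name.getUserInfo();       // "MYACCESSKEY:MYSECRETKEY"
        if (userInfo != null) {
          int index = userInfo.indexOf(':');
          if (index != -1) {
            accessKey = userInfo.substring(0, index);    // part before ':'
            secretKey = userInfo.substring(index + 1);   // part after ':'
          } else {
            accessKey = userInfo;   // URI carried only an access key
          }
        }
        System.out.println("accessKey=" + accessKey + ", secretKey=" + secretKey);
      }
    }

If neither the URI nor the configuration supplies a key pair, the provider chain built just after this block still falls through to InstanceProfileCredentialsProvider and AnonymousAWSCredentialsProvider.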
diff --git a/hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/index.md b/hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/index.md
index bf62634891..e0389c05ca 100644
--- a/hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/index.md
+++ b/hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/index.md
@@ -141,12 +141,12 @@ If you do any of these: change your credentials immediately!
### Authentication properties
    <property>
-      <name>fs.s3a.awsAccessKeyId</name>
+      <name>fs.s3a.access.key</name>
      <description>AWS access key ID. Omit for Role-based authentication.</description>
    </property>

    <property>
-      <name>fs.s3a.awsSecretAccessKey</name>
+      <name>fs.s3a.secret.key</name>
      <description>AWS secret key. Omit for Role-based authentication.</description>
    </property>
@@ -411,13 +411,13 @@ Example:
    <property>
-      <name>fs.s3a.awsAccessKeyId</name>
+      <name>fs.s3a.access.key</name>
      <description>AWS access key ID. Omit for Role-based authentication.</description>
-      <value>DONOTPCOMMITTHISKEYTOSCM</value>
+      <value>DONOTCOMMITTHISKEYTOSCM</value>
    </property>

    <property>
-      <name>fs.s3a.awsSecretAccessKey</name>
+      <name>fs.s3a.secret.key</name>
      <description>AWS secret key. Omit for Role-based authentication.</description>
      <value>DONOTEVERSHARETHISSECRETKEY!</value>
    </property>
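As a programmatic counterpart to the core-site.xml example above, a hedged sketch (class name and bucket are placeholders, not part of the patch) of setting the renamed properties in code and opening the filesystem:

    import java.net.URI;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;

    // Java-side equivalent of the documented XML example, using the renamed
    // property names. Key values and bucket are placeholders only.
    public class S3aConfigSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        conf.set("fs.s3a.access.key", "DONOTCOMMITTHISKEYTOSCM");      // was fs.s3a.awsAccessKeyId
        conf.set("fs.s3a.secret.key", "DONOTEVERSHARETHISSECRETKEY!"); // was fs.s3a.awsSecretAccessKey
        FileSystem fs = FileSystem.get(URI.create("s3a://mybucket/"), conf);
        System.out.println("Working directory: " + fs.getWorkingDirectory());
      }
    }

Opening the filesystem this way will, of course, only succeed against a real bucket reachable with the supplied credentials; the snippet is meant to show the new property names in use, not to be run verbatim.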