diff --git a/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml b/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
index 74c030edb2..b3f8cd57f9 100644
--- a/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
+++ b/hadoop-common-project/hadoop-common/src/main/resources/core-default.xml
@@ -757,12 +757,17 @@
 <property>
   <name>fs.s3a.access.key</name>
-  <description>AWS access key ID used by S3A file system. Omit for Role-based authentication.</description>
+  <description>AWS access key ID used by S3A file system. Omit for IAM role-based or provider-based authentication.</description>
 </property>
 
 <property>
   <name>fs.s3a.secret.key</name>
-  <description>AWS secret key used by S3A file system. Omit for Role-based authentication.</description>
+  <description>AWS secret key used by S3A file system. Omit for IAM role-based or provider-based authentication.</description>
+</property>
+
+<property>
+  <name>fs.s3a.aws.credentials.provider</name>
+  <description>Class name of a credentials provider that implements com.amazonaws.auth.AWSCredentialsProvider. Omit if using access/secret keys or another authentication mechanism.</description>
 </property>
diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/Constants.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/Constants.java
index a800082104..be123981e2 100644
--- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/Constants.java
+++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/Constants.java
@@ -37,6 +37,10 @@ private Constants() {
   // s3 secret key
   public static final String SECRET_KEY = "fs.s3a.secret.key";
 
+  // aws credentials provider
+  public static final String AWS_CREDENTIALS_PROVIDER =
+      "fs.s3a.aws.credentials.provider";
+
   // number of simultaneous connections to s3
   public static final String MAXIMUM_CONNECTIONS = "fs.s3a.connection.maximum";
   public static final int DEFAULT_MAXIMUM_CONNECTIONS = 15;
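The two hunks above only define the new fs.s3a.aws.credentials.provider key. As a minimal sketch (not part of the patch), client code could select a pluggable provider through the new constant; the class name com.example.MyCredentialsProvider is purely hypothetical:

    import org.apache.hadoop.conf.Configuration;

    import static org.apache.hadoop.fs.s3a.Constants.AWS_CREDENTIALS_PROVIDER;

    // Sketch only: select a pluggable credentials provider by class name.
    public class ProviderConfigExample {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // "com.example.MyCredentialsProvider" is a hypothetical class, not part of this patch.
        conf.set(AWS_CREDENTIALS_PROVIDER, "com.example.MyCredentialsProvider");
        // With a provider configured, fs.s3a.access.key and fs.s3a.secret.key
        // can be omitted, as the updated descriptions above state.
      }
    }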
diff --git a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java
index 44b8aae364..1a043f0b74 100644
--- a/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java
+++ b/hadoop-tools/hadoop-aws/src/main/java/org/apache/hadoop/fs/s3a/S3AFileSystem.java
@@ -35,8 +35,8 @@
 import com.amazonaws.AmazonServiceException;
 import com.amazonaws.ClientConfiguration;
 import com.amazonaws.Protocol;
+import com.amazonaws.auth.AWSCredentialsProvider;
 import com.amazonaws.auth.AWSCredentialsProviderChain;
-
 import com.amazonaws.auth.InstanceProfileCredentialsProvider;
 import com.amazonaws.services.s3.AmazonS3Client;
 import com.amazonaws.services.s3.S3ClientOptions;
@@ -55,11 +55,10 @@
 import com.amazonaws.services.s3.transfer.Upload;
 import com.amazonaws.event.ProgressListener;
 import com.amazonaws.event.ProgressEvent;
-
 import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Preconditions;
 
-import org.apache.commons.lang.StringUtils;
+import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -133,17 +132,10 @@ public void initialize(URI name, Configuration conf) throws IOException {
     workingDir = new Path("/user", System.getProperty("user.name"))
         .makeQualified(this.uri, this.getWorkingDirectory());
 
-    AWSAccessKeys creds = getAWSAccessKeys(name, conf);
-
-    AWSCredentialsProviderChain credentials = new AWSCredentialsProviderChain(
-        new BasicAWSCredentialsProvider(
-            creds.getAccessKey(), creds.getAccessSecret()),
-        new InstanceProfileCredentialsProvider(),
-        new AnonymousAWSCredentialsProvider()
-    );
-
     bucket = name.getHost();
 
+    AWSCredentialsProvider credentials = getAWSCredentialsProvider(name, conf);
+
     ClientConfiguration awsConf = new ClientConfiguration();
     awsConf.setMaxConnections(intOption(conf, MAXIMUM_CONNECTIONS,
       DEFAULT_MAXIMUM_CONNECTIONS, 1));
@@ -280,7 +272,7 @@ private void initUserAgent(Configuration conf, ClientConfiguration awsConf) {
   }
 
   private void initAmazonS3Client(Configuration conf,
-      AWSCredentialsProviderChain credentials, ClientConfiguration awsConf)
+      AWSCredentialsProvider credentials, ClientConfiguration awsConf)
       throws IllegalArgumentException {
     s3 = new AmazonS3Client(credentials, awsConf);
     String endPoint = conf.getTrimmed(ENDPOINT, "");
@@ -395,6 +387,48 @@ AWSAccessKeys getAWSAccessKeys(URI name, Configuration conf)
     return new AWSAccessKeys(accessKey, secretKey);
   }
 
+  /**
+   * Create the standard credential provider, or load in one explicitly
+   * identified in the configuration.
+   * @param binding the S3 binding/bucket.
+   * @param conf configuration
+   * @return a credential provider
+   * @throws IOException on any problem. Class construction issues may be
+   * nested inside the IOE.
+   */
+  private AWSCredentialsProvider getAWSCredentialsProvider(URI binding,
+      Configuration conf) throws IOException {
+    AWSCredentialsProvider credentials;
+
+    String className = conf.getTrimmed(AWS_CREDENTIALS_PROVIDER);
+    if (StringUtils.isEmpty(className)) {
+      AWSAccessKeys creds = getAWSAccessKeys(binding, conf);
+      credentials = new AWSCredentialsProviderChain(
+          new BasicAWSCredentialsProvider(
+              creds.getAccessKey(), creds.getAccessSecret()),
+          new InstanceProfileCredentialsProvider(),
+          new AnonymousAWSCredentialsProvider()
+      );
+
+    } else {
+      try {
+        LOG.debug("Credential provider class is {}", className);
+        credentials = (AWSCredentialsProvider) Class.forName(className)
+            .getDeclaredConstructor(URI.class, Configuration.class)
+            .newInstance(this.uri, conf);
+      } catch (ClassNotFoundException e) {
+        throw new IOException(className + " not found.", e);
+      } catch (NoSuchMethodException | SecurityException e) {
+        throw new IOException(className + " constructor exception.", e);
+      } catch (ReflectiveOperationException | IllegalArgumentException e) {
+        throw new IOException(className + " instantiation exception.", e);
+      }
+      LOG.debug("Using {} for {}.", credentials, this.uri);
+    }
+
+    return credentials;
+  }
+
   /**
    * Return the protocol scheme for the FileSystem.
    *
@@ -1392,7 +1426,7 @@ public String toString() {
           .append('\'');
     }
     sb.append(", statistics {")
-        .append(statistics.toString())
+        .append(statistics)
         .append("}");
     sb.append(", metrics {")
         .append(instrumentation.dump("{", "=", "} ", true))
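The getAWSCredentialsProvider() hunk above loads the configured class reflectively through a (URI, Configuration) constructor. A minimal sketch of a provider satisfying that contract follows; the package, class name, and the example.* configuration keys are illustrative, not part of the patch or of the AWS SDK:

    package com.example;

    import java.net.URI;

    import com.amazonaws.auth.AWSCredentials;
    import com.amazonaws.auth.AWSCredentialsProvider;
    import com.amazonaws.auth.BasicAWSCredentials;
    import org.apache.hadoop.conf.Configuration;

    // Hypothetical provider that resolves keys from site-specific configuration.
    public class MyCredentialsProvider implements AWSCredentialsProvider {
      private final String accessKey;
      private final String secretKey;

      // S3AFileSystem looks up exactly this (URI, Configuration) constructor.
      public MyCredentialsProvider(URI bucketUri, Configuration conf) {
        this.accessKey = conf.get("example.access.key");
        this.secretKey = conf.get("example.secret.key");
      }

      @Override
      public AWSCredentials getCredentials() {
        return new BasicAWSCredentials(accessKey, secretKey);
      }

      @Override
      public void refresh() {
        // nothing is cached, so there is nothing to refresh
      }
    }

Because constructor failures are caught and wrapped in an IOException by the hunk above, a provider of this shape can also throw from its constructor to fail fast on misconfiguration.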
diff --git a/hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/index.md b/hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/index.md
index fe81400d3d..a81aff91e0 100644
--- a/hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/index.md
+++ b/hadoop-tools/hadoop-aws/src/site/markdown/tools/hadoop-aws/index.md
@@ -177,20 +177,27 @@ If you do any of these: change your credentials immediately!
 
     <property>
       <name>fs.s3a.access.key</name>
-      <description>AWS access key ID. Omit for Role-based authentication.</description>
+      <description>AWS access key ID. Omit for IAM role-based or provider-based authentication.</description>
     </property>
 
     <property>
       <name>fs.s3a.secret.key</name>
-      <description>AWS secret key. Omit for Role-based authentication.</description>
+      <description>AWS secret key. Omit for IAM role-based or provider-based authentication.</description>
+    </property>
+
+    <property>
+      <name>fs.s3a.aws.credentials.provider</name>
+      <description>Class name of a credentials provider that implements com.amazonaws.auth.AWSCredentialsProvider.
+        Omit if using access/secret keys or another authentication mechanism.</description>
     </property>
 
 #### Protecting the AWS Credentials in S3A
 
-To protect these credentials from prying eyes, it is recommended that you use
+To protect the access/secret keys from prying eyes, it is recommended that you
+use either IAM role-based authentication (such as EC2 instance profile) or
 the credential provider framework securely storing them and accessing them
-through configuration. The following describes its use for AWS credentials
-in S3A FileSystem.
+through configuration. The following describes using the latter for AWS
+credentials in S3AFileSystem.
 
 For additional reading on the credential provider API see:
 [Credential Provider API](../../../hadoop-project-dist/hadoop-common/CredentialProviderAPI.html).
@@ -560,13 +567,13 @@ Example:
 
     <property>
       <name>fs.s3a.access.key</name>
-      <description>AWS access key ID. Omit for Role-based authentication.</description>
+      <description>AWS access key ID. Omit for IAM role-based authentication.</description>
      <value>DONOTCOMMITTHISKEYTOSCM</value>
     </property>
 
     <property>
       <name>fs.s3a.secret.key</name>
-      <description>AWS secret key. Omit for Role-based authentication.</description>
+      <description>AWS secret key. Omit for IAM role-based authentication.</description>
      <value>DONOTEVERSHARETHISSECRETKEY!</value>
     </property>
 
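Tying the documented property to actual use, here is a hedged end-to-end sketch; the bucket name and provider class are placeholders, and in a real deployment the property would normally live in core-site.xml as shown above rather than being set in code:

    import java.net.URI;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class S3AProviderUsage {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Placeholder values; substitute a real provider class and bucket.
        conf.set("fs.s3a.aws.credentials.provider", "com.example.MyCredentialsProvider");
        FileSystem fs = FileSystem.get(URI.create("s3a://example-bucket/"), conf);
        for (FileStatus status : fs.listStatus(new Path("/"))) {
          System.out.println(status.getPath());
        }
      }
    }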
diff --git a/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AAWSCredentialsProvider.java b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AAWSCredentialsProvider.java
new file mode 100644
index 0000000000..b20a768f28
--- /dev/null
+++ b/hadoop-tools/hadoop-aws/src/test/java/org/apache/hadoop/fs/s3a/TestS3AAWSCredentialsProvider.java
@@ -0,0 +1,106 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.fs.s3a;
+
+import static org.apache.hadoop.fs.s3a.Constants.*;
+import static org.junit.Assert.*;
+
+import java.io.IOException;
+import java.net.URI;
+
+import org.apache.hadoop.conf.Configuration;
+import org.junit.Test;
+
+import com.amazonaws.auth.AWSCredentials;
+import com.amazonaws.auth.AWSCredentialsProvider;
+import com.amazonaws.auth.AWSCredentialsProviderChain;
+import com.amazonaws.auth.BasicAWSCredentials;
+import com.amazonaws.auth.InstanceProfileCredentialsProvider;
+import com.amazonaws.services.s3.model.AmazonS3Exception;
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+
+/**
+ * Tests for {@link Constants#AWS_CREDENTIALS_PROVIDER} logic.
+ *
+ */
+public class TestS3AAWSCredentialsProvider {
+  private static final Logger LOG =
+      LoggerFactory.getLogger(TestS3AAWSCredentialsProvider.class);
+
+  @Test
+  public void testBadConfiguration() throws IOException {
+    Configuration conf = new Configuration();
+    conf.set(AWS_CREDENTIALS_PROVIDER, "no.such.class");
+    try {
+      S3ATestUtils.createTestFileSystem(conf);
+    } catch (IOException e) {
+      if (!(e.getCause() instanceof ClassNotFoundException)) {
+        LOG.error("Unexpected nested cause: {} in {}", e.getCause(), e, e);
+        throw e;
+      }
+    }
+  }
+
+  static class BadCredentialsProvider implements AWSCredentialsProvider {
+
+    @SuppressWarnings("unused")
+    public BadCredentialsProvider(URI name, Configuration conf) {
+    }
+
+    @Override
+    public AWSCredentials getCredentials() {
+      return new BasicAWSCredentials("bad_key", "bad_secret");
+    }
+
+    @Override
+    public void refresh() {
+    }
+  }
+
+  @Test
+  public void testBadCredentials() throws Exception {
+    Configuration conf = new Configuration();
+    conf.set(AWS_CREDENTIALS_PROVIDER, BadCredentialsProvider.class.getName());
+    try {
+      S3ATestUtils.createTestFileSystem(conf);
+    } catch (AmazonS3Exception e) {
+      if (e.getStatusCode() != 403) {
+        LOG.error("Unexpected status code: {}", e.getStatusCode(), e);
+        throw e;
+      }
+    }
+  }
+
+  static class GoodCredentialsProvider extends AWSCredentialsProviderChain {
+
+    @SuppressWarnings("unused")
+    public GoodCredentialsProvider(URI name, Configuration conf) {
+      super(new BasicAWSCredentialsProvider(conf.get(ACCESS_KEY),
+          conf.get(SECRET_KEY)), new InstanceProfileCredentialsProvider());
+    }
+  }
+
+  @Test
+  public void testGoodProvider() throws Exception {
+    Configuration conf = new Configuration();
+    conf.set(AWS_CREDENTIALS_PROVIDER, GoodCredentialsProvider.class.getName());
+    S3ATestUtils.createTestFileSystem(conf);
+  }
+}
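The tests above exercise failures surfacing through filesystem creation. As a sketch of the caller-facing contract, derived from testBadConfiguration and using a placeholder bucket, a missing provider class arrives as an IOException whose cause is the ClassNotFoundException:

    import java.io.IOException;
    import java.net.URI;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;

    public class ProviderFailureHandling {
      public static void main(String[] args) throws IOException {
        Configuration conf = new Configuration();
        conf.set("fs.s3a.aws.credentials.provider", "no.such.class");
        try {
          FileSystem.get(URI.create("s3a://example-bucket/"), conf);
        } catch (IOException e) {
          // Mirrors testBadConfiguration: the reflection failure is wrapped.
          if (e.getCause() instanceof ClassNotFoundException) {
            System.err.println("Credentials provider class not found: " + e.getMessage());
          }
          throw e;
        }
      }
    }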