HDDS-938. Add Client APIs for using S3 Auth interface.
Contributed by Dinesh Chitlangia.
commit 1d5734e341 (parent 30bfc9cbd0)
@@ -95,6 +95,7 @@ public final class OzoneConsts {
  public static final String CONTAINER_ROOT_PREFIX = "repository";

  public static final String FILE_HASH = "SHA-256";
  public static final String MD5_HASH = "MD5";
  public final static String CHUNK_OVERWRITE = "OverWriteRequested";

  public static final int CHUNK_SIZE = 1 * 1024 * 1024; // 1 MB
@@ -28,6 +28,7 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdds.scm.client.HddsClientUtils;
import org.apache.hadoop.ozone.client.protocol.ClientProtocol;
import org.apache.hadoop.ozone.om.helpers.S3SecretValue;
import org.apache.hadoop.security.UserGroupInformation;

import com.google.common.annotations.VisibleForTesting;
@@ -155,6 +156,10 @@ public OzoneVolume getVolume(String volumeName) throws IOException {
    return volume;
  }

  public S3SecretValue getS3Secret(String kerberosID) throws IOException {
    return proxy.getS3Secret(kerberosID);
  }

  /**
   * Returns Iterator to iterate over all buckets for a user.
   * The result can be restricted using bucket prefix, will return all
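A minimal client-side sketch of the new API. The OzoneClientFactory/OzoneClient plumbing and the sample Kerberos principal are illustrative assumptions, not part of this patch; only ObjectStore.getS3Secret comes from the hunk above, and it requires the RPC client (the REST client rejects the call, as shown further down).

import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.ozone.client.ObjectStore;
import org.apache.hadoop.ozone.client.OzoneClient;
import org.apache.hadoop.ozone.client.OzoneClientFactory;
import org.apache.hadoop.ozone.om.helpers.S3SecretValue;

public class S3SecretClientSketch {
  public static void main(String[] args) throws Exception {
    OzoneConfiguration conf = new OzoneConfiguration();
    try (OzoneClient client = OzoneClientFactory.getRpcClient(conf)) {
      ObjectStore store = client.getObjectStore();
      // First call creates the secret; later calls return the stored one.
      S3SecretValue secret = store.getS3Secret("testuser/scm@EXAMPLE.COM");
      System.out.println("awsAccessKey=" + secret.getAwsAccessKey());
      System.out.println("awsSecret=" + secret.getAwsSecret());
    }
  }
}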
@@ -34,6 +34,8 @@
import java.io.IOException;
import java.util.List;
import java.util.Map;

import org.apache.hadoop.ozone.om.helpers.S3SecretValue;
import org.apache.hadoop.ozone.security.OzoneTokenIdentifier;
import org.apache.hadoop.security.KerberosInfo;
import org.apache.hadoop.security.token.Token;
@@ -479,4 +481,11 @@ long renewDelegationToken(Token<OzoneTokenIdentifier> token)
  void cancelDelegationToken(Token<OzoneTokenIdentifier> token)
      throws IOException;

  /**
   * returns S3 Secret given kerberos user.
   * @param kerberosID
   * @return S3SecretValue
   * @throws IOException
   */
  S3SecretValue getS3Secret(String kerberosID) throws IOException;
}
@@ -45,6 +45,7 @@
import org.apache.hadoop.ozone.om.OMConfigKeys;
import org.apache.hadoop.ozone.om.helpers.OmMultipartInfo;
import org.apache.hadoop.ozone.om.helpers.OmMultipartUploadCompleteInfo;
import org.apache.hadoop.ozone.om.helpers.S3SecretValue;
import org.apache.hadoop.ozone.om.helpers.ServiceInfo;
import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.ServicePort;
import org.apache.hadoop.ozone.security.OzoneTokenIdentifier;
@@ -711,6 +712,12 @@ public void cancelDelegationToken(Token<OzoneTokenIdentifier> token)
    throw new IOException("Method not supported");
  }

  @Override
  public S3SecretValue getS3Secret(String kerberosID) throws IOException {
    throw new UnsupportedOperationException("Ozone REST protocol does not " +
        "support this operation.");
  }

  @Override
  public OzoneInputStream getKey(
      String volumeName, String bucketName, String keyName)
@@ -54,6 +54,7 @@
import org.apache.hadoop.ozone.om.helpers.OmMultipartUploadList;
import org.apache.hadoop.ozone.om.helpers.OmVolumeArgs;
import org.apache.hadoop.ozone.om.helpers.OpenKeySession;
import org.apache.hadoop.ozone.om.helpers.S3SecretValue;
import org.apache.hadoop.ozone.om.helpers.ServiceInfo;
import org.apache.hadoop.ozone.om.protocolPB.OzoneManagerProtocolClientSideTranslatorPB;
import org.apache.hadoop.ozone.om.protocolPB.OzoneManagerProtocolPB;
@@ -452,6 +453,20 @@ public void cancelDelegationToken(Token<OzoneTokenIdentifier> token)
    ozoneManagerClient.cancelDelegationToken(token);
  }

  /**
   * Returns s3 secret given a kerberos user.
   * @param kerberosID
   * @return S3SecretValue
   * @throws IOException
   */
  @Override
  public S3SecretValue getS3Secret(String kerberosID) throws IOException {
    Preconditions.checkArgument(Strings.isNotBlank(kerberosID),
        "kerberosID cannot be null or empty.");

    return ozoneManagerClient.getS3Secret(kerberosID);
  }

  @Override
  public void setBucketVersioning(
      String volumeName, String bucketName, Boolean versioning)
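The guard above fails fast on the client: Guava's Preconditions.checkArgument throws IllegalArgumentException before any RPC is attempted. A standalone sketch of that failure mode, assuming the same Strings utility that S3SecretManagerImpl imports later in this patch (org.apache.logging.log4j.util.Strings):

import com.google.common.base.Preconditions;
import org.apache.logging.log4j.util.Strings;

public class BlankKerberosIdCheck {
  public static void main(String[] args) {
    String kerberosID = "   ";   // blank on purpose
    try {
      Preconditions.checkArgument(Strings.isNotBlank(kerberosID),
          "kerberosID cannot be null or empty.");
    } catch (IllegalArgumentException e) {
      // Rejected locally; no request ever reaches the Ozone Manager.
      System.err.println(e.getMessage());
    }
  }
}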
@@ -18,10 +18,15 @@
package org.apache.hadoop.ozone;

import java.io.File;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.nio.charset.StandardCharsets;
import java.security.MessageDigest;
import java.security.NoSuchAlgorithmException;
import java.util.Collection;
import java.util.Optional;

import org.apache.commons.lang3.RandomStringUtils;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdds.HddsConfigKeys;
import org.apache.hadoop.hdds.server.ServerUtils;
@@ -184,4 +189,25 @@ public static boolean isReadOnly(
      return false;
    }
  }

  public static byte[] getMD5Digest(String input) throws IOException {
    try {
      MessageDigest md = MessageDigest.getInstance(OzoneConsts.MD5_HASH);
      return md.digest(input.getBytes(StandardCharsets.UTF_8));
    } catch (NoSuchAlgorithmException ex) {
      throw new IOException("Error creating an instance of MD5 digest.\n" +
          "This could possibly indicate a faulty JRE");
    }
  }

  public static byte[] getSHADigest() throws IOException {
    try {
      MessageDigest sha = MessageDigest.getInstance(OzoneConsts.FILE_HASH);
      return sha.digest(RandomStringUtils.random(32)
          .getBytes(StandardCharsets.UTF_8));
    } catch (NoSuchAlgorithmException ex) {
      throw new IOException("Error creating an instance of SHA-256 digest.\n" +
          "This could possibly indicate a faulty JRE");
    }
  }
}
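These two helpers are how the rest of this patch derives S3 credentials: S3SecretValue computes the access key as the hex MD5 of the Kerberos principal, and S3SecretManagerImpl generates the secret as a SHA-256 hex string over random bytes. A minimal sketch of that combination (the principal value is illustrative):

import org.apache.commons.codec.digest.DigestUtils;
import org.apache.hadoop.ozone.OmUtils;

public class S3CredentialDerivationSketch {
  public static void main(String[] args) throws Exception {
    String kerberosID = "testuser/scm@EXAMPLE.COM";

    // Deterministic: the same principal always maps to the same access key.
    String awsAccessKey = DigestUtils.md5Hex(OmUtils.getMD5Digest(kerberosID));

    // Random: generated once, then persisted by the Ozone Manager.
    String awsSecret = DigestUtils.sha256Hex(OmUtils.getSHADigest());

    System.out.println("accessKey=" + awsAccessKey);
    System.out.println("secret=" + awsSecret);
  }
}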
@@ -67,6 +67,7 @@ public final class OzoneManagerLock {
  private static final String VOLUME_LOCK = "volumeLock";
  private static final String BUCKET_LOCK = "bucketLock";
  private static final String S3_BUCKET_LOCK = "s3BucketLock";
  private static final String S3_SECRET_LOCK = "s3SecretetLock";

  private final LockManager<String> manager;

@@ -76,7 +77,8 @@ public final class OzoneManagerLock {
      () -> ImmutableMap.of(
          VOLUME_LOCK, new AtomicInteger(0),
          BUCKET_LOCK, new AtomicInteger(0),
          S3_BUCKET_LOCK, new AtomicInteger(0)
          S3_BUCKET_LOCK, new AtomicInteger(0),
          S3_SECRET_LOCK, new AtomicInteger(0)
      )
  );

@@ -219,4 +221,24 @@ private boolean hasAnyBucketLock() {
  private boolean hasAnyS3Lock() {
    return myLocks.get().get(S3_BUCKET_LOCK).get() != 0;
  }

  public void acquireS3SecretLock(String awsAccessId) {
    if (hasAnyS3SecretLock()) {
      throw new RuntimeException(
          "Thread '" + Thread.currentThread().getName() +
              "' cannot acquire S3 Secret lock while holding S3 " +
              "awsAccessKey lock(s).");
    }
    manager.lock(awsAccessId);
    myLocks.get().get(S3_SECRET_LOCK).incrementAndGet();
  }

  private boolean hasAnyS3SecretLock() {
    return myLocks.get().get(S3_SECRET_LOCK).get() != 0;
  }

  public void releaseS3SecretLock(String awsAccessId) {
    manager.unlock(awsAccessId);
    myLocks.get().get(S3_SECRET_LOCK).decrementAndGet();
  }
}
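The new lock is used in the usual acquire/try/finally/release shape; a hedged sketch of that pattern (S3SecretManagerImpl below obtains the lock via OMMetadataManager.getLock(); the wrapper class and the import path are illustrative assumptions):

import org.apache.hadoop.ozone.om.OzoneManagerLock;

public final class S3SecretLockSketch {
  private S3SecretLockSketch() {
  }

  public static void withS3SecretLock(OzoneManagerLock lock,
      String awsAccessId, Runnable criticalSection) {
    lock.acquireS3SecretLock(awsAccessId);
    try {
      criticalSection.run();
    } finally {
      lock.releaseS3SecretLock(awsAccessId);
    }
  }
}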
@@ -0,0 +1,84 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 * <p>
 * http://www.apache.org/licenses/LICENSE-2.0
 * <p>
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.ozone.om.helpers;

import org.apache.commons.codec.digest.DigestUtils;
import org.apache.hadoop.ozone.OmUtils;
import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos;

import java.io.IOException;

/**
 * S3Secret to be saved in database.
 */
public class S3SecretValue {
  private String kerberosID;
  private String awsSecret;
  private String awsAccessKey;

  public S3SecretValue(String kerberosID, String awsSecret) throws IOException {
    this.kerberosID = kerberosID;
    this.awsSecret = awsSecret;
    this.awsAccessKey =
        DigestUtils.md5Hex(OmUtils.getMD5Digest(kerberosID));
  }

  public String getKerberosID() {
    return kerberosID;
  }

  public void setKerberosID(String kerberosID) {
    this.kerberosID = kerberosID;
  }

  public String getAwsSecret() {
    return awsSecret;
  }

  public void setAwsSecret(String awsSecret) {
    this.awsSecret = awsSecret;
  }

  public String getAwsAccessKey() {
    return awsAccessKey;
  }

  public void setAwsAccessKey(String awsAccessKey) {
    this.awsAccessKey = awsAccessKey;
  }

  public static S3SecretValue fromProtobuf(
      OzoneManagerProtocolProtos.S3Secret s3Secret) throws IOException {
    return new S3SecretValue(s3Secret.getKerberosID(), s3Secret.getAwsSecret());
  }

  public OzoneManagerProtocolProtos.S3Secret getProtobuf() {
    return OzoneManagerProtocolProtos.S3Secret.newBuilder()
        .setAwsSecret(this.awsSecret)
        .setKerberosID(this.kerberosID)
        .build();
  }

  @Override
  public String toString() {
    return "S3SecretValue{" +
        "kerberosID='" + kerberosID + '\'' +
        ", awsSecret='" + awsSecret + '\'' +
        '}';
  }
}
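A small round-trip sketch of this helper: only kerberosID and awsSecret travel in the S3Secret protobuf, while awsAccessKey is re-derived from the Kerberos ID in the constructor, so it survives serialization without being stored (the values here are illustrative):

import org.apache.hadoop.ozone.om.helpers.S3SecretValue;
import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos;

public class S3SecretValueRoundTrip {
  public static void main(String[] args) throws Exception {
    S3SecretValue original =
        new S3SecretValue("testuser/scm@EXAMPLE.COM", "not-a-real-secret");

    // Serialize: only kerberosID and awsSecret are written.
    OzoneManagerProtocolProtos.S3Secret proto = original.getProtobuf();

    // Deserialize: the constructor re-derives the access key from kerberosID.
    S3SecretValue copy = S3SecretValue.fromProtobuf(proto);

    System.out.println(
        original.getAwsAccessKey().equals(copy.getAwsAccessKey())); // true
    System.out.println(
        original.getAwsSecret().equals(copy.getAwsSecret()));       // true
  }
}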
@@ -29,6 +29,7 @@
import org.apache.hadoop.ozone.om.helpers.OmMultipartUploadList;
import org.apache.hadoop.ozone.om.helpers.OmVolumeArgs;
import org.apache.hadoop.ozone.om.helpers.OpenKeySession;
import org.apache.hadoop.ozone.om.helpers.S3SecretValue;
import org.apache.hadoop.ozone.om.helpers.ServiceInfo;
import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.OzoneAclInfo;

@@ -312,7 +313,6 @@ List<OmBucketInfo> listS3Buckets(String userName, String startBucketName,
      String bucketPrefix, int maxNumOfBuckets)
      throws IOException;

  /**
   * Initiate multipart upload for the specified key.
   * @param keyArgs
@@ -350,5 +350,12 @@ OmMultipartUploadCompleteInfo completeMultipartUpload(
   */
  void abortMultipartUpload(OmKeyArgs omKeyArgs) throws IOException;

  /**
   * Gets s3Secret for given kerberos user.
   * @param kerberosID
   * @return S3SecretValue
   * @throws IOException
   */
  S3SecretValue getS3Secret(String kerberosID) throws IOException;
}
@@ -37,8 +37,10 @@
import org.apache.hadoop.ozone.om.helpers.OmMultipartUploadList;
import org.apache.hadoop.ozone.om.helpers.OmVolumeArgs;
import org.apache.hadoop.ozone.om.helpers.OpenKeySession;
import org.apache.hadoop.ozone.om.helpers.S3SecretValue;
import org.apache.hadoop.ozone.om.helpers.ServiceInfo;
import org.apache.hadoop.ozone.om.protocol.OzoneManagerProtocol;
import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos;
import org.apache.hadoop.ozone.protocol.proto
    .OzoneManagerProtocolProtos.AllocateBlockRequest;
import org.apache.hadoop.ozone.protocol.proto
@@ -159,6 +161,10 @@
    .S3ListBucketsRequest;
import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos
    .S3ListBucketsResponse;
import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos
    .GetS3SecretRequest;
import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos
    .GetS3SecretResponse;
import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos
    .OMRequest;
import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos
@@ -963,6 +969,30 @@ public List<OmBucketInfo> listS3Buckets(String userName, String startKey,
    }
  }

  @Override
  public S3SecretValue getS3Secret(String kerberosID) throws IOException {
    GetS3SecretRequest request = GetS3SecretRequest.newBuilder()
        .setKerberosID(kerberosID)
        .build();
    OMRequest omRequest = createOMRequest(Type.GetS3Secret)
        .setGetS3SecretRequest(request)
        .build();
    final GetS3SecretResponse resp = submitRequest(omRequest)
        .getGetS3SecretResponse();

    if(resp.getStatus() != Status.OK) {
      throw new IOException("Fetch S3 Secret failed, error: " +
          resp.getStatus());
    } else {
      return S3SecretValue.fromProtobuf(resp.getS3Secret());
    }
  }

  /**
   * Return the proxy object underlying this protocol translator.
   *
   * @return the proxy object underlying this protocol translator.
   */
  @Override
  public OmMultipartInfo initiateMultipartUpload(OmKeyArgs omKeyArgs) throws
      IOException {
@@ -68,6 +68,7 @@ enum Type {
  CommitMultiPartUpload = 46;
  CompleteMultiPartUpload = 47;
  AbortMultiPartUpload = 48;
  GetS3Secret = 49;

  ServiceList = 51;

@@ -114,6 +115,7 @@ message OMRequest {
  optional MultipartCommitUploadPartRequest commitMultiPartUploadRequest = 46;
  optional MultipartUploadCompleteRequest completeMultiPartUploadRequest = 47;
  optional MultipartUploadAbortRequest abortMultiPartUploadRequest = 48;
  optional GetS3SecretRequest getS3SecretRequest = 49;

  optional ServiceListRequest serviceListRequest = 51;

@@ -162,6 +164,7 @@ message OMResponse {
  optional MultipartCommitUploadPartResponse commitMultiPartUploadResponse = 46;
  optional MultipartUploadCompleteResponse completeMultiPartUploadResponse = 47;
  optional MultipartUploadAbortResponse abortMultiPartUploadResponse = 48;
  optional GetS3SecretResponse getS3SecretResponse = 49;

  optional ServiceListResponse ServiceListResponse = 51;

@@ -194,6 +197,7 @@ enum Status {
  SCM_VERSION_MISMATCH_ERROR = 21;
  S3_BUCKET_NOT_FOUND = 22;
  S3_BUCKET_ALREADY_EXISTS = 23;

  INITIATE_MULTIPART_UPLOAD_ERROR = 24;
  MULTIPART_UPLOAD_PARTFILE_ERROR = 25;
  NO_SUCH_MULTIPART_UPLOAD_ERROR = 26;
@@ -202,6 +206,8 @@ enum Status {
  COMPLETE_MULTIPART_UPLOAD_ERROR = 29;
  ENTITY_TOO_SMALL = 30;
  ABORT_MULTIPART_UPLOAD_FAILED = 31;

  S3_SECRET_NOT_FOUND = 32;
}

@@ -669,6 +675,20 @@ message CancelDelegationTokenResponseProto{
  optional hadoop.common.CancelDelegationTokenResponseProto response = 2;
}

message S3Secret {
  required string kerberosID = 1;
  required string awsSecret = 2;
}

message GetS3SecretRequest {
  required string kerberosID = 1;
}

message GetS3SecretResponse {
  required Status status = 1;
  required S3Secret s3Secret = 2;
}

/**
 The OM service that takes care of Ozone namespace.
*/
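Tying the protobuf additions to the translator code above: the client wraps a GetS3SecretRequest in the OMRequest envelope with cmdType GetS3Secret, and unpacks the GetS3SecretResponse. A hedged sketch using the generated classes; the clientId value is a placeholder for the envelope fields the real createOMRequest() fills in, and the actual transport call (submitRequest) is elided:

import java.io.IOException;

import org.apache.hadoop.ozone.om.helpers.S3SecretValue;
import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.GetS3SecretRequest;
import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.GetS3SecretResponse;
import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.OMRequest;
import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.Status;
import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.Type;

public final class GetS3SecretWireSketch {
  private GetS3SecretWireSketch() {
  }

  // Builds the request envelope the way the translator does.
  static OMRequest buildRequest(String kerberosID) {
    GetS3SecretRequest request = GetS3SecretRequest.newBuilder()
        .setKerberosID(kerberosID)
        .build();
    return OMRequest.newBuilder()
        .setCmdType(Type.GetS3Secret)
        .setClientId("sketch-client")   // placeholder client id
        .setGetS3SecretRequest(request)
        .build();
  }

  // Mirrors the translator's handling of the response.
  static S3SecretValue readResponse(GetS3SecretResponse resp)
      throws IOException {
    if (resp.getStatus() != Status.OK) {
      throw new IOException("Fetch S3 Secret failed, error: "
          + resp.getStatus());
    }
    return S3SecretValue.fromProtobuf(resp.getS3Secret());
  }
}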
@@ -20,8 +20,11 @@

import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.hdds.scm.ScmConfigKeys;
import org.apache.hadoop.ozone.om.helpers.S3SecretValue;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;

import java.io.IOException;

@@ -52,4 +55,23 @@ public static void init() throws Exception {
  public static void shutdown() throws IOException {
    shutdownCluster();
  }

  @Test
  public void testGetS3Secret() throws IOException {
    //Creates a secret since it does not exist
    S3SecretValue firstAttempt = TestOzoneRpcClient.getStore()
        .getS3Secret("HADOOP/JOHNDOE");

    //Fetches the secret from db since it was created in previous step
    S3SecretValue secondAttempt = TestOzoneRpcClient.getStore()
        .getS3Secret("HADOOP/JOHNDOE");

    //secret fetched on both attempts must be same
    Assert.assertTrue(firstAttempt.getAwsSecret()
        .equals(secondAttempt.getAwsSecret()));

    //access key fetched on both attempts must be same
    Assert.assertTrue(firstAttempt.getAwsAccessKey()
        .equals(secondAttempt.getAwsAccessKey()));
  }
}
@@ -169,6 +169,10 @@ public static void setStore(ObjectStore store) {
    TestOzoneRpcClientAbstract.store = store;
  }

  public static ObjectStore getStore() {
    return TestOzoneRpcClientAbstract.store;
  }

  public static void setScmId(String scmId){
    TestOzoneRpcClientAbstract.SCM_ID = scmId;
  }
@@ -23,9 +23,9 @@
import com.google.common.annotations.VisibleForTesting;
import com.google.common.base.Preconditions;
import com.google.protobuf.BlockingService;

import java.security.KeyPair;
import java.util.Objects;
import java.util.concurrent.TimeUnit;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.hdds.HddsConfigKeys;
import org.apache.hadoop.hdds.HddsUtils;
@@ -45,18 +45,18 @@
import org.apache.hadoop.hdfs.DFSUtil;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.ipc.Server;
import org.apache.hadoop.ozone.OmUtils;
import org.apache.hadoop.ipc.Client;
import org.apache.hadoop.ipc.ProtobufRpcEngine;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.ozone.OzoneSecurityUtil;
import org.apache.hadoop.ozone.security.OzoneSecretManager;
import org.apache.hadoop.ozone.om.helpers.S3SecretValue;
import org.apache.hadoop.ozone.security.OzoneSecurityException;
import org.apache.hadoop.ozone.security.OzoneTokenIdentifier;
import org.apache.hadoop.security.AccessControlException;
import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
import org.apache.hadoop.metrics2.util.MBeans;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.ozone.OmUtils;
import org.apache.hadoop.ozone.OzoneConsts;
import org.apache.hadoop.ozone.audit.AuditAction;
import org.apache.hadoop.ozone.audit.AuditEventStatus;
@@ -116,7 +116,6 @@
import org.slf4j.LoggerFactory;

import javax.management.ObjectName;
import javax.ws.rs.HEAD;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileOutputStream;
@@ -133,6 +132,7 @@
import java.util.Map;
import java.util.Timer;
import java.util.TimerTask;
import java.util.concurrent.TimeUnit;

import static org.apache.hadoop.hdds.HddsUtils.getScmAddressForBlockClients;
import static org.apache.hadoop.hdds.HddsUtils.getScmAddressForClients;
@@ -220,6 +220,7 @@ public final class OzoneManager extends ServiceRuntimeInfoImpl
  private final IAccessAuthorizer accessAuthorizer;
  private JvmPauseMonitor jvmPauseMonitor;
  private final SecurityConfig secConfig;
  private final S3SecretManager s3SecretManager;

  private OzoneManager(OzoneConfiguration conf) throws IOException {
    Preconditions.checkNotNull(conf);
@@ -301,6 +302,7 @@ private OzoneManager(OzoneConfiguration conf) throws IOException {
        volumeManager, bucketManager);
    keyManager = new KeyManagerImpl(scmBlockClient, metadataManager,
        configuration, omStorage.getOmId(), blockTokenMgr);
    s3SecretManager = new S3SecretManagerImpl(configuration, metadataManager);

    shutdownHook = () -> {
      saveOmMetrics();
@@ -1876,6 +1878,14 @@ public void deleteS3Bucket(String s3BucketName) throws IOException {
    }
  }

  @Override
  /**
   * {@inheritDoc}
   */
  public S3SecretValue getS3Secret(String kerberosID) throws IOException{
    return s3SecretManager.getS3Secret(kerberosID);
  }

  @Override
  /**
   * {@inheritDoc}
@@ -0,0 +1,30 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package org.apache.hadoop.ozone.om;

import org.apache.hadoop.ozone.om.helpers.S3SecretValue;

import java.io.IOException;
/**
 * Interface to manager s3 secret.
 */
public interface S3SecretManager {

  S3SecretValue getS3Secret(String kerberosID) throws IOException;
}
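For contrast, a hypothetical in-memory implementation of this interface (not part of this commit, e.g. for unit tests): it keeps the same get-or-create contract but persists nothing and needs no locking:

import java.io.IOException;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;

import org.apache.commons.codec.digest.DigestUtils;
import org.apache.hadoop.ozone.om.S3SecretManager;
import org.apache.hadoop.ozone.om.helpers.S3SecretValue;

public class InMemoryS3SecretManager implements S3SecretManager {
  private final Map<String, S3SecretValue> secrets = new ConcurrentHashMap<>();

  @Override
  public S3SecretValue getS3Secret(String kerberosID) throws IOException {
    S3SecretValue existing = secrets.get(kerberosID);
    if (existing != null) {
      return existing;
    }
    S3SecretValue created = new S3SecretValue(kerberosID,
        DigestUtils.sha256Hex(UUID.randomUUID().toString()));
    S3SecretValue previous = secrets.putIfAbsent(kerberosID, created);
    return previous != null ? previous : created;
  }
}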
@@ -0,0 +1,82 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */
package org.apache.hadoop.ozone.om;

import com.google.common.base.Preconditions;
import org.apache.commons.codec.digest.DigestUtils;
import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.ozone.OmUtils;
import org.apache.hadoop.ozone.om.helpers.S3SecretValue;
import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos;
import org.apache.logging.log4j.util.Strings;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.io.IOException;

/**
 * S3 Secret manager.
 */
public class S3SecretManagerImpl implements S3SecretManager {
  private static final Logger LOG =
      LoggerFactory.getLogger(S3SecretManagerImpl.class);

  /**
   * OMMetadataManager is used for accessing OM MetadataDB and ReadWriteLock.
   */
  private final OMMetadataManager omMetadataManager;
  private final OzoneConfiguration configuration;

  /**
   * Constructs S3SecretManager.
   *
   * @param omMetadataManager
   */
  public S3SecretManagerImpl(OzoneConfiguration configuration,
      OMMetadataManager omMetadataManager) {
    this.configuration = configuration;
    this.omMetadataManager = omMetadataManager;
  }

  @Override
  public S3SecretValue getS3Secret(String kerberosID) throws IOException {
    Preconditions.checkArgument(Strings.isNotBlank(kerberosID),
        "kerberosID cannot be null or empty.");
    byte[] awsAccessKey = OmUtils.getMD5Digest(kerberosID);
    S3SecretValue result = null;
    omMetadataManager.getLock().acquireS3SecretLock(kerberosID);
    try {
      byte[] s3Secret =
          omMetadataManager.getS3SecretTable().get(awsAccessKey);
      if(s3Secret == null) {
        byte[] secret = OmUtils.getSHADigest();
        result = new S3SecretValue(kerberosID, DigestUtils.sha256Hex(secret));
        omMetadataManager.getS3SecretTable()
            .put(awsAccessKey, result.getProtobuf().toByteArray());
      } else {
        result = S3SecretValue.fromProtobuf(
            OzoneManagerProtocolProtos.S3Secret.parseFrom(s3Secret));
      }
      result.setAwsAccessKey(DigestUtils.md5Hex(awsAccessKey));
    } finally {
      omMetadataManager.getLock().releaseS3SecretLock(kerberosID);
    }
    return result;
  }
}
@@ -116,6 +116,7 @@ public enum ResultCodes {
    SCM_IN_CHILL_MODE,
    S3_BUCKET_ALREADY_EXISTS,
    S3_BUCKET_NOT_FOUND,
    S3_SECRET_NOT_FOUND,
    INITIATE_MULTIPART_UPLOAD_FAILED,
    NO_SUCH_MULTIPART_UPLOAD,
    UPLOAD_PART_FAILED,
@@ -39,6 +39,7 @@
import org.apache.hadoop.ozone.om.helpers.OpenKeySession;
import org.apache.hadoop.ozone.om.helpers.ServiceInfo;
import org.apache.hadoop.ozone.om.protocol.OzoneManagerProtocol;
import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos;
import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos
    .AllocateBlockRequest;
import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos
@@ -162,6 +163,8 @@
import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.GetDelegationTokenResponseProto;
import org.apache.hadoop.security.proto.SecurityProtos.RenewDelegationTokenRequestProto;
import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.RenewDelegationTokenResponseProto;
import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.GetS3SecretRequest;
import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.GetS3SecretResponse;
import org.apache.hadoop.security.token.Token;

import org.apache.hadoop.ozone.protocol.proto.OzoneManagerProtocolProtos.Status;
@@ -343,6 +346,11 @@ public OMResponse handle(OMRequest request) {
          request.getCancelDelegationTokenRequest());
      responseBuilder.setCancelDelegationTokenResponse(cancelDtResp);
      break;
    case GetS3Secret:
      GetS3SecretResponse getS3SecretResp = getS3Secret(request
          .getGetS3SecretRequest());
      responseBuilder.setGetS3SecretResponse(getS3SecretResp);
      break;
    default:
      responseBuilder.setSuccess(false);
      responseBuilder.setMessage("Unrecognized Command Type: " + cmdType);
@@ -998,4 +1006,17 @@ private CancelDelegationTokenResponseProto cancelDelegationToken(
    }
    return rb.build();
  }

  private OzoneManagerProtocolProtos.GetS3SecretResponse getS3Secret(
      OzoneManagerProtocolProtos.GetS3SecretRequest request) {
    OzoneManagerProtocolProtos.GetS3SecretResponse.Builder rb =
        OzoneManagerProtocolProtos.GetS3SecretResponse.newBuilder();
    try {
      rb.setS3Secret(impl.getS3Secret(request.getKerberosID()).getProtobuf());
      rb.setStatus(Status.OK);
    } catch (IOException ex) {
      rb.setStatus(exceptionToResponseStatus(ex));
    }
    return rb.build();
  }
}