HADOOP-9173. Add security token protobuf definition to common and use it in hdfs. Contributed by Suresh Srinivas.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1428972 13f79535-47bb-0310-9956-ffa450edef68
Suresh Srinivas 2013-01-04 16:49:49 +00:00
parent 6083e9cf8f
commit 251b485af5
13 changed files with 40 additions and 35 deletions
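
The hadoop-common half of this change is the new shared token message that the HDFS hunks below refer to as hadoop.common.TokenProto (generated Java class org.apache.hadoop.security.proto.SecurityProtos.TokenProto). The common-side Security.proto is not among the files shown here, so the following is only a sketch reconstructed from those references, assuming the new message mirrors, field for field, the BlockTokenIdentifierProto that this commit removes from hdfs.proto:

// Sketch of the TokenProto definition in hadoop-common's Security.proto.
// Reconstructed from this diff, not copied from the actual file: the package
// and java options are inferred from the qualified name hadoop.common.TokenProto
// and the Java import org.apache.hadoop.security.proto.SecurityProtos.TokenProto;
// the field list assumes a one-to-one match with the removed
// BlockTokenIdentifierProto message.
option java_package = "org.apache.hadoop.security.proto";
option java_outer_classname = "SecurityProtos";
option java_generate_equals_and_hash = true;
package hadoop.common;

/**
 * A generic security token, replacing the HDFS-only BlockTokenIdentifierProto
 */
message TokenProto {
  required bytes identifier = 1;
  required bytes password = 2;
  required string kind = 3;
  required string service = 4;
}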

View File: hadoop-common-project/hadoop-common/CHANGES.txt

@@ -448,6 +448,9 @@ Release 2.0.3-alpha - Unreleased
     that has reached its soft limit but not the hard limit. (Derek Dagit via
     szetszwo)
 
+    HADOOP-9173. Add security token protobuf definition to common and
+    use it in hdfs. (suresh)
+
   OPTIMIZATIONS
 
   BUG FIXES

View File: hadoop-hdfs-project/hadoop-hdfs/pom.xml

@@ -420,8 +420,10 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
   <configuration>
     <executable>protoc</executable>
     <arguments>
+      <argument>-I../../hadoop-common-project/hadoop-common/src/main/proto/</argument>
       <argument>-Isrc/main/proto/</argument>
       <argument>--java_out=target/generated-sources/java</argument>
+      <argument>src/main/proto/hdfs.proto</argument>
       <argument>src/main/proto/GetUserMappingsProtocol.proto</argument>
       <argument>src/main/proto/HAZKInfo.proto</argument>
       <argument>src/main/proto/InterDatanodeProtocol.proto</argument>
@@ -429,7 +431,6 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
       <argument>src/main/proto/RefreshAuthorizationPolicyProtocol.proto</argument>
       <argument>src/main/proto/RefreshUserMappingsProtocol.proto</argument>
       <argument>src/main/proto/datatransfer.proto</argument>
-      <argument>src/main/proto/hdfs.proto</argument>
     </arguments>
   </configuration>
 </execution>
@@ -442,6 +443,7 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
   <configuration>
     <executable>protoc</executable>
     <arguments>
+      <argument>-I../../hadoop-common-project/hadoop-common/src/main/proto/</argument>
       <argument>-Isrc/main/proto/</argument>
       <argument>--java_out=target/generated-sources/java</argument>
       <argument>src/main/proto/ClientDatanodeProtocol.proto</argument>
@@ -458,6 +460,7 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
   <configuration>
     <executable>protoc</executable>
     <arguments>
+      <argument>-I../../hadoop-common-project/hadoop-common/src/main/proto/</argument>
       <argument>-Isrc/main/proto/</argument>
       <argument>--java_out=target/generated-sources/java</argument>
       <argument>src/main/proto/ClientNamenodeProtocol.proto</argument>
@@ -474,6 +477,7 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
   <configuration>
     <executable>protoc</executable>
     <arguments>
+      <argument>-I../../hadoop-common-project/hadoop-common/src/main/proto/</argument>
       <argument>-Isrc/main/proto/</argument>
       <argument>--java_out=target/generated-sources/java</argument>
       <argument>src/main/proto/QJournalProtocol.proto</argument>

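Every protoc invocation in this pom.xml (and in the bkjournal pom.xml below) gains the same new include path. protoc resolves import statements against its -I directories, so once the HDFS .proto files import Security.proto from hadoop-common, the common proto directory has to be on the include path or protoc cannot resolve the import. A minimal sketch of the pattern this enables (ExampleWithTokenProto is a hypothetical message, not part of this commit):

package hadoop.hdfs;

// Resolved via the new include path
// -I../../hadoop-common-project/hadoop-common/src/main/proto/:
import "Security.proto";

// Hypothetical message, for illustration only.
message ExampleWithTokenProto {
  // Fully qualified cross-package reference to the shared token message.
  required hadoop.common.TokenProto token = 1;
}
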
View File: hadoop-hdfs-project/hadoop-hdfs/src/contrib/bkjournal/pom.xml

@@ -143,6 +143,7 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
   <configuration>
     <executable>protoc</executable>
     <arguments>
+      <argument>-I../../../../../hadoop-common-project/hadoop-common/src/main/proto/</argument>
       <argument>-Isrc/main/proto/</argument>
       <argument>-I../../main/proto</argument>
       <argument>--java_out=target/generated-sources/java</argument>

View File: hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/HdfsProtoUtil.java

@@ -30,6 +30,7 @@
 import org.apache.hadoop.hdfs.util.ExactSizeInputStream;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.util.DataChecksum;
+import org.apache.hadoop.security.proto.SecurityProtos.TokenProto;
 import org.apache.hadoop.security.token.Token;
 
 import com.google.common.collect.Lists;
@@ -47,8 +48,8 @@ public abstract class HdfsProtoUtil {
 
   //// Block Token ////
 
-  public static HdfsProtos.BlockTokenIdentifierProto toProto(Token<?> blockToken) {
-    return HdfsProtos.BlockTokenIdentifierProto.newBuilder()
+  public static TokenProto toProto(Token<?> blockToken) {
+    return TokenProto.newBuilder()
       .setIdentifier(ByteString.copyFrom(blockToken.getIdentifier()))
       .setPassword(ByteString.copyFrom(blockToken.getPassword()))
       .setKind(blockToken.getKind().toString())
@@ -56,7 +57,7 @@ public static HdfsProtos.BlockTokenIdentifierProto toProto(Token<?> blockToken)
       .build();
   }
 
-  public static Token<BlockTokenIdentifier> fromProto(HdfsProtos.BlockTokenIdentifierProto proto) {
+  public static Token<BlockTokenIdentifier> fromProto(TokenProto proto) {
     return new Token<BlockTokenIdentifier>(proto.getIdentifier().toByteArray(),
       proto.getPassword().toByteArray(),
       new Text(proto.getKind()),

View File: hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientDatanodeProtocolServerSideTranslatorPB.java

@@ -37,9 +37,9 @@
 import org.apache.hadoop.hdfs.protocol.proto.ClientDatanodeProtocolProtos.GetReplicaVisibleLengthResponseProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientDatanodeProtocolProtos.RefreshNamenodesRequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientDatanodeProtocolProtos.RefreshNamenodesResponseProto;
-import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto;
 import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto;
 import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
+import org.apache.hadoop.security.proto.SecurityProtos.TokenProto;
 import org.apache.hadoop.security.token.Token;
 
 import com.google.protobuf.ByteString;
@@ -133,7 +133,7 @@ public GetHdfsBlockLocationsResponseProto getHdfsBlockLocations(
     }
     List<Token<BlockTokenIdentifier>> tokens =
         new ArrayList<Token<BlockTokenIdentifier>>(request.getTokensCount());
-    for (BlockTokenIdentifierProto b : request.getTokensList()) {
+    for (TokenProto b : request.getTokensList()) {
       tokens.add(PBHelper.convert(b));
     }
     // Call the real implementation

View File: hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientDatanodeProtocolTranslatorPB.java

@@ -44,7 +44,6 @@
 import org.apache.hadoop.hdfs.protocol.proto.ClientDatanodeProtocolProtos.GetHdfsBlockLocationsResponseProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientDatanodeProtocolProtos.GetReplicaVisibleLengthRequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientDatanodeProtocolProtos.RefreshNamenodesRequestProto;
-import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto;
 import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto;
 import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
 import org.apache.hadoop.ipc.ProtobufHelper;
import org.apache.hadoop.ipc.ProtobufHelper;
@@ -55,6 +54,7 @@
 import org.apache.hadoop.ipc.RpcClientUtil;
 import org.apache.hadoop.net.NetUtils;
 import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.proto.SecurityProtos.TokenProto;
 import org.apache.hadoop.security.token.Token;
 
 import com.google.protobuf.ByteString;
@@ -222,8 +222,8 @@ public HdfsBlocksMetadata getHdfsBlocksMetadata(List<ExtendedBlock> blocks,
     // Convert to proto objects
     List<ExtendedBlockProto> blocksProtos =
         new ArrayList<ExtendedBlockProto>(blocks.size());
-    List<BlockTokenIdentifierProto> tokensProtos =
-        new ArrayList<BlockTokenIdentifierProto>(tokens.size());
+    List<TokenProto> tokensProtos =
+        new ArrayList<TokenProto>(tokens.size());
     for (ExtendedBlock b : blocks) {
       blocksProtos.add(PBHelper.convert(b));
     }

View File: hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolServerSideTranslatorPB.java

@@ -125,12 +125,12 @@
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.UpdateBlockForPipelineResponseProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.UpdatePipelineRequestProto;
 import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.UpdatePipelineResponseProto;
-import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto;
 import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto;
 import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto;
 import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto;
 import org.apache.hadoop.hdfs.security.token.block.DataEncryptionKey;
 import org.apache.hadoop.io.Text;
+import org.apache.hadoop.security.proto.SecurityProtos.TokenProto;
 
 import com.google.protobuf.RpcController;
 import com.google.protobuf.ServiceException;
@@ -774,7 +774,7 @@ public GetDelegationTokenResponseProto getDelegationToken(
       RpcController controller, GetDelegationTokenRequestProto req)
       throws ServiceException {
     try {
-      BlockTokenIdentifierProto result = PBHelper.convert(server
+      TokenProto result = PBHelper.convert(server
           .getDelegationToken(new Text(req.getRenewer())));
       return GetDelegationTokenResponseProto.newBuilder().setToken(result)
           .build();

View File: hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelper.java

@@ -64,7 +64,6 @@
 import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DataEncryptionKeyProto;
 import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto;
 import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto;
-import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto;
 import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto;
 import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlocksWithLocationsProto;
 import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointCommandProto;
@@ -130,6 +129,7 @@
 import org.apache.hadoop.hdfs.server.protocol.RemoteEditLogManifest;
 import org.apache.hadoop.io.EnumSetWritable;
 import org.apache.hadoop.io.Text;
+import org.apache.hadoop.security.proto.SecurityProtos.TokenProto;
 import org.apache.hadoop.security.token.Token;
 
 import com.google.protobuf.ByteString;
@@ -552,8 +552,8 @@ public static LocatedBlock convert(LocatedBlockProto proto) {
     return lb;
   }
 
-  public static BlockTokenIdentifierProto convert(Token<?> tok) {
-    return BlockTokenIdentifierProto.newBuilder().
+  public static TokenProto convert(Token<?> tok) {
+    return TokenProto.newBuilder().
         setIdentifier(ByteString.copyFrom(tok.getIdentifier())).
         setPassword(ByteString.copyFrom(tok.getPassword())).
         setKind(tok.getKind().toString()).
@@ -561,7 +561,7 @@ public static BlockTokenIdentifierProto convert(Token<?> tok) {
   }
 
   public static Token<BlockTokenIdentifier> convert(
-      BlockTokenIdentifierProto blockToken) {
+      TokenProto blockToken) {
     return new Token<BlockTokenIdentifier>(blockToken.getIdentifier()
         .toByteArray(), blockToken.getPassword().toByteArray(), new Text(
         blockToken.getKind()), new Text(blockToken.getService()));
@@ -569,7 +569,7 @@ public static Token<BlockTokenIdentifier> convert(
 
   public static Token<DelegationTokenIdentifier> convertDelegationToken(
-      BlockTokenIdentifierProto blockToken) {
+      TokenProto blockToken) {
     return new Token<DelegationTokenIdentifier>(blockToken.getIdentifier()
         .toByteArray(), blockToken.getPassword().toByteArray(), new Text(
         blockToken.getKind()), new Text(blockToken.getService()));

View File: hadoop-hdfs-project/hadoop-hdfs/src/main/proto/ClientDatanodeProtocol.proto

@@ -25,6 +25,7 @@ option java_generic_services = true;
 option java_generate_equals_and_hash = true;
 package hadoop.hdfs;
 
+import "Security.proto";
 import "hdfs.proto";
 
 /**
@@ -76,7 +77,7 @@
  */
 message GetBlockLocalPathInfoRequestProto {
   required ExtendedBlockProto block = 1;
-  required BlockTokenIdentifierProto token = 2;
+  required hadoop.common.TokenProto token = 2;
 }
/**
@@ -96,7 +97,7 @@
  */
 message GetHdfsBlockLocationsRequestProto {
   repeated ExtendedBlockProto blocks = 1;
-  repeated BlockTokenIdentifierProto tokens = 2;
+  repeated hadoop.common.TokenProto tokens = 2;
 }
/**

View File: hadoop-hdfs-project/hadoop-hdfs/src/main/proto/ClientNamenodeProtocol.proto

@@ -22,6 +22,7 @@ option java_generic_services = true;
 option java_generate_equals_and_hash = true;
 package hadoop.hdfs;
 
+import "Security.proto";
 import "hdfs.proto";
 
 /**
@@ -413,11 +414,11 @@ message GetDelegationTokenRequestProto {
 }
 
 message GetDelegationTokenResponseProto {
-  required BlockTokenIdentifierProto token = 1;
+  required hadoop.common.TokenProto token = 1;
 }
 
 message RenewDelegationTokenRequestProto {
-  required BlockTokenIdentifierProto token = 1;
+  required hadoop.common.TokenProto token = 1;
 }
 
 message RenewDelegationTokenResponseProto {
@@ -425,7 +426,7 @@
 }
 
 message CancelDelegationTokenRequestProto {
-  required BlockTokenIdentifierProto token = 1;
+  required hadoop.common.TokenProto token = 1;
 }
 
 message CancelDelegationTokenResponseProto { // void response

View File: hadoop-hdfs-project/hadoop-hdfs/src/main/proto/datatransfer.proto

@@ -24,6 +24,7 @@ option java_outer_classname = "DataTransferProtos";
 option java_generate_equals_and_hash = true;
 package hadoop.hdfs;
 
+import "Security.proto";
 import "hdfs.proto";
 
 message DataTransferEncryptorMessageProto {
@@ -39,7 +40,7 @@ message DataTransferEncryptorMessageProto {
 
 message BaseHeaderProto {
   required ExtendedBlockProto block = 1;
-  optional BlockTokenIdentifierProto token = 2;
+  optional hadoop.common.TokenProto token = 2;
 }
 
 message ClientOperationHeaderProto {

View File: hadoop-hdfs-project/hadoop-hdfs/src/main/proto/hdfs.proto

@@ -19,11 +19,14 @@
 
 // This file contains protocol buffers that are used throughout HDFS -- i.e.
 // by the client, server, and data transfer protocols.
 
 option java_package = "org.apache.hadoop.hdfs.protocol.proto";
 option java_outer_classname = "HdfsProtos";
 option java_generate_equals_and_hash = true;
 package hadoop.hdfs;
+
+import "Security.proto";
+
 /**
  * Extended block identifies a block
  */
@@ -35,16 +38,6 @@ message ExtendedBlockProto {
   // here for historical reasons
 }
 
-/**
- * Block Token
- */
-message BlockTokenIdentifierProto {
-  required bytes identifier = 1;
-  required bytes password = 2;
-  required string kind = 3;
-  required string service = 4;
-}
-
 /**
  * Identifies a Datanode
  */
@@ -126,7 +119,7 @@ message LocatedBlockProto {
   // If block has few corrupt replicas, they are filtered and
   // their locations are not part of this object
 
-  required BlockTokenIdentifierProto blockToken = 5;
+  required hadoop.common.TokenProto blockToken = 5;
 }
 
 message DataEncryptionKeyProto {

View File: hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocolPB/TestPBHelper.java

@@ -36,7 +36,6 @@
 import org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.DatanodeRegistrationProto;
 import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto;
 import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto;
-import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto;
 import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto;
 import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlocksWithLocationsProto;
 import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointSignatureProto;
@@ -69,6 +68,7 @@
 import org.apache.hadoop.hdfs.server.protocol.RemoteEditLog;
 import org.apache.hadoop.hdfs.server.protocol.RemoteEditLogManifest;
 import org.apache.hadoop.io.Text;
+import org.apache.hadoop.security.proto.SecurityProtos.TokenProto;
 import org.apache.hadoop.security.token.Token;
 
 import org.junit.Test;
@@ -374,7 +374,7 @@ public void testConvertBlockToken() {
     Token<BlockTokenIdentifier> token = new Token<BlockTokenIdentifier>(
         "identifier".getBytes(), "password".getBytes(), new Text("kind"),
         new Text("service"));
-    BlockTokenIdentifierProto tokenProto = PBHelper.convert(token);
+    TokenProto tokenProto = PBHelper.convert(token);
     Token<BlockTokenIdentifier> token2 = PBHelper.convert(tokenProto);
     compare(token, token2);
   }