diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
index 7b4ae98ff8..0b6aa86ef6 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
+++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES.txt
@@ -448,6 +448,9 @@ Release 2.0.3-alpha - Unreleased
that has reached its soft limit but not the hard limit. (Derek Dagit via
szetszwo)
+ HADOOP-9173. Add security token protobuf definition to common and
+ use it in hdfs. (suresh)
+
OPTIMIZATIONS
BUG FIXES
diff --git a/hadoop-hdfs-project/hadoop-hdfs/pom.xml b/hadoop-hdfs-project/hadoop-hdfs/pom.xml
index dc842ca283..67799c8570 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/pom.xml
+++ b/hadoop-hdfs-project/hadoop-hdfs/pom.xml
@@ -420,8 +420,10 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
<executable>protoc</executable>
<arguments>
+ <argument>-I../../hadoop-common-project/hadoop-common/src/main/proto/</argument>
<argument>-Isrc/main/proto/</argument>
<argument>--java_out=target/generated-sources/java</argument>
+ <argument>src/main/proto/hdfs.proto</argument>
<argument>src/main/proto/GetUserMappingsProtocol.proto</argument>
<argument>src/main/proto/HAZKInfo.proto</argument>
<argument>src/main/proto/InterDatanodeProtocol.proto</argument>
@@ -429,7 +431,6 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
<argument>src/main/proto/RefreshAuthorizationPolicyProtocol.proto</argument>
<argument>src/main/proto/RefreshUserMappingsProtocol.proto</argument>
<argument>src/main/proto/datatransfer.proto</argument>
- <argument>src/main/proto/hdfs.proto</argument>
@@ -442,6 +443,7 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
<executable>protoc</executable>
<arguments>
+ <argument>-I../../hadoop-common-project/hadoop-common/src/main/proto/</argument>
<argument>-Isrc/main/proto/</argument>
<argument>--java_out=target/generated-sources/java</argument>
<argument>src/main/proto/ClientDatanodeProtocol.proto</argument>
@@ -458,6 +460,7 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
<executable>protoc</executable>
<arguments>
+ <argument>-I../../hadoop-common-project/hadoop-common/src/main/proto/</argument>
<argument>-Isrc/main/proto/</argument>
<argument>--java_out=target/generated-sources/java</argument>
<argument>src/main/proto/ClientNamenodeProtocol.proto</argument>
@@ -474,6 +477,7 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
<executable>protoc</executable>
<arguments>
+ <argument>-I../../hadoop-common-project/hadoop-common/src/main/proto/</argument>
<argument>-Isrc/main/proto/</argument>
<argument>--java_out=target/generated-sources/java</argument>
<argument>src/main/proto/QJournalProtocol.proto</argument>
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/contrib/bkjournal/pom.xml b/hadoop-hdfs-project/hadoop-hdfs/src/contrib/bkjournal/pom.xml
index 3a37958ca6..e4379e97f4 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/contrib/bkjournal/pom.xml
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/contrib/bkjournal/pom.xml
@@ -143,6 +143,7 @@ http://maven.apache.org/xsd/maven-4.0.0.xsd">
<executable>protoc</executable>
<arguments>
+ <argument>-I../../../../../hadoop-common-project/hadoop-common/src/main/proto/</argument>
<argument>-Isrc/main/proto/</argument>
<argument>-I../../main/proto</argument>
<argument>--java_out=target/generated-sources/java</argument>
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/HdfsProtoUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/HdfsProtoUtil.java
index fe7446f674..ab8b95534b 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/HdfsProtoUtil.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocol/HdfsProtoUtil.java
@@ -30,6 +30,7 @@
import org.apache.hadoop.hdfs.util.ExactSizeInputStream;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.util.DataChecksum;
+import org.apache.hadoop.security.proto.SecurityProtos.TokenProto;
import org.apache.hadoop.security.token.Token;
import com.google.common.collect.Lists;
@@ -47,8 +48,8 @@ public abstract class HdfsProtoUtil {
//// Block Token ////
- public static HdfsProtos.BlockTokenIdentifierProto toProto(Token<?> blockToken) {
- return HdfsProtos.BlockTokenIdentifierProto.newBuilder()
+ public static TokenProto toProto(Token<?> blockToken) {
+ return TokenProto.newBuilder()
.setIdentifier(ByteString.copyFrom(blockToken.getIdentifier()))
.setPassword(ByteString.copyFrom(blockToken.getPassword()))
.setKind(blockToken.getKind().toString())
@@ -56,7 +57,7 @@ public static HdfsProtos.BlockTokenIdentifierProto toProto(Token<?> blockToken)
.build();
}
- public static Token<BlockTokenIdentifier> fromProto(HdfsProtos.BlockTokenIdentifierProto proto) {
+ public static Token<BlockTokenIdentifier> fromProto(TokenProto proto) {
return new Token<BlockTokenIdentifier>(proto.getIdentifier().toByteArray(),
proto.getPassword().toByteArray(),
new Text(proto.getKind()),
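
The two helpers above are field-for-field identical to the old ones; only the generated proto type changed. A minimal round-trip sketch against the new signatures (the wrapper class and sample values are hypothetical, for illustration only):

    import org.apache.hadoop.hdfs.protocol.HdfsProtoUtil;
    import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.security.proto.SecurityProtos.TokenProto;
    import org.apache.hadoop.security.token.Token;

    public class TokenProtoRoundTrip {
      public static void main(String[] args) {
        Token<BlockTokenIdentifier> token = new Token<BlockTokenIdentifier>(
            "identifier".getBytes(), "password".getBytes(),
            new Text("kind"), new Text("service"));
        // Token -> common TokenProto -> Token; all four fields are preserved.
        TokenProto proto = HdfsProtoUtil.toProto(token);
        Token<BlockTokenIdentifier> copy = HdfsProtoUtil.fromProto(proto);
        System.out.println(copy.getKind() + " @ " + copy.getService());
      }
    }
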
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientDatanodeProtocolServerSideTranslatorPB.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientDatanodeProtocolServerSideTranslatorPB.java
index cf447ce18a..210f334597 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientDatanodeProtocolServerSideTranslatorPB.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientDatanodeProtocolServerSideTranslatorPB.java
@@ -37,9 +37,9 @@
import org.apache.hadoop.hdfs.protocol.proto.ClientDatanodeProtocolProtos.GetReplicaVisibleLengthResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientDatanodeProtocolProtos.RefreshNamenodesRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientDatanodeProtocolProtos.RefreshNamenodesResponseProto;
-import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto;
import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto;
import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
+import org.apache.hadoop.security.proto.SecurityProtos.TokenProto;
import org.apache.hadoop.security.token.Token;
import com.google.protobuf.ByteString;
@@ -133,7 +133,7 @@ public GetHdfsBlockLocationsResponseProto getHdfsBlockLocations(
}
List<Token<BlockTokenIdentifier>> tokens =
new ArrayList<Token<BlockTokenIdentifier>>(request.getTokensCount());
- for (BlockTokenIdentifierProto b : request.getTokensList()) {
+ for (TokenProto b : request.getTokensList()) {
tokens.add(PBHelper.convert(b));
}
// Call the real implementation
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientDatanodeProtocolTranslatorPB.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientDatanodeProtocolTranslatorPB.java
index c7c8b08555..f38d0145c8 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientDatanodeProtocolTranslatorPB.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientDatanodeProtocolTranslatorPB.java
@@ -44,7 +44,6 @@
import org.apache.hadoop.hdfs.protocol.proto.ClientDatanodeProtocolProtos.GetHdfsBlockLocationsResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientDatanodeProtocolProtos.GetReplicaVisibleLengthRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientDatanodeProtocolProtos.RefreshNamenodesRequestProto;
-import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto;
import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ExtendedBlockProto;
import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
import org.apache.hadoop.ipc.ProtobufHelper;
@@ -55,6 +54,7 @@
import org.apache.hadoop.ipc.RpcClientUtil;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.UserGroupInformation;
+import org.apache.hadoop.security.proto.SecurityProtos.TokenProto;
import org.apache.hadoop.security.token.Token;
import com.google.protobuf.ByteString;
@@ -222,8 +222,8 @@ public HdfsBlocksMetadata getHdfsBlocksMetadata(List<ExtendedBlock> blocks,
// Convert to proto objects
List<ExtendedBlockProto> blocksProtos =
new ArrayList<ExtendedBlockProto>(blocks.size());
- List<BlockTokenIdentifierProto> tokensProtos =
- new ArrayList<BlockTokenIdentifierProto>(tokens.size());
+ List<TokenProto> tokensProtos =
+ new ArrayList<TokenProto>(tokens.size());
for (ExtendedBlock b : blocks) {
blocksProtos.add(PBHelper.convert(b));
}
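
The declarations above feed a conversion loop that is unchanged by this patch. A hedged sketch of that step, extracted into a standalone helper (the class name is hypothetical):

    import java.util.ArrayList;
    import java.util.List;
    import org.apache.hadoop.hdfs.protocolPB.PBHelper;
    import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
    import org.apache.hadoop.security.proto.SecurityProtos.TokenProto;
    import org.apache.hadoop.security.token.Token;

    class TokenListConversion {
      // One hadoop.common.TokenProto per block token, preserving request order.
      static List<TokenProto> toProtos(List<Token<BlockTokenIdentifier>> tokens) {
        List<TokenProto> tokensProtos = new ArrayList<TokenProto>(tokens.size());
        for (Token<BlockTokenIdentifier> t : tokens) {
          tokensProtos.add(PBHelper.convert(t));
        }
        return tokensProtos;
      }
    }
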
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolServerSideTranslatorPB.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolServerSideTranslatorPB.java
index 668fa00e07..1dc98cdff9 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolServerSideTranslatorPB.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/ClientNamenodeProtocolServerSideTranslatorPB.java
@@ -125,12 +125,12 @@
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.UpdateBlockForPipelineResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.UpdatePipelineRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos.UpdatePipelineResponseProto;
-import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto;
import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeIDProto;
import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DatanodeInfoProto;
import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.LocatedBlockProto;
import org.apache.hadoop.hdfs.security.token.block.DataEncryptionKey;
import org.apache.hadoop.io.Text;
+import org.apache.hadoop.security.proto.SecurityProtos.TokenProto;
import com.google.protobuf.RpcController;
import com.google.protobuf.ServiceException;
@@ -774,7 +774,7 @@ public GetDelegationTokenResponseProto getDelegationToken(
RpcController controller, GetDelegationTokenRequestProto req)
throws ServiceException {
try {
- BlockTokenIdentifierProto result = PBHelper.convert(server
+ TokenProto result = PBHelper.convert(server
.getDelegationToken(new Text(req.getRenewer())));
return GetDelegationTokenResponseProto.newBuilder().setToken(result)
.build();
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelper.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelper.java
index e7833d1c2f..500e9bdd1f 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelper.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelper.java
@@ -64,7 +64,6 @@
import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.DataEncryptionKeyProto;
import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto;
import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto;
-import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto;
import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto;
import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlocksWithLocationsProto;
import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointCommandProto;
@@ -130,6 +129,7 @@
import org.apache.hadoop.hdfs.server.protocol.RemoteEditLogManifest;
import org.apache.hadoop.io.EnumSetWritable;
import org.apache.hadoop.io.Text;
+import org.apache.hadoop.security.proto.SecurityProtos.TokenProto;
import org.apache.hadoop.security.token.Token;
import com.google.protobuf.ByteString;
@@ -552,8 +552,8 @@ public static LocatedBlock convert(LocatedBlockProto proto) {
return lb;
}
- public static BlockTokenIdentifierProto convert(Token<?> tok) {
- return BlockTokenIdentifierProto.newBuilder().
+ public static TokenProto convert(Token<?> tok) {
+ return TokenProto.newBuilder().
setIdentifier(ByteString.copyFrom(tok.getIdentifier())).
setPassword(ByteString.copyFrom(tok.getPassword())).
setKind(tok.getKind().toString()).
@@ -561,7 +561,7 @@ public static BlockTokenIdentifierProto convert(Token<?> tok) {
}
public static Token<BlockTokenIdentifier> convert(
- BlockTokenIdentifierProto blockToken) {
+ TokenProto blockToken) {
return new Token<BlockTokenIdentifier>(blockToken.getIdentifier()
.toByteArray(), blockToken.getPassword().toByteArray(), new Text(
blockToken.getKind()), new Text(blockToken.getService()));
@@ -569,7 +569,7 @@ public static Token<BlockTokenIdentifier> convert(
public static Token<DelegationTokenIdentifier> convertDelegationToken(
- BlockTokenIdentifierProto blockToken) {
+ TokenProto blockToken) {
return new Token<DelegationTokenIdentifier>(blockToken.getIdentifier()
.toByteArray(), blockToken.getPassword().toByteArray(), new Text(
blockToken.getKind()), new Text(blockToken.getService()));
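
After this change, convert and convertDelegationToken differ only in the Token type parameter; both read the same four TokenProto fields. A generic helper along these lines (hypothetical, not part of PBHelper) would subsume both:

    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.security.proto.SecurityProtos.TokenProto;
    import org.apache.hadoop.security.token.Token;
    import org.apache.hadoop.security.token.TokenIdentifier;

    class GenericTokenConversion {
      // Rebuild a Token of any identifier type from the common proto fields.
      static <T extends TokenIdentifier> Token<T> fromProto(TokenProto p) {
        return new Token<T>(p.getIdentifier().toByteArray(),
            p.getPassword().toByteArray(),
            new Text(p.getKind()), new Text(p.getService()));
      }
    }
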
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/ClientDatanodeProtocol.proto b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/ClientDatanodeProtocol.proto
index 5392077b37..ca24f7a4a4 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/ClientDatanodeProtocol.proto
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/ClientDatanodeProtocol.proto
@@ -25,6 +25,7 @@ option java_generic_services = true;
option java_generate_equals_and_hash = true;
package hadoop.hdfs;
+import "Security.proto";
import "hdfs.proto";
/**
@@ -76,7 +77,7 @@ message DeleteBlockPoolResponseProto {
*/
message GetBlockLocalPathInfoRequestProto {
required ExtendedBlockProto block = 1;
- required BlockTokenIdentifierProto token = 2;
+ required hadoop.common.TokenProto token = 2;
}
/**
@@ -96,7 +97,7 @@ message GetBlockLocalPathInfoResponseProto {
*/
message GetHdfsBlockLocationsRequestProto {
repeated ExtendedBlockProto blocks = 1;
- repeated BlockTokenIdentifierProto tokens = 2;
+ repeated hadoop.common.TokenProto tokens = 2;
}
/**
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/ClientNamenodeProtocol.proto b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/ClientNamenodeProtocol.proto
index fe09ef70b1..0a10693853 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/ClientNamenodeProtocol.proto
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/ClientNamenodeProtocol.proto
@@ -22,6 +22,7 @@ option java_generic_services = true;
option java_generate_equals_and_hash = true;
package hadoop.hdfs;
+import "Security.proto";
import "hdfs.proto";
/**
@@ -413,11 +414,11 @@ message GetDelegationTokenRequestProto {
}
message GetDelegationTokenResponseProto {
- required BlockTokenIdentifierProto token = 1;
+ required hadoop.common.TokenProto token = 1;
}
message RenewDelegationTokenRequestProto {
- required BlockTokenIdentifierProto token = 1;
+ required hadoop.common.TokenProto token = 1;
}
message RenewDelegationTokenResponseProto {
@@ -425,7 +426,7 @@ message RenewDelegationTokenResponseProto {
}
message CancelDelegationTokenRequestProto {
- required BlockTokenIdentifierProto token = 1;
+ required hadoop.common.TokenProto token = 1;
}
message CancelDelegationTokenResponseProto { // void response
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/datatransfer.proto b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/datatransfer.proto
index d202f79a97..8ce5fd7566 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/datatransfer.proto
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/datatransfer.proto
@@ -24,6 +24,7 @@ option java_outer_classname = "DataTransferProtos";
option java_generate_equals_and_hash = true;
package hadoop.hdfs;
+import "Security.proto";
import "hdfs.proto";
message DataTransferEncryptorMessageProto {
@@ -39,7 +40,7 @@ message DataTransferEncryptorMessageProto {
message BaseHeaderProto {
required ExtendedBlockProto block = 1;
- optional BlockTokenIdentifierProto token = 2;
+ optional hadoop.common.TokenProto token = 2;
}
message ClientOperationHeaderProto {
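
The token field in BaseHeaderProto stays optional, so receivers on unsecured clusters must still guard on presence before converting. A sketch of that guard (the helper class is hypothetical):

    import org.apache.hadoop.hdfs.protocol.proto.DataTransferProtos.BaseHeaderProto;
    import org.apache.hadoop.hdfs.protocolPB.PBHelper;
    import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
    import org.apache.hadoop.security.token.Token;

    class HeaderTokenAccess {
      static Token<BlockTokenIdentifier> tokenOrNull(BaseHeaderProto header) {
        // With block access tokens disabled the field is simply absent.
        return header.hasToken() ? PBHelper.convert(header.getToken()) : null;
      }
    }
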
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/hdfs.proto b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/hdfs.proto
index 16b149efb8..0bc26290a1 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/hdfs.proto
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/hdfs.proto
@@ -19,11 +19,14 @@
// This file contains protocol buffers that are used throughout HDFS -- i.e.
// by the client, server, and data transfer protocols.
+
option java_package = "org.apache.hadoop.hdfs.protocol.proto";
option java_outer_classname = "HdfsProtos";
option java_generate_equals_and_hash = true;
package hadoop.hdfs;
+import "Security.proto";
+
/**
* Extended block identifies a block
*/
@@ -35,16 +38,6 @@ message ExtendedBlockProto {
// here for historical reasons
}
-/**
- * Block Token
- */
-message BlockTokenIdentifierProto {
- required bytes identifier = 1;
- required bytes password = 2;
- required string kind = 3;
- required string service = 4;
-}
-
/**
* Identifies a Datanode
*/
@@ -126,7 +119,7 @@ message LocatedBlockProto {
// If block has few corrupt replicas, they are filtered and
// their locations are not part of this object
- required BlockTokenIdentifierProto blockToken = 5;
+ required hadoop.common.TokenProto blockToken = 5;
}
message DataEncryptionKeyProto {
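
The removed BlockTokenIdentifierProto and the common TokenProto declare the same four fields at the same tag numbers (identifier = 1, password = 2, kind = 3, service = 4), so bytes written under the old message definition still parse under the new one. A small sketch of that wire-level equivalence (sample values hypothetical):

    import com.google.protobuf.ByteString;
    import com.google.protobuf.InvalidProtocolBufferException;
    import org.apache.hadoop.security.proto.SecurityProtos.TokenProto;

    class WireCompat {
      static TokenProto reparse() throws InvalidProtocolBufferException {
        TokenProto t = TokenProto.newBuilder()
            .setIdentifier(ByteString.copyFromUtf8("identifier"))
            .setPassword(ByteString.copyFromUtf8("password"))
            .setKind("kind")
            .setService("service")
            .build();
        // Identical tags and types mean old writers and new readers agree.
        return TokenProto.parseFrom(t.toByteArray());
      }
    }
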
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocolPB/TestPBHelper.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocolPB/TestPBHelper.java
index c6776783ee..65a6ed0fe2 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocolPB/TestPBHelper.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocolPB/TestPBHelper.java
@@ -36,7 +36,6 @@
import org.apache.hadoop.hdfs.protocol.proto.DatanodeProtocolProtos.DatanodeRegistrationProto;
import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockKeyProto;
import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto;
-import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTokenIdentifierProto;
import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockWithLocationsProto;
import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlocksWithLocationsProto;
import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.CheckpointSignatureProto;
@@ -69,6 +68,7 @@
import org.apache.hadoop.hdfs.server.protocol.RemoteEditLog;
import org.apache.hadoop.hdfs.server.protocol.RemoteEditLogManifest;
import org.apache.hadoop.io.Text;
+import org.apache.hadoop.security.proto.SecurityProtos.TokenProto;
import org.apache.hadoop.security.token.Token;
import org.junit.Test;
@@ -374,7 +374,7 @@ public void testConvertBlockToken() {
Token<BlockTokenIdentifier> token = new Token<BlockTokenIdentifier>(
"identifier".getBytes(), "password".getBytes(), new Text("kind"),
new Text("service"));
- BlockTokenIdentifierProto tokenProto = PBHelper.convert(token);
+ TokenProto tokenProto = PBHelper.convert(token);
Token<BlockTokenIdentifier> token2 = PBHelper.convert(tokenProto);
compare(token, token2);
}