HADOOP-16557. [pb-upgrade] Upgrade protobuf.version to 3.7.1 (#1432)

HADOOP-16557. [pb-upgrade] Upgrade protobuf.version to 3.7.1. Contributed by Vinayakumar B.
Vinayakumar B 2019-09-20 16:08:30 +05:30 committed by GitHub
parent d072d3304c
commit 1654497f98
20 changed files with 74 additions and 51 deletions

View File

@@ -6,7 +6,7 @@ Requirements:
* Unix System
* JDK 1.8
* Maven 3.3 or later
* ProtocolBuffer 2.5.0
* ProtocolBuffer 3.7.1
* CMake 3.1 or newer (if compiling native code)
* Zlib devel (if compiling native code)
* Cyrus SASL devel (if compiling native code)
@@ -62,8 +62,16 @@ Installing required packages for clean install of Ubuntu 14.04 LTS Desktop:
$ sudo apt-get -y install maven
* Native libraries
$ sudo apt-get -y install build-essential autoconf automake libtool cmake zlib1g-dev pkg-config libssl-dev libsasl2-dev
* ProtocolBuffer 2.5.0 (required)
$ sudo apt-get -y install protobuf-compiler
* ProtocolBuffer 3.7.1 (required)
$ mkdir -p /opt/protobuf-3.7-src \
&& curl -L -s -S \
https://github.com/protocolbuffers/protobuf/releases/download/v3.7.1/protobuf-java-3.7.1.tar.gz \
-o /opt/protobuf-3.7.1.tar.gz \
&& tar xzf /opt/protobuf-3.7.1.tar.gz --strip-components 1 -C /opt/protobuf-3.7-src \
&& cd /opt/protobuf-3.7-src \
&& ./configure \
&& make install \
&& rm -rf /opt/protobuf-3.7-src
Optional packages:
@@ -397,11 +405,10 @@ Installing required dependencies for clean install of macOS 10.14:
* Install native libraries, only openssl is required to compile native code,
you may optionally install zlib, lz4, etc.
$ brew install openssl
* Protocol Buffers 2.5.0 (required), since 2.5.0 is no longer in Homebrew,
we need to compile it from source
$ wget https://github.com/protocolbuffers/protobuf/releases/download/v2.5.0/protobuf-2.5.0.tar.gz
$ tar zxvf protobuf-2.5.0.tar.gz
$ cd protobuf-2.5.0
* Protocol Buffers 3.7.1 (required)
$ wget https://github.com/protocolbuffers/protobuf/releases/download/v3.7.1/protobuf-java-3.7.1.tar.gz
$ mkdir -p protobuf-3.7 && tar zxvf protobuf-java-3.7.1.tar.gz --strip-components 1 -C protobuf-3.7
$ cd protobuf-3.7
$ ./configure
$ make
$ make check
@@ -432,7 +439,7 @@ Requirements:
* Windows System
* JDK 1.8
* Maven 3.0 or later
* ProtocolBuffer 2.5.0
* ProtocolBuffer 3.7.1
* CMake 3.1 or newer
* Visual Studio 2010 Professional or Higher
* Windows SDK 8.1 (if building CPU rate control for the container executor)

View File

@@ -229,6 +229,13 @@
<exclude>update*</exclude>
</excludes>
</filter>
<filter>
<artifact>com.google.protobuf:protobuf-java</artifact>
<excludes>
<exclude>google/protobuf/*.proto</exclude>
<exclude>google/protobuf/**/*.proto</exclude>
</excludes>
</filter>
</filters>
<relocations>
<relocation>

View File

@@ -66,7 +66,7 @@ public String getClassName() {
* @return may be null if the code was newer than our protobuf definitions or none was given.
*/
public RpcErrorCodeProto getErrorCode() {
return RpcErrorCodeProto.valueOf(errorCode);
return RpcErrorCodeProto.forNumber(errorCode);
}
/**
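
A note on the API change above, not part of the patch: protobuf 3 generated enums still emit a deprecated valueOf(int) that delegates to forNumber(int), and forNumber(int) returns null for a number the bundled .proto definitions do not define, so the "may be null" contract in the javadoc is unchanged. A minimal, hypothetical sketch of that behaviour:

    import org.apache.hadoop.ipc.protobuf.RpcHeaderProtos.RpcResponseHeaderProto.RpcErrorCodeProto;

    // Sketch only (hypothetical demo class): forNumber() maps a wire number to
    // the generated enum and yields null, not an exception, for unknown values.
    final class ErrorCodeDemo {
      public static void main(String[] args) {
        System.out.println(RpcErrorCodeProto.forNumber(1));    // a defined code
        System.out.println(RpcErrorCodeProto.forNumber(9999)); // null
      }
    }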

View File

@@ -106,7 +106,7 @@ Message getMessage() {
@Override
void writeTo(ResponseBuffer out) throws IOException {
int length = message.getSerializedSize();
length += CodedOutputStream.computeRawVarint32Size(length);
length += CodedOutputStream.computeUInt32SizeNoTag(length);
out.ensureCapacity(length);
message.writeDelimitedTo(out);
}
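
For context, not part of the patch: computeUInt32SizeNoTag is protobuf 3's name for the byte size of a bare, untagged varint, which is exactly the length prefix that writeDelimitedTo() emits, so the capacity calculation above keeps its meaning. A self-contained sketch of the same arithmetic, assuming an arbitrary generated Message:

    import com.google.protobuf.CodedOutputStream;
    import com.google.protobuf.Message;

    // Sketch only: bytes a message occupies when written with writeDelimitedTo(),
    // i.e. the untagged length varint followed by the serialized payload.
    final class DelimitedSize {
      static int of(Message msg) {
        int payload = msg.getSerializedSize();
        return payload + CodedOutputStream.computeUInt32SizeNoTag(payload);
      }
    }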

View File

@@ -3274,10 +3274,10 @@ private byte[] setupResponseForProtobuf(
cos.writeRawByte((byte)((length >>> 16) & 0xFF));
cos.writeRawByte((byte)((length >>> 8) & 0xFF));
cos.writeRawByte((byte)((length >>> 0) & 0xFF));
cos.writeRawVarint32(header.getSerializedSize());
cos.writeUInt32NoTag(header.getSerializedSize());
header.writeTo(cos);
if (payload != null) {
cos.writeRawVarint32(payload.getSerializedSize());
cos.writeUInt32NoTag(payload.getSerializedSize());
payload.writeTo(cos);
}
return buf;
@@ -3285,7 +3285,7 @@ private byte[] setupResponseForProtobuf(
private static int getDelimitedLength(Message message) {
int length = message.getSerializedSize();
return length + CodedOutputStream.computeRawVarint32Size(length);
return length + CodedOutputStream.computeUInt32SizeNoTag(length);
}
/**
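
Reviewer note, not part of the patch: writeUInt32NoTag is the protobuf 3 spelling of the old writeRawVarint32, so the response framing above is unchanged: a raw big-endian total length written byte by byte, then the header and the optional payload, each preceded by its own untagged length varint. A sketch of that inner step under the new name (the class and method are hypothetical; 'cos' and the message would come from the caller):

    import com.google.protobuf.CodedOutputStream;
    import com.google.protobuf.Message;
    import java.io.IOException;

    // Sketch only: write one length-delimited protobuf part, as the RPC
    // response does for its header and optional payload.
    final class Framing {
      static void writeDelimited(CodedOutputStream cos, Message part) throws IOException {
        cos.writeUInt32NoTag(part.getSerializedSize()); // untagged length varint
        part.writeTo(cos);                              // then the message bytes
      }
    }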

View File

@@ -69,7 +69,7 @@ public void testVarInt() throws IOException {
private void doVarIntTest(int value) throws IOException {
ByteArrayOutputStream baos = new ByteArrayOutputStream();
CodedOutputStream cout = CodedOutputStream.newInstance(baos);
cout.writeRawVarint32(value);
cout.writeUInt32NoTag(value);
cout.flush();
DataInputStream dis = new DataInputStream(

View File

@@ -75,7 +75,7 @@ private enum StatusFormat {
}
static Status getStatus(int header) {
return Status.valueOf((int) STATUS.BITS.retrieve(header));
return Status.forNumber((int) STATUS.BITS.retrieve(header));
}
static ECN getECN(int header) {

View File

@@ -293,7 +293,7 @@ public static DataChecksum.Type convert(HdfsProtos.ChecksumTypeProto type) {
}
public static HdfsProtos.ChecksumTypeProto convert(DataChecksum.Type type) {
return HdfsProtos.ChecksumTypeProto.valueOf(type.id);
return HdfsProtos.ChecksumTypeProto.forNumber(type.id);
}
public static HdfsProtos.BlockChecksumTypeProto convert(
@@ -1115,7 +1115,7 @@ public static BlockStoragePolicy convert(BlockStoragePolicyProto proto) {
}
public static FsActionProto convert(FsAction v) {
return FsActionProto.valueOf(v != null ? v.ordinal() : 0);
return FsActionProto.forNumber(v != null ? v.ordinal() : 0);
}
public static XAttrProto convertXAttrProto(XAttr a) {
@@ -1157,7 +1157,7 @@ public static List<XAttr> convertXAttrs(List<XAttrProto> xAttrSpec) {
}
static XAttrNamespaceProto convert(XAttr.NameSpace v) {
return XAttrNamespaceProto.valueOf(v.ordinal());
return XAttrNamespaceProto.forNumber(v.ordinal());
}
static XAttr.NameSpace convert(XAttrNamespaceProto v) {
@@ -1249,7 +1249,7 @@ public static List<AclEntry> convertAclEntry(List<AclEntryProto> aclSpec) {
}
static AclEntryScopeProto convert(AclEntryScope v) {
return AclEntryScopeProto.valueOf(v.ordinal());
return AclEntryScopeProto.forNumber(v.ordinal());
}
private static AclEntryScope convert(AclEntryScopeProto v) {
@@ -1257,7 +1257,7 @@ private static AclEntryScope convert(AclEntryScopeProto v) {
}
static AclEntryTypeProto convert(AclEntryType e) {
return AclEntryTypeProto.valueOf(e.ordinal());
return AclEntryTypeProto.forNumber(e.ordinal());
}
private static AclEntryType convert(AclEntryTypeProto v) {
@@ -3220,7 +3220,7 @@ public static ErasureCodingPolicyState convertECState(
public static HdfsProtos.ErasureCodingPolicyState convertECState(
ErasureCodingPolicyState state) {
return HdfsProtos.ErasureCodingPolicyState.valueOf(state.getValue());
return HdfsProtos.ErasureCodingPolicyState.forNumber(state.getValue());
}
/**
@@ -3356,7 +3356,7 @@ public static List<AddBlockFlagProto> convertAddBlockFlags(
EnumSet<AddBlockFlag> flags) {
List<AddBlockFlagProto> ret = new ArrayList<>();
for (AddBlockFlag flag : flags) {
AddBlockFlagProto abfp = AddBlockFlagProto.valueOf(flag.getMode());
AddBlockFlagProto abfp = AddBlockFlagProto.forNumber(flag.getMode());
if (abfp != null) {
ret.add(abfp);
}
@@ -3409,7 +3409,8 @@ public static List<OpenFilesTypeProto> convertOpenFileTypes(
EnumSet<OpenFilesType> types) {
List<OpenFilesTypeProto> typeProtos = new ArrayList<>();
for (OpenFilesType type : types) {
OpenFilesTypeProto typeProto = OpenFilesTypeProto.valueOf(type.getMode());
OpenFilesTypeProto typeProto = OpenFilesTypeProto
.forNumber(type.getMode());
if (typeProto != null) {
typeProtos.add(typeProto);
}
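
For context, not part of the patch: these converters map Java enums to their proto counterparts by number and back, relying on the .proto numbers lining up with the Java ordinals; forNumber() returns null rather than throwing, which is why the AddBlockFlag and OpenFilesType loops above keep their null checks. A hypothetical sketch of the same pattern with an explicit guard (import paths assumed from the patch context):

    import org.apache.hadoop.fs.permission.FsAction;
    import org.apache.hadoop.hdfs.protocol.proto.AclProtos.AclEntryProto.FsActionProto;

    // Sketch only: ordinal-to-number conversion with an explicit unknown-value guard.
    final class EnumConvert {
      static FsActionProto toProto(FsAction action) {
        FsActionProto proto = FsActionProto.forNumber(action != null ? action.ordinal() : 0);
        if (proto == null) {
          throw new IllegalArgumentException("No FsActionProto for " + action);
        }
        return proto;
      }
    }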

View File

@@ -22,7 +22,7 @@
import org.apache.commons.codec.binary.Base64;
import com.google.protobuf.GeneratedMessage;
import com.google.protobuf.GeneratedMessageV3;
import com.google.protobuf.Message;
import com.google.protobuf.Message.Builder;
import com.google.protobuf.MessageOrBuilder;
@@ -31,7 +31,7 @@
* Helper class for setting/getting data elements in an object backed by a
* protobuf implementation.
*/
public class FederationProtocolPBTranslator<P extends GeneratedMessage,
public class FederationProtocolPBTranslator<P extends GeneratedMessageV3,
B extends Builder, T extends MessageOrBuilder> {
/** Optional proto byte stream used to create this object. */
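
For context, not part of the patch: protobuf 3 generated message classes extend GeneratedMessageV3 rather than GeneratedMessage, so every type bound or helper signature that named the old base class has to move to the new one; the same substitution recurs in the image and log writers and the YARN constraint converter later in this diff. A hypothetical sketch of a helper bounded on the new base type, reusing the Base64 codec this translator already imports:

    import org.apache.commons.codec.binary.Base64;
    import com.google.protobuf.GeneratedMessageV3;

    // Sketch only: every protobuf 3 generated message extends GeneratedMessageV3,
    // so this single bound accepts any record the state store serializes.
    final class ProtoBase64 {
      static String encode(GeneratedMessageV3 msg) {
        return Base64.encodeBase64String(msg.toByteArray());
      }
    }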

View File

@@ -276,12 +276,12 @@ public void add(Replica replica) {
try {
// zig-zag to reduce size of legacy blocks
cos.writeSInt64NoTag(replica.getBlockId());
cos.writeRawVarint64(replica.getBytesOnDisk());
cos.writeRawVarint64(replica.getGenerationStamp());
cos.writeUInt64NoTag(replica.getBytesOnDisk());
cos.writeUInt64NoTag(replica.getGenerationStamp());
ReplicaState state = replica.getState();
// although state is not a 64-bit value, using a long varint to
// allow for future use of the upper bits
cos.writeRawVarint64(state.getValue());
cos.writeUInt64NoTag(state.getValue());
if (state == ReplicaState.FINALIZED) {
numFinalized++;
}
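
For context, not part of the patch: writeUInt64NoTag replaces writeRawVarint64 one for one; only writeSInt64NoTag zig-zag encodes, which is why the block ID (possibly negative for legacy, randomly assigned IDs) keeps it while the bytes on disk, generation stamp and state do not. A runnable sketch of the size difference, with arbitrary values:

    import com.google.protobuf.CodedOutputStream;
    import java.io.ByteArrayOutputStream;
    import java.io.IOException;

    // Sketch only (hypothetical demo class): zig-zag keeps small negative values
    // short, while a raw unsigned varint of the same value takes ten bytes.
    final class VarintDemo {
      public static void main(String[] args) throws IOException {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        CodedOutputStream cos = CodedOutputStream.newInstance(baos);
        cos.writeSInt64NoTag(-1L);  // 1 byte after zig-zag encoding
        cos.writeUInt64NoTag(-1L);  // 10 bytes as a raw unsigned varint
        cos.flush();
        System.out.println(baos.size()); // 11
      }
    }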

View File

@@ -1015,8 +1015,9 @@ private SectionName(String name) {
}
}
private static int getOndiskTrunkSize(com.google.protobuf.GeneratedMessage s) {
return CodedOutputStream.computeRawVarint32Size(s.getSerializedSize())
private static int getOndiskTrunkSize(
com.google.protobuf.GeneratedMessageV3 s) {
return CodedOutputStream.computeUInt32SizeNoTag(s.getSerializedSize())
+ s.getSerializedSize();
}

View File

@@ -433,7 +433,7 @@ private void dumpXattrs(INodeSection.XAttrFeatureProto xattrs) {
int ns = (XATTR_NAMESPACE_MASK & (encodedName >> XATTR_NAMESPACE_OFFSET)) |
((XATTR_NAMESPACE_EXT_MASK & (encodedName >> XATTR_NAMESPACE_EXT_OFFSET)) << 2);
o(INODE_SECTION_NS, XAttrProtos.XAttrProto.
XAttrNamespaceProto.valueOf(ns).toString());
XAttrNamespaceProto.forNumber(ns).toString());
o(SECTION_NAME, SerialNumberManager.XATTR.getString(
XATTR_NAME_MASK & (encodedName >> XATTR_NAME_OFFSET),
stringTable));

View File

@@ -910,7 +910,7 @@ public void testFSServerDefaultsHelper() {
b.setFileBufferSize(DFSConfigKeys.IO_FILE_BUFFER_SIZE_DEFAULT);
b.setEncryptDataTransfer(DFSConfigKeys.DFS_ENCRYPT_DATA_TRANSFER_DEFAULT);
b.setTrashInterval(DFSConfigKeys.FS_TRASH_INTERVAL_DEFAULT);
b.setChecksumType(HdfsProtos.ChecksumTypeProto.valueOf(
b.setChecksumType(HdfsProtos.ChecksumTypeProto.forNumber(
DataChecksum.Type.valueOf(DFSConfigKeys.DFS_CHECKSUM_TYPE_DEFAULT).id));
HdfsProtos.FsServerDefaultsProto proto = b.build();

View File

@@ -75,6 +75,9 @@ public void tearDown() {
@Test
public void testBlockReportExceedsLengthLimit() throws Exception {
// protobuf's default message size limit increased to 2 GB from protobuf 3.x
// onwards, so protobuf itself no longer throws an exception for this report.
conf.setInt(IPC_MAXIMUM_DATA_LENGTH, IPC_MAXIMUM_DATA_LENGTH_DEFAULT / 2);
initCluster();
// Create a large enough report that we expect it will go beyond the RPC
// server's length validation, and also protobuf length validation.
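
For context, not part of the patch: with protobuf 3's decode limit raised to roughly 2 GB, the protobuf-level length check this test used to trip is out of reach, so the test tightens Hadoop's own ipc.maximum.data.length instead and relies on the RPC server's validation. If a stricter protobuf-level cap were ever wanted again it can still be set per stream; a hypothetical sketch (the 64 MB figure mirrors the old protobuf 2.5 default, and 'serializedReport' is an arbitrary byte array):

    import com.google.protobuf.CodedInputStream;

    // Sketch only: impose a per-stream limit; parsing past it throws
    // InvalidProtocolBufferException.
    final class LimitSketch {
      static CodedInputStream limited(byte[] serializedReport) {
        CodedInputStream in = CodedInputStream.newInstance(serializedReport);
        in.setSizeLimit(64 * 1024 * 1024); // roughly the old protobuf 2.5 default
        return in;
      }
    }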

View File

@@ -84,7 +84,7 @@
<!-- ProtocolBuffer version, used to verify the protoc version and -->
<!-- define the protobuf JAR version -->
<protobuf.version>2.5.0</protobuf.version>
<protobuf.version>3.7.1</protobuf.version>
<protoc.path>${env.HADOOP_PROTOC_PATH}</protoc.path>
<zookeeper.version>3.4.13</zookeeper.version>
@@ -1918,6 +1918,9 @@
<activation>
<activeByDefault>false</activeByDefault>
</activation>
<properties>
<protoc.path>/opt/protobuf-3.7/bin/protoc</protoc.path>
</properties>
<build>
<plugins>
<plugin>

View File

@@ -266,8 +266,8 @@ synchronized void writeDirEntry(DirEntry e) throws IOException {
e.writeDelimitedTo(dirs);
}
private static int getOndiskSize(com.google.protobuf.GeneratedMessage s) {
return CodedOutputStream.computeRawVarint32Size(s.getSerializedSize())
private static int getOndiskSize(com.google.protobuf.GeneratedMessageV3 s) {
return CodedOutputStream.computeUInt32SizeNoTag(s.getSerializedSize())
+ s.getSerializedSize();
}

View File

@@ -55,6 +55,7 @@
<groupId>org.apache.hadoop</groupId>
<artifactId>hadoop-annotations</artifactId>
</dependency>
<dependency>
<groupId>com.google.protobuf</groupId>
<artifactId>protobuf-java</artifactId>

View File

@@ -41,7 +41,7 @@
import org.apache.hadoop.yarn.proto.YarnProtos.SimplePlacementConstraintProto;
import org.apache.hadoop.yarn.proto.YarnProtos.TimedPlacementConstraintProto;
import com.google.protobuf.GeneratedMessage;
import com.google.protobuf.GeneratedMessageV3;
/**
* {@code PlacementConstraintToProtoConverter} generates a
@@ -50,7 +50,7 @@
*/
@Private
public class PlacementConstraintToProtoConverter
implements PlacementConstraint.Visitor<GeneratedMessage> {
implements PlacementConstraint.Visitor<GeneratedMessageV3> {
private PlacementConstraint placementConstraint;
@@ -65,7 +65,7 @@ public PlacementConstraintProto convert() {
}
@Override
public GeneratedMessage visit(SingleConstraint constraint) {
public GeneratedMessageV3 visit(SingleConstraint constraint) {
SimplePlacementConstraintProto.Builder sb =
SimplePlacementConstraintProto.newBuilder();
@@ -94,7 +94,7 @@ public GeneratedMessage visit(SingleConstraint constraint) {
}
@Override
public GeneratedMessage visit(TargetExpression target) {
public GeneratedMessageV3 visit(TargetExpression target) {
PlacementConstraintTargetProto.Builder tb =
PlacementConstraintTargetProto.newBuilder();
@@ -109,16 +109,16 @@ public GeneratedMessage visit(TargetExpression target) {
}
@Override
public GeneratedMessage visit(TargetConstraint constraint) {
public GeneratedMessageV3 visit(TargetConstraint constraint) {
throw new YarnRuntimeException("Unexpected TargetConstraint found.");
}
@Override
public GeneratedMessage visit(CardinalityConstraint constraint) {
public GeneratedMessageV3 visit(CardinalityConstraint constraint) {
throw new YarnRuntimeException("Unexpected CardinalityConstraint found.");
}
private GeneratedMessage visitAndOr(
private GeneratedMessageV3 visitAndOr(
CompositeConstraint<AbstractConstraint> composite, CompositeType type) {
CompositePlacementConstraintProto.Builder cb =
CompositePlacementConstraintProto.newBuilder();
@@ -137,17 +137,17 @@ private GeneratedMessage visitAndOr(
}
@Override
public GeneratedMessage visit(And constraint) {
public GeneratedMessageV3 visit(And constraint) {
return visitAndOr(constraint, CompositeType.AND);
}
@Override
public GeneratedMessage visit(Or constraint) {
public GeneratedMessageV3 visit(Or constraint) {
return visitAndOr(constraint, CompositeType.OR);
}
@Override
public GeneratedMessage visit(DelayedOr constraint) {
public GeneratedMessageV3 visit(DelayedOr constraint) {
CompositePlacementConstraintProto.Builder cb =
CompositePlacementConstraintProto.newBuilder();
@@ -166,7 +166,7 @@ public GeneratedMessage visit(DelayedOr constraint) {
}
@Override
public GeneratedMessage visit(TimedPlacementConstraint constraint) {
public GeneratedMessageV3 visit(TimedPlacementConstraint constraint) {
TimedPlacementConstraintProto.Builder tb =
TimedPlacementConstraintProto.newBuilder();

View File

@@ -154,10 +154,10 @@ public void setVolumeCapability(
CsiAdaptorProtos.VolumeCapability vc =
CsiAdaptorProtos.VolumeCapability.newBuilder()
.setAccessMode(CsiAdaptorProtos.VolumeCapability
.AccessMode.valueOf(
.AccessMode.forNumber(
capability.getAccessMode().ordinal()))
.setVolumeType(CsiAdaptorProtos.VolumeCapability
.VolumeType.valueOf(capability.getVolumeType().ordinal()))
.VolumeType.forNumber(capability.getVolumeType().ordinal()))
.addAllMountFlags(capability.getMountFlags())
.build();
builder.setVolumeCapability(vc);

View File

@@ -68,9 +68,9 @@ public void addVolumeCapability(VolumeCapability volumeCapability) {
CsiAdaptorProtos.VolumeCapability vc =
CsiAdaptorProtos.VolumeCapability.newBuilder()
.setAccessMode(CsiAdaptorProtos.VolumeCapability.AccessMode
.valueOf(volumeCapability.getAccessMode().ordinal()))
.forNumber(volumeCapability.getAccessMode().ordinal()))
.setVolumeType(CsiAdaptorProtos.VolumeCapability.VolumeType
.valueOf(volumeCapability.getVolumeType().ordinal()))
.forNumber(volumeCapability.getVolumeType().ordinal()))
.addAllMountFlags(volumeCapability.getMountFlags())
.build();
builder.addVolumeCapabilities(vc);