diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index 7317f4f9c9..33ae89ab5f 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -74,6 +74,8 @@ Trunk (unreleased changes)
     HADOOP-7886. Add toString to FileStatus. (SreeHari via jghoman)
 
+    HADOOP-7899. Generate proto java files as part of the build. (tucu)
+
   BUGS
 
     HADOOP-7851. Configuration.getClasses() never returns the default value.
diff --git a/hadoop-common-project/hadoop-common/pom.xml b/hadoop-common-project/hadoop-common/pom.xml
index ab8bf4819d..1dc545a274 100644
--- a/hadoop-common-project/hadoop-common/pom.xml
+++ b/hadoop-common-project/hadoop-common/pom.xml
@@ -294,6 +294,52 @@
         <groupId>org.apache.maven.plugins</groupId>
         <artifactId>maven-antrun-plugin</artifactId>
         <executions>
+          <execution>
+            <id>compile-proto</id>
+            <phase>generate-sources</phase>
+            <goals>
+              <goal>run</goal>
+            </goals>
+            <configuration>
+              <target>
+                <echo file="target/compile-proto.sh">
+                    PROTO_DIR=${basedir}/src/main/proto
+                    ls $PROTO_DIR &amp;> /dev/null
+                    if [ $? = 0 ]; then
+                      JAVA_DIR=${project.build.directory}/generated-sources/java
+                      mkdir -p $JAVA_DIR
+                      ls $PROTO_DIR/*.proto | xargs -n 1 protoc -I$PROTO_DIR --java_out=$JAVA_DIR
+                    fi
+                </echo>
+                <exec executable="sh" dir="${basedir}" failonerror="true">
+                  <arg line="target/compile-proto.sh"/>
+                </exec>
+              </target>
+            </configuration>
+          </execution>
+          <execution>
+            <id>compile-test-proto</id>
+            <phase>generate-test-sources</phase>
+            <goals>
+              <goal>run</goal>
+            </goals>
+            <configuration>
+              <target>
+                <echo file="target/compile-test-proto.sh">
+                    PROTO_DIR=${basedir}/src/test/proto
+                    ls $PROTO_DIR &amp;> /dev/null
+                    if [ $? = 0 ]; then
+                      JAVA_DIR=${project.build.directory}/generated-test-sources/java
+                      mkdir -p $JAVA_DIR
+                      ls $PROTO_DIR/*.proto | xargs -n 1 protoc -I$PROTO_DIR --java_out=$JAVA_DIR
+                    fi
+                </echo>
+                <exec executable="sh" dir="${basedir}" failonerror="true">
+                  <arg line="target/compile-test-proto.sh"/>
+                </exec>
+              </target>
+            </configuration>
+          </execution>
           <execution>
             <id>save-version</id>
             <phase>generate-sources</phase>
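
The hadoop_rpc.proto source that these new executions compile (and whose checked-in protoc output is deleted below) is not included in this diff. The following sketch is inferred entirely from the generated accessors, field comments, and enum constants in the deleted file; treat it as a reconstruction, not a verbatim copy from the repository:

// Reconstructed sketch of hadoop_rpc.proto, inferred from the generated
// code below. No package statement: the descriptor comments (e.g.
// "builder_scope:HadoopRpcRequestProto") show unqualified message names.
option java_package = "org.apache.hadoop.ipc.protobuf";
option java_outer_classname = "HadoopRpcProtos";
// The equals()/hashCode() overrides in the generated file imply:
option java_generate_equals_and_hash = true;

message HadoopRpcRequestProto {
  required string methodName = 1;
  optional bytes request = 2;
  required string declaringClassProtocolName = 3;
  required uint64 clientProtocolVersion = 4;
}

message HadoopRpcExceptionProto {
  optional string exceptionName = 1;
  optional string stackTrace = 2;
}

message HadoopRpcResponseProto {
  enum ResponseStatus {
    SUCCESS = 1;
    ERRROR = 2;  // spelling as in the generated constants
  }
  required ResponseStatus status = 1;
  optional bytes response = 2;
  optional HadoopRpcExceptionProto exception = 3;
}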
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/protobuf/HadoopRpcProtos.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/protobuf/HadoopRpcProtos.java
deleted file mode 100644
index b6d1577eb2..0000000000
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/protobuf/HadoopRpcProtos.java
+++ /dev/null
@@ -1,1963 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-// Generated by the protocol buffer compiler. DO NOT EDIT!
-// source: hadoop_rpc.proto
-
-package org.apache.hadoop.ipc.protobuf;
-
-public final class HadoopRpcProtos {
- private HadoopRpcProtos() {}
- public static void registerAllExtensions(
- com.google.protobuf.ExtensionRegistry registry) {
- }
- public interface HadoopRpcRequestProtoOrBuilder
- extends com.google.protobuf.MessageOrBuilder {
-
- // required string methodName = 1;
- boolean hasMethodName();
- String getMethodName();
-
- // optional bytes request = 2;
- boolean hasRequest();
- com.google.protobuf.ByteString getRequest();
-
- // required string declaringClassProtocolName = 3;
- boolean hasDeclaringClassProtocolName();
- String getDeclaringClassProtocolName();
-
- // required uint64 clientProtocolVersion = 4;
- boolean hasClientProtocolVersion();
- long getClientProtocolVersion();
- }
- public static final class HadoopRpcRequestProto extends
- com.google.protobuf.GeneratedMessage
- implements HadoopRpcRequestProtoOrBuilder {
- // Use HadoopRpcRequestProto.newBuilder() to construct.
- private HadoopRpcRequestProto(Builder builder) {
- super(builder);
- }
- private HadoopRpcRequestProto(boolean noInit) {}
-
- private static final HadoopRpcRequestProto defaultInstance;
- public static HadoopRpcRequestProto getDefaultInstance() {
- return defaultInstance;
- }
-
- public HadoopRpcRequestProto getDefaultInstanceForType() {
- return defaultInstance;
- }
-
- public static final com.google.protobuf.Descriptors.Descriptor
- getDescriptor() {
- return org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.internal_static_HadoopRpcRequestProto_descriptor;
- }
-
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
- internalGetFieldAccessorTable() {
- return org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.internal_static_HadoopRpcRequestProto_fieldAccessorTable;
- }
-
- private int bitField0_;
- // required string methodName = 1;
- public static final int METHODNAME_FIELD_NUMBER = 1;
- private java.lang.Object methodName_;
- public boolean hasMethodName() {
- return ((bitField0_ & 0x00000001) == 0x00000001);
- }
- public String getMethodName() {
- java.lang.Object ref = methodName_;
- if (ref instanceof String) {
- return (String) ref;
- } else {
- com.google.protobuf.ByteString bs =
- (com.google.protobuf.ByteString) ref;
- String s = bs.toStringUtf8();
- if (com.google.protobuf.Internal.isValidUtf8(bs)) {
- methodName_ = s;
- }
- return s;
- }
- }
- private com.google.protobuf.ByteString getMethodNameBytes() {
- java.lang.Object ref = methodName_;
- if (ref instanceof String) {
- com.google.protobuf.ByteString b =
- com.google.protobuf.ByteString.copyFromUtf8((String) ref);
- methodName_ = b;
- return b;
- } else {
- return (com.google.protobuf.ByteString) ref;
- }
- }
-
- // optional bytes request = 2;
- public static final int REQUEST_FIELD_NUMBER = 2;
- private com.google.protobuf.ByteString request_;
- public boolean hasRequest() {
- return ((bitField0_ & 0x00000002) == 0x00000002);
- }
- public com.google.protobuf.ByteString getRequest() {
- return request_;
- }
-
- // required string declaringClassProtocolName = 3;
- public static final int DECLARINGCLASSPROTOCOLNAME_FIELD_NUMBER = 3;
- private java.lang.Object declaringClassProtocolName_;
- public boolean hasDeclaringClassProtocolName() {
- return ((bitField0_ & 0x00000004) == 0x00000004);
- }
- public String getDeclaringClassProtocolName() {
- java.lang.Object ref = declaringClassProtocolName_;
- if (ref instanceof String) {
- return (String) ref;
- } else {
- com.google.protobuf.ByteString bs =
- (com.google.protobuf.ByteString) ref;
- String s = bs.toStringUtf8();
- if (com.google.protobuf.Internal.isValidUtf8(bs)) {
- declaringClassProtocolName_ = s;
- }
- return s;
- }
- }
- private com.google.protobuf.ByteString getDeclaringClassProtocolNameBytes() {
- java.lang.Object ref = declaringClassProtocolName_;
- if (ref instanceof String) {
- com.google.protobuf.ByteString b =
- com.google.protobuf.ByteString.copyFromUtf8((String) ref);
- declaringClassProtocolName_ = b;
- return b;
- } else {
- return (com.google.protobuf.ByteString) ref;
- }
- }
-
- // required uint64 clientProtocolVersion = 4;
- public static final int CLIENTPROTOCOLVERSION_FIELD_NUMBER = 4;
- private long clientProtocolVersion_;
- public boolean hasClientProtocolVersion() {
- return ((bitField0_ & 0x00000008) == 0x00000008);
- }
- public long getClientProtocolVersion() {
- return clientProtocolVersion_;
- }
-
- private void initFields() {
- methodName_ = "";
- request_ = com.google.protobuf.ByteString.EMPTY;
- declaringClassProtocolName_ = "";
- clientProtocolVersion_ = 0L;
- }
- private byte memoizedIsInitialized = -1;
- public final boolean isInitialized() {
- byte isInitialized = memoizedIsInitialized;
- if (isInitialized != -1) return isInitialized == 1;
-
- if (!hasMethodName()) {
- memoizedIsInitialized = 0;
- return false;
- }
- if (!hasDeclaringClassProtocolName()) {
- memoizedIsInitialized = 0;
- return false;
- }
- if (!hasClientProtocolVersion()) {
- memoizedIsInitialized = 0;
- return false;
- }
- memoizedIsInitialized = 1;
- return true;
- }
-
- public void writeTo(com.google.protobuf.CodedOutputStream output)
- throws java.io.IOException {
- getSerializedSize();
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
- output.writeBytes(1, getMethodNameBytes());
- }
- if (((bitField0_ & 0x00000002) == 0x00000002)) {
- output.writeBytes(2, request_);
- }
- if (((bitField0_ & 0x00000004) == 0x00000004)) {
- output.writeBytes(3, getDeclaringClassProtocolNameBytes());
- }
- if (((bitField0_ & 0x00000008) == 0x00000008)) {
- output.writeUInt64(4, clientProtocolVersion_);
- }
- getUnknownFields().writeTo(output);
- }
-
- private int memoizedSerializedSize = -1;
- public int getSerializedSize() {
- int size = memoizedSerializedSize;
- if (size != -1) return size;
-
- size = 0;
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
- size += com.google.protobuf.CodedOutputStream
- .computeBytesSize(1, getMethodNameBytes());
- }
- if (((bitField0_ & 0x00000002) == 0x00000002)) {
- size += com.google.protobuf.CodedOutputStream
- .computeBytesSize(2, request_);
- }
- if (((bitField0_ & 0x00000004) == 0x00000004)) {
- size += com.google.protobuf.CodedOutputStream
- .computeBytesSize(3, getDeclaringClassProtocolNameBytes());
- }
- if (((bitField0_ & 0x00000008) == 0x00000008)) {
- size += com.google.protobuf.CodedOutputStream
- .computeUInt64Size(4, clientProtocolVersion_);
- }
- size += getUnknownFields().getSerializedSize();
- memoizedSerializedSize = size;
- return size;
- }
-
- private static final long serialVersionUID = 0L;
- @java.lang.Override
- protected java.lang.Object writeReplace()
- throws java.io.ObjectStreamException {
- return super.writeReplace();
- }
-
- @java.lang.Override
- public boolean equals(final java.lang.Object obj) {
- if (obj == this) {
- return true;
- }
- if (!(obj instanceof org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto)) {
- return super.equals(obj);
- }
- org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto other = (org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto) obj;
-
- boolean result = true;
- result = result && (hasMethodName() == other.hasMethodName());
- if (hasMethodName()) {
- result = result && getMethodName()
- .equals(other.getMethodName());
- }
- result = result && (hasRequest() == other.hasRequest());
- if (hasRequest()) {
- result = result && getRequest()
- .equals(other.getRequest());
- }
- result = result && (hasDeclaringClassProtocolName() == other.hasDeclaringClassProtocolName());
- if (hasDeclaringClassProtocolName()) {
- result = result && getDeclaringClassProtocolName()
- .equals(other.getDeclaringClassProtocolName());
- }
- result = result && (hasClientProtocolVersion() == other.hasClientProtocolVersion());
- if (hasClientProtocolVersion()) {
- result = result && (getClientProtocolVersion()
- == other.getClientProtocolVersion());
- }
- result = result &&
- getUnknownFields().equals(other.getUnknownFields());
- return result;
- }
-
- @java.lang.Override
- public int hashCode() {
- int hash = 41;
- hash = (19 * hash) + getDescriptorForType().hashCode();
- if (hasMethodName()) {
- hash = (37 * hash) + METHODNAME_FIELD_NUMBER;
- hash = (53 * hash) + getMethodName().hashCode();
- }
- if (hasRequest()) {
- hash = (37 * hash) + REQUEST_FIELD_NUMBER;
- hash = (53 * hash) + getRequest().hashCode();
- }
- if (hasDeclaringClassProtocolName()) {
- hash = (37 * hash) + DECLARINGCLASSPROTOCOLNAME_FIELD_NUMBER;
- hash = (53 * hash) + getDeclaringClassProtocolName().hashCode();
- }
- if (hasClientProtocolVersion()) {
- hash = (37 * hash) + CLIENTPROTOCOLVERSION_FIELD_NUMBER;
- hash = (53 * hash) + hashLong(getClientProtocolVersion());
- }
- hash = (29 * hash) + getUnknownFields().hashCode();
- return hash;
- }
-
- public static org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto parseFrom(
- com.google.protobuf.ByteString data)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data).buildParsed();
- }
- public static org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto parseFrom(
- com.google.protobuf.ByteString data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data, extensionRegistry)
- .buildParsed();
- }
- public static org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto parseFrom(byte[] data)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data).buildParsed();
- }
- public static org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto parseFrom(
- byte[] data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data, extensionRegistry)
- .buildParsed();
- }
- public static org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto parseFrom(java.io.InputStream input)
- throws java.io.IOException {
- return newBuilder().mergeFrom(input).buildParsed();
- }
- public static org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto parseFrom(
- java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- return newBuilder().mergeFrom(input, extensionRegistry)
- .buildParsed();
- }
- public static org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto parseDelimitedFrom(java.io.InputStream input)
- throws java.io.IOException {
- Builder builder = newBuilder();
- if (builder.mergeDelimitedFrom(input)) {
- return builder.buildParsed();
- } else {
- return null;
- }
- }
- public static org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto parseDelimitedFrom(
- java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- Builder builder = newBuilder();
- if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
- return builder.buildParsed();
- } else {
- return null;
- }
- }
- public static org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto parseFrom(
- com.google.protobuf.CodedInputStream input)
- throws java.io.IOException {
- return newBuilder().mergeFrom(input).buildParsed();
- }
- public static org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto parseFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- return newBuilder().mergeFrom(input, extensionRegistry)
- .buildParsed();
- }
-
- public static Builder newBuilder() { return Builder.create(); }
- public Builder newBuilderForType() { return newBuilder(); }
- public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto prototype) {
- return newBuilder().mergeFrom(prototype);
- }
- public Builder toBuilder() { return newBuilder(this); }
-
- @java.lang.Override
- protected Builder newBuilderForType(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
- Builder builder = new Builder(parent);
- return builder;
- }
- public static final class Builder extends
- com.google.protobuf.GeneratedMessage.Builder<Builder>
- implements org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProtoOrBuilder {
- public static final com.google.protobuf.Descriptors.Descriptor
- getDescriptor() {
- return org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.internal_static_HadoopRpcRequestProto_descriptor;
- }
-
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
- internalGetFieldAccessorTable() {
- return org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.internal_static_HadoopRpcRequestProto_fieldAccessorTable;
- }
-
- // Construct using org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto.newBuilder()
- private Builder() {
- maybeForceBuilderInitialization();
- }
-
- private Builder(BuilderParent parent) {
- super(parent);
- maybeForceBuilderInitialization();
- }
- private void maybeForceBuilderInitialization() {
- if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
- }
- }
- private static Builder create() {
- return new Builder();
- }
-
- public Builder clear() {
- super.clear();
- methodName_ = "";
- bitField0_ = (bitField0_ & ~0x00000001);
- request_ = com.google.protobuf.ByteString.EMPTY;
- bitField0_ = (bitField0_ & ~0x00000002);
- declaringClassProtocolName_ = "";
- bitField0_ = (bitField0_ & ~0x00000004);
- clientProtocolVersion_ = 0L;
- bitField0_ = (bitField0_ & ~0x00000008);
- return this;
- }
-
- public Builder clone() {
- return create().mergeFrom(buildPartial());
- }
-
- public com.google.protobuf.Descriptors.Descriptor
- getDescriptorForType() {
- return org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto.getDescriptor();
- }
-
- public org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto getDefaultInstanceForType() {
- return org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto.getDefaultInstance();
- }
-
- public org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto build() {
- org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto result = buildPartial();
- if (!result.isInitialized()) {
- throw newUninitializedMessageException(result);
- }
- return result;
- }
-
- private org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto buildParsed()
- throws com.google.protobuf.InvalidProtocolBufferException {
- org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto result = buildPartial();
- if (!result.isInitialized()) {
- throw newUninitializedMessageException(
- result).asInvalidProtocolBufferException();
- }
- return result;
- }
-
- public org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto buildPartial() {
- org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto result = new org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto(this);
- int from_bitField0_ = bitField0_;
- int to_bitField0_ = 0;
- if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
- to_bitField0_ |= 0x00000001;
- }
- result.methodName_ = methodName_;
- if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
- to_bitField0_ |= 0x00000002;
- }
- result.request_ = request_;
- if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
- to_bitField0_ |= 0x00000004;
- }
- result.declaringClassProtocolName_ = declaringClassProtocolName_;
- if (((from_bitField0_ & 0x00000008) == 0x00000008)) {
- to_bitField0_ |= 0x00000008;
- }
- result.clientProtocolVersion_ = clientProtocolVersion_;
- result.bitField0_ = to_bitField0_;
- onBuilt();
- return result;
- }
-
- public Builder mergeFrom(com.google.protobuf.Message other) {
- if (other instanceof org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto) {
- return mergeFrom((org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto)other);
- } else {
- super.mergeFrom(other);
- return this;
- }
- }
-
- public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto other) {
- if (other == org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto.getDefaultInstance()) return this;
- if (other.hasMethodName()) {
- setMethodName(other.getMethodName());
- }
- if (other.hasRequest()) {
- setRequest(other.getRequest());
- }
- if (other.hasDeclaringClassProtocolName()) {
- setDeclaringClassProtocolName(other.getDeclaringClassProtocolName());
- }
- if (other.hasClientProtocolVersion()) {
- setClientProtocolVersion(other.getClientProtocolVersion());
- }
- this.mergeUnknownFields(other.getUnknownFields());
- return this;
- }
-
- public final boolean isInitialized() {
- if (!hasMethodName()) {
-
- return false;
- }
- if (!hasDeclaringClassProtocolName()) {
-
- return false;
- }
- if (!hasClientProtocolVersion()) {
-
- return false;
- }
- return true;
- }
-
- public Builder mergeFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- com.google.protobuf.UnknownFieldSet.Builder unknownFields =
- com.google.protobuf.UnknownFieldSet.newBuilder(
- this.getUnknownFields());
- while (true) {
- int tag = input.readTag();
- switch (tag) {
- case 0:
- this.setUnknownFields(unknownFields.build());
- onChanged();
- return this;
- default: {
- if (!parseUnknownField(input, unknownFields,
- extensionRegistry, tag)) {
- this.setUnknownFields(unknownFields.build());
- onChanged();
- return this;
- }
- break;
- }
- case 10: {
- bitField0_ |= 0x00000001;
- methodName_ = input.readBytes();
- break;
- }
- case 18: {
- bitField0_ |= 0x00000002;
- request_ = input.readBytes();
- break;
- }
- case 26: {
- bitField0_ |= 0x00000004;
- declaringClassProtocolName_ = input.readBytes();
- break;
- }
- case 32: {
- bitField0_ |= 0x00000008;
- clientProtocolVersion_ = input.readUInt64();
- break;
- }
- }
- }
- }
-
- private int bitField0_;
-
- // required string methodName = 1;
- private java.lang.Object methodName_ = "";
- public boolean hasMethodName() {
- return ((bitField0_ & 0x00000001) == 0x00000001);
- }
- public String getMethodName() {
- java.lang.Object ref = methodName_;
- if (!(ref instanceof String)) {
- String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
- methodName_ = s;
- return s;
- } else {
- return (String) ref;
- }
- }
- public Builder setMethodName(String value) {
- if (value == null) {
- throw new NullPointerException();
- }
- bitField0_ |= 0x00000001;
- methodName_ = value;
- onChanged();
- return this;
- }
- public Builder clearMethodName() {
- bitField0_ = (bitField0_ & ~0x00000001);
- methodName_ = getDefaultInstance().getMethodName();
- onChanged();
- return this;
- }
- void setMethodName(com.google.protobuf.ByteString value) {
- bitField0_ |= 0x00000001;
- methodName_ = value;
- onChanged();
- }
-
- // optional bytes request = 2;
- private com.google.protobuf.ByteString request_ = com.google.protobuf.ByteString.EMPTY;
- public boolean hasRequest() {
- return ((bitField0_ & 0x00000002) == 0x00000002);
- }
- public com.google.protobuf.ByteString getRequest() {
- return request_;
- }
- public Builder setRequest(com.google.protobuf.ByteString value) {
- if (value == null) {
- throw new NullPointerException();
- }
- bitField0_ |= 0x00000002;
- request_ = value;
- onChanged();
- return this;
- }
- public Builder clearRequest() {
- bitField0_ = (bitField0_ & ~0x00000002);
- request_ = getDefaultInstance().getRequest();
- onChanged();
- return this;
- }
-
- // required string declaringClassProtocolName = 3;
- private java.lang.Object declaringClassProtocolName_ = "";
- public boolean hasDeclaringClassProtocolName() {
- return ((bitField0_ & 0x00000004) == 0x00000004);
- }
- public String getDeclaringClassProtocolName() {
- java.lang.Object ref = declaringClassProtocolName_;
- if (!(ref instanceof String)) {
- String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
- declaringClassProtocolName_ = s;
- return s;
- } else {
- return (String) ref;
- }
- }
- public Builder setDeclaringClassProtocolName(String value) {
- if (value == null) {
- throw new NullPointerException();
- }
- bitField0_ |= 0x00000004;
- declaringClassProtocolName_ = value;
- onChanged();
- return this;
- }
- public Builder clearDeclaringClassProtocolName() {
- bitField0_ = (bitField0_ & ~0x00000004);
- declaringClassProtocolName_ = getDefaultInstance().getDeclaringClassProtocolName();
- onChanged();
- return this;
- }
- void setDeclaringClassProtocolName(com.google.protobuf.ByteString value) {
- bitField0_ |= 0x00000004;
- declaringClassProtocolName_ = value;
- onChanged();
- }
-
- // required uint64 clientProtocolVersion = 4;
- private long clientProtocolVersion_ ;
- public boolean hasClientProtocolVersion() {
- return ((bitField0_ & 0x00000008) == 0x00000008);
- }
- public long getClientProtocolVersion() {
- return clientProtocolVersion_;
- }
- public Builder setClientProtocolVersion(long value) {
- bitField0_ |= 0x00000008;
- clientProtocolVersion_ = value;
- onChanged();
- return this;
- }
- public Builder clearClientProtocolVersion() {
- bitField0_ = (bitField0_ & ~0x00000008);
- clientProtocolVersion_ = 0L;
- onChanged();
- return this;
- }
-
- // @@protoc_insertion_point(builder_scope:HadoopRpcRequestProto)
- }
-
- static {
- defaultInstance = new HadoopRpcRequestProto(true);
- defaultInstance.initFields();
- }
-
- // @@protoc_insertion_point(class_scope:HadoopRpcRequestProto)
- }
-
- public interface HadoopRpcExceptionProtoOrBuilder
- extends com.google.protobuf.MessageOrBuilder {
-
- // optional string exceptionName = 1;
- boolean hasExceptionName();
- String getExceptionName();
-
- // optional string stackTrace = 2;
- boolean hasStackTrace();
- String getStackTrace();
- }
- public static final class HadoopRpcExceptionProto extends
- com.google.protobuf.GeneratedMessage
- implements HadoopRpcExceptionProtoOrBuilder {
- // Use HadoopRpcExceptionProto.newBuilder() to construct.
- private HadoopRpcExceptionProto(Builder builder) {
- super(builder);
- }
- private HadoopRpcExceptionProto(boolean noInit) {}
-
- private static final HadoopRpcExceptionProto defaultInstance;
- public static HadoopRpcExceptionProto getDefaultInstance() {
- return defaultInstance;
- }
-
- public HadoopRpcExceptionProto getDefaultInstanceForType() {
- return defaultInstance;
- }
-
- public static final com.google.protobuf.Descriptors.Descriptor
- getDescriptor() {
- return org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.internal_static_HadoopRpcExceptionProto_descriptor;
- }
-
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
- internalGetFieldAccessorTable() {
- return org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.internal_static_HadoopRpcExceptionProto_fieldAccessorTable;
- }
-
- private int bitField0_;
- // optional string exceptionName = 1;
- public static final int EXCEPTIONNAME_FIELD_NUMBER = 1;
- private java.lang.Object exceptionName_;
- public boolean hasExceptionName() {
- return ((bitField0_ & 0x00000001) == 0x00000001);
- }
- public String getExceptionName() {
- java.lang.Object ref = exceptionName_;
- if (ref instanceof String) {
- return (String) ref;
- } else {
- com.google.protobuf.ByteString bs =
- (com.google.protobuf.ByteString) ref;
- String s = bs.toStringUtf8();
- if (com.google.protobuf.Internal.isValidUtf8(bs)) {
- exceptionName_ = s;
- }
- return s;
- }
- }
- private com.google.protobuf.ByteString getExceptionNameBytes() {
- java.lang.Object ref = exceptionName_;
- if (ref instanceof String) {
- com.google.protobuf.ByteString b =
- com.google.protobuf.ByteString.copyFromUtf8((String) ref);
- exceptionName_ = b;
- return b;
- } else {
- return (com.google.protobuf.ByteString) ref;
- }
- }
-
- // optional string stackTrace = 2;
- public static final int STACKTRACE_FIELD_NUMBER = 2;
- private java.lang.Object stackTrace_;
- public boolean hasStackTrace() {
- return ((bitField0_ & 0x00000002) == 0x00000002);
- }
- public String getStackTrace() {
- java.lang.Object ref = stackTrace_;
- if (ref instanceof String) {
- return (String) ref;
- } else {
- com.google.protobuf.ByteString bs =
- (com.google.protobuf.ByteString) ref;
- String s = bs.toStringUtf8();
- if (com.google.protobuf.Internal.isValidUtf8(bs)) {
- stackTrace_ = s;
- }
- return s;
- }
- }
- private com.google.protobuf.ByteString getStackTraceBytes() {
- java.lang.Object ref = stackTrace_;
- if (ref instanceof String) {
- com.google.protobuf.ByteString b =
- com.google.protobuf.ByteString.copyFromUtf8((String) ref);
- stackTrace_ = b;
- return b;
- } else {
- return (com.google.protobuf.ByteString) ref;
- }
- }
-
- private void initFields() {
- exceptionName_ = "";
- stackTrace_ = "";
- }
- private byte memoizedIsInitialized = -1;
- public final boolean isInitialized() {
- byte isInitialized = memoizedIsInitialized;
- if (isInitialized != -1) return isInitialized == 1;
-
- memoizedIsInitialized = 1;
- return true;
- }
-
- public void writeTo(com.google.protobuf.CodedOutputStream output)
- throws java.io.IOException {
- getSerializedSize();
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
- output.writeBytes(1, getExceptionNameBytes());
- }
- if (((bitField0_ & 0x00000002) == 0x00000002)) {
- output.writeBytes(2, getStackTraceBytes());
- }
- getUnknownFields().writeTo(output);
- }
-
- private int memoizedSerializedSize = -1;
- public int getSerializedSize() {
- int size = memoizedSerializedSize;
- if (size != -1) return size;
-
- size = 0;
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
- size += com.google.protobuf.CodedOutputStream
- .computeBytesSize(1, getExceptionNameBytes());
- }
- if (((bitField0_ & 0x00000002) == 0x00000002)) {
- size += com.google.protobuf.CodedOutputStream
- .computeBytesSize(2, getStackTraceBytes());
- }
- size += getUnknownFields().getSerializedSize();
- memoizedSerializedSize = size;
- return size;
- }
-
- private static final long serialVersionUID = 0L;
- @java.lang.Override
- protected java.lang.Object writeReplace()
- throws java.io.ObjectStreamException {
- return super.writeReplace();
- }
-
- @java.lang.Override
- public boolean equals(final java.lang.Object obj) {
- if (obj == this) {
- return true;
- }
- if (!(obj instanceof org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto)) {
- return super.equals(obj);
- }
- org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto other = (org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto) obj;
-
- boolean result = true;
- result = result && (hasExceptionName() == other.hasExceptionName());
- if (hasExceptionName()) {
- result = result && getExceptionName()
- .equals(other.getExceptionName());
- }
- result = result && (hasStackTrace() == other.hasStackTrace());
- if (hasStackTrace()) {
- result = result && getStackTrace()
- .equals(other.getStackTrace());
- }
- result = result &&
- getUnknownFields().equals(other.getUnknownFields());
- return result;
- }
-
- @java.lang.Override
- public int hashCode() {
- int hash = 41;
- hash = (19 * hash) + getDescriptorForType().hashCode();
- if (hasExceptionName()) {
- hash = (37 * hash) + EXCEPTIONNAME_FIELD_NUMBER;
- hash = (53 * hash) + getExceptionName().hashCode();
- }
- if (hasStackTrace()) {
- hash = (37 * hash) + STACKTRACE_FIELD_NUMBER;
- hash = (53 * hash) + getStackTrace().hashCode();
- }
- hash = (29 * hash) + getUnknownFields().hashCode();
- return hash;
- }
-
- public static org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto parseFrom(
- com.google.protobuf.ByteString data)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data).buildParsed();
- }
- public static org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto parseFrom(
- com.google.protobuf.ByteString data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data, extensionRegistry)
- .buildParsed();
- }
- public static org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto parseFrom(byte[] data)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data).buildParsed();
- }
- public static org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto parseFrom(
- byte[] data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data, extensionRegistry)
- .buildParsed();
- }
- public static org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto parseFrom(java.io.InputStream input)
- throws java.io.IOException {
- return newBuilder().mergeFrom(input).buildParsed();
- }
- public static org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto parseFrom(
- java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- return newBuilder().mergeFrom(input, extensionRegistry)
- .buildParsed();
- }
- public static org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto parseDelimitedFrom(java.io.InputStream input)
- throws java.io.IOException {
- Builder builder = newBuilder();
- if (builder.mergeDelimitedFrom(input)) {
- return builder.buildParsed();
- } else {
- return null;
- }
- }
- public static org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto parseDelimitedFrom(
- java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- Builder builder = newBuilder();
- if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
- return builder.buildParsed();
- } else {
- return null;
- }
- }
- public static org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto parseFrom(
- com.google.protobuf.CodedInputStream input)
- throws java.io.IOException {
- return newBuilder().mergeFrom(input).buildParsed();
- }
- public static org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto parseFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- return newBuilder().mergeFrom(input, extensionRegistry)
- .buildParsed();
- }
-
- public static Builder newBuilder() { return Builder.create(); }
- public Builder newBuilderForType() { return newBuilder(); }
- public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto prototype) {
- return newBuilder().mergeFrom(prototype);
- }
- public Builder toBuilder() { return newBuilder(this); }
-
- @java.lang.Override
- protected Builder newBuilderForType(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
- Builder builder = new Builder(parent);
- return builder;
- }
- public static final class Builder extends
- com.google.protobuf.GeneratedMessage.Builder<Builder>
- implements org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProtoOrBuilder {
- public static final com.google.protobuf.Descriptors.Descriptor
- getDescriptor() {
- return org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.internal_static_HadoopRpcExceptionProto_descriptor;
- }
-
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
- internalGetFieldAccessorTable() {
- return org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.internal_static_HadoopRpcExceptionProto_fieldAccessorTable;
- }
-
- // Construct using org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto.newBuilder()
- private Builder() {
- maybeForceBuilderInitialization();
- }
-
- private Builder(BuilderParent parent) {
- super(parent);
- maybeForceBuilderInitialization();
- }
- private void maybeForceBuilderInitialization() {
- if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
- }
- }
- private static Builder create() {
- return new Builder();
- }
-
- public Builder clear() {
- super.clear();
- exceptionName_ = "";
- bitField0_ = (bitField0_ & ~0x00000001);
- stackTrace_ = "";
- bitField0_ = (bitField0_ & ~0x00000002);
- return this;
- }
-
- public Builder clone() {
- return create().mergeFrom(buildPartial());
- }
-
- public com.google.protobuf.Descriptors.Descriptor
- getDescriptorForType() {
- return org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto.getDescriptor();
- }
-
- public org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto getDefaultInstanceForType() {
- return org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto.getDefaultInstance();
- }
-
- public org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto build() {
- org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto result = buildPartial();
- if (!result.isInitialized()) {
- throw newUninitializedMessageException(result);
- }
- return result;
- }
-
- private org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto buildParsed()
- throws com.google.protobuf.InvalidProtocolBufferException {
- org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto result = buildPartial();
- if (!result.isInitialized()) {
- throw newUninitializedMessageException(
- result).asInvalidProtocolBufferException();
- }
- return result;
- }
-
- public org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto buildPartial() {
- org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto result = new org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto(this);
- int from_bitField0_ = bitField0_;
- int to_bitField0_ = 0;
- if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
- to_bitField0_ |= 0x00000001;
- }
- result.exceptionName_ = exceptionName_;
- if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
- to_bitField0_ |= 0x00000002;
- }
- result.stackTrace_ = stackTrace_;
- result.bitField0_ = to_bitField0_;
- onBuilt();
- return result;
- }
-
- public Builder mergeFrom(com.google.protobuf.Message other) {
- if (other instanceof org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto) {
- return mergeFrom((org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto)other);
- } else {
- super.mergeFrom(other);
- return this;
- }
- }
-
- public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto other) {
- if (other == org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto.getDefaultInstance()) return this;
- if (other.hasExceptionName()) {
- setExceptionName(other.getExceptionName());
- }
- if (other.hasStackTrace()) {
- setStackTrace(other.getStackTrace());
- }
- this.mergeUnknownFields(other.getUnknownFields());
- return this;
- }
-
- public final boolean isInitialized() {
- return true;
- }
-
- public Builder mergeFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- com.google.protobuf.UnknownFieldSet.Builder unknownFields =
- com.google.protobuf.UnknownFieldSet.newBuilder(
- this.getUnknownFields());
- while (true) {
- int tag = input.readTag();
- switch (tag) {
- case 0:
- this.setUnknownFields(unknownFields.build());
- onChanged();
- return this;
- default: {
- if (!parseUnknownField(input, unknownFields,
- extensionRegistry, tag)) {
- this.setUnknownFields(unknownFields.build());
- onChanged();
- return this;
- }
- break;
- }
- case 10: {
- bitField0_ |= 0x00000001;
- exceptionName_ = input.readBytes();
- break;
- }
- case 18: {
- bitField0_ |= 0x00000002;
- stackTrace_ = input.readBytes();
- break;
- }
- }
- }
- }
-
- private int bitField0_;
-
- // optional string exceptionName = 1;
- private java.lang.Object exceptionName_ = "";
- public boolean hasExceptionName() {
- return ((bitField0_ & 0x00000001) == 0x00000001);
- }
- public String getExceptionName() {
- java.lang.Object ref = exceptionName_;
- if (!(ref instanceof String)) {
- String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
- exceptionName_ = s;
- return s;
- } else {
- return (String) ref;
- }
- }
- public Builder setExceptionName(String value) {
- if (value == null) {
- throw new NullPointerException();
- }
- bitField0_ |= 0x00000001;
- exceptionName_ = value;
- onChanged();
- return this;
- }
- public Builder clearExceptionName() {
- bitField0_ = (bitField0_ & ~0x00000001);
- exceptionName_ = getDefaultInstance().getExceptionName();
- onChanged();
- return this;
- }
- void setExceptionName(com.google.protobuf.ByteString value) {
- bitField0_ |= 0x00000001;
- exceptionName_ = value;
- onChanged();
- }
-
- // optional string stackTrace = 2;
- private java.lang.Object stackTrace_ = "";
- public boolean hasStackTrace() {
- return ((bitField0_ & 0x00000002) == 0x00000002);
- }
- public String getStackTrace() {
- java.lang.Object ref = stackTrace_;
- if (!(ref instanceof String)) {
- String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
- stackTrace_ = s;
- return s;
- } else {
- return (String) ref;
- }
- }
- public Builder setStackTrace(String value) {
- if (value == null) {
- throw new NullPointerException();
- }
- bitField0_ |= 0x00000002;
- stackTrace_ = value;
- onChanged();
- return this;
- }
- public Builder clearStackTrace() {
- bitField0_ = (bitField0_ & ~0x00000002);
- stackTrace_ = getDefaultInstance().getStackTrace();
- onChanged();
- return this;
- }
- void setStackTrace(com.google.protobuf.ByteString value) {
- bitField0_ |= 0x00000002;
- stackTrace_ = value;
- onChanged();
- }
-
- // @@protoc_insertion_point(builder_scope:HadoopRpcExceptionProto)
- }
-
- static {
- defaultInstance = new HadoopRpcExceptionProto(true);
- defaultInstance.initFields();
- }
-
- // @@protoc_insertion_point(class_scope:HadoopRpcExceptionProto)
- }
-
- public interface HadoopRpcResponseProtoOrBuilder
- extends com.google.protobuf.MessageOrBuilder {
-
- // required .HadoopRpcResponseProto.ResponseStatus status = 1;
- boolean hasStatus();
- org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto.ResponseStatus getStatus();
-
- // optional bytes response = 2;
- boolean hasResponse();
- com.google.protobuf.ByteString getResponse();
-
- // optional .HadoopRpcExceptionProto exception = 3;
- boolean hasException();
- org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto getException();
- org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProtoOrBuilder getExceptionOrBuilder();
- }
- public static final class HadoopRpcResponseProto extends
- com.google.protobuf.GeneratedMessage
- implements HadoopRpcResponseProtoOrBuilder {
- // Use HadoopRpcResponseProto.newBuilder() to construct.
- private HadoopRpcResponseProto(Builder builder) {
- super(builder);
- }
- private HadoopRpcResponseProto(boolean noInit) {}
-
- private static final HadoopRpcResponseProto defaultInstance;
- public static HadoopRpcResponseProto getDefaultInstance() {
- return defaultInstance;
- }
-
- public HadoopRpcResponseProto getDefaultInstanceForType() {
- return defaultInstance;
- }
-
- public static final com.google.protobuf.Descriptors.Descriptor
- getDescriptor() {
- return org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.internal_static_HadoopRpcResponseProto_descriptor;
- }
-
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
- internalGetFieldAccessorTable() {
- return org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.internal_static_HadoopRpcResponseProto_fieldAccessorTable;
- }
-
- public enum ResponseStatus
- implements com.google.protobuf.ProtocolMessageEnum {
- SUCCESS(0, 1),
- ERRROR(1, 2),
- ;
-
- public static final int SUCCESS_VALUE = 1;
- public static final int ERRROR_VALUE = 2;
-
-
- public final int getNumber() { return value; }
-
- public static ResponseStatus valueOf(int value) {
- switch (value) {
- case 1: return SUCCESS;
- case 2: return ERRROR;
- default: return null;
- }
- }
-
- public static com.google.protobuf.Internal.EnumLiteMap<ResponseStatus>
- internalGetValueMap() {
- return internalValueMap;
- }
- private static com.google.protobuf.Internal.EnumLiteMap<ResponseStatus>
- internalValueMap =
- new com.google.protobuf.Internal.EnumLiteMap<ResponseStatus>() {
- public ResponseStatus findValueByNumber(int number) {
- return ResponseStatus.valueOf(number);
- }
- };
-
- public final com.google.protobuf.Descriptors.EnumValueDescriptor
- getValueDescriptor() {
- return getDescriptor().getValues().get(index);
- }
- public final com.google.protobuf.Descriptors.EnumDescriptor
- getDescriptorForType() {
- return getDescriptor();
- }
- public static final com.google.protobuf.Descriptors.EnumDescriptor
- getDescriptor() {
- return org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto.getDescriptor().getEnumTypes().get(0);
- }
-
- private static final ResponseStatus[] VALUES = {
- SUCCESS, ERRROR,
- };
-
- public static ResponseStatus valueOf(
- com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
- if (desc.getType() != getDescriptor()) {
- throw new java.lang.IllegalArgumentException(
- "EnumValueDescriptor is not for this type.");
- }
- return VALUES[desc.getIndex()];
- }
-
- private final int index;
- private final int value;
-
- private ResponseStatus(int index, int value) {
- this.index = index;
- this.value = value;
- }
-
- // @@protoc_insertion_point(enum_scope:HadoopRpcResponseProto.ResponseStatus)
- }
-
- private int bitField0_;
- // required .HadoopRpcResponseProto.ResponseStatus status = 1;
- public static final int STATUS_FIELD_NUMBER = 1;
- private org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto.ResponseStatus status_;
- public boolean hasStatus() {
- return ((bitField0_ & 0x00000001) == 0x00000001);
- }
- public org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto.ResponseStatus getStatus() {
- return status_;
- }
-
- // optional bytes response = 2;
- public static final int RESPONSE_FIELD_NUMBER = 2;
- private com.google.protobuf.ByteString response_;
- public boolean hasResponse() {
- return ((bitField0_ & 0x00000002) == 0x00000002);
- }
- public com.google.protobuf.ByteString getResponse() {
- return response_;
- }
-
- // optional .HadoopRpcExceptionProto exception = 3;
- public static final int EXCEPTION_FIELD_NUMBER = 3;
- private org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto exception_;
- public boolean hasException() {
- return ((bitField0_ & 0x00000004) == 0x00000004);
- }
- public org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto getException() {
- return exception_;
- }
- public org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProtoOrBuilder getExceptionOrBuilder() {
- return exception_;
- }
-
- private void initFields() {
- status_ = org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto.ResponseStatus.SUCCESS;
- response_ = com.google.protobuf.ByteString.EMPTY;
- exception_ = org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto.getDefaultInstance();
- }
- private byte memoizedIsInitialized = -1;
- public final boolean isInitialized() {
- byte isInitialized = memoizedIsInitialized;
- if (isInitialized != -1) return isInitialized == 1;
-
- if (!hasStatus()) {
- memoizedIsInitialized = 0;
- return false;
- }
- memoizedIsInitialized = 1;
- return true;
- }
-
- public void writeTo(com.google.protobuf.CodedOutputStream output)
- throws java.io.IOException {
- getSerializedSize();
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
- output.writeEnum(1, status_.getNumber());
- }
- if (((bitField0_ & 0x00000002) == 0x00000002)) {
- output.writeBytes(2, response_);
- }
- if (((bitField0_ & 0x00000004) == 0x00000004)) {
- output.writeMessage(3, exception_);
- }
- getUnknownFields().writeTo(output);
- }
-
- private int memoizedSerializedSize = -1;
- public int getSerializedSize() {
- int size = memoizedSerializedSize;
- if (size != -1) return size;
-
- size = 0;
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
- size += com.google.protobuf.CodedOutputStream
- .computeEnumSize(1, status_.getNumber());
- }
- if (((bitField0_ & 0x00000002) == 0x00000002)) {
- size += com.google.protobuf.CodedOutputStream
- .computeBytesSize(2, response_);
- }
- if (((bitField0_ & 0x00000004) == 0x00000004)) {
- size += com.google.protobuf.CodedOutputStream
- .computeMessageSize(3, exception_);
- }
- size += getUnknownFields().getSerializedSize();
- memoizedSerializedSize = size;
- return size;
- }
-
- private static final long serialVersionUID = 0L;
- @java.lang.Override
- protected java.lang.Object writeReplace()
- throws java.io.ObjectStreamException {
- return super.writeReplace();
- }
-
- @java.lang.Override
- public boolean equals(final java.lang.Object obj) {
- if (obj == this) {
- return true;
- }
- if (!(obj instanceof org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto)) {
- return super.equals(obj);
- }
- org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto other = (org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto) obj;
-
- boolean result = true;
- result = result && (hasStatus() == other.hasStatus());
- if (hasStatus()) {
- result = result &&
- (getStatus() == other.getStatus());
- }
- result = result && (hasResponse() == other.hasResponse());
- if (hasResponse()) {
- result = result && getResponse()
- .equals(other.getResponse());
- }
- result = result && (hasException() == other.hasException());
- if (hasException()) {
- result = result && getException()
- .equals(other.getException());
- }
- result = result &&
- getUnknownFields().equals(other.getUnknownFields());
- return result;
- }
-
- @java.lang.Override
- public int hashCode() {
- int hash = 41;
- hash = (19 * hash) + getDescriptorForType().hashCode();
- if (hasStatus()) {
- hash = (37 * hash) + STATUS_FIELD_NUMBER;
- hash = (53 * hash) + hashEnum(getStatus());
- }
- if (hasResponse()) {
- hash = (37 * hash) + RESPONSE_FIELD_NUMBER;
- hash = (53 * hash) + getResponse().hashCode();
- }
- if (hasException()) {
- hash = (37 * hash) + EXCEPTION_FIELD_NUMBER;
- hash = (53 * hash) + getException().hashCode();
- }
- hash = (29 * hash) + getUnknownFields().hashCode();
- return hash;
- }
-
- public static org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto parseFrom(
- com.google.protobuf.ByteString data)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data).buildParsed();
- }
- public static org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto parseFrom(
- com.google.protobuf.ByteString data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data, extensionRegistry)
- .buildParsed();
- }
- public static org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto parseFrom(byte[] data)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data).buildParsed();
- }
- public static org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto parseFrom(
- byte[] data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data, extensionRegistry)
- .buildParsed();
- }
- public static org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto parseFrom(java.io.InputStream input)
- throws java.io.IOException {
- return newBuilder().mergeFrom(input).buildParsed();
- }
- public static org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto parseFrom(
- java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- return newBuilder().mergeFrom(input, extensionRegistry)
- .buildParsed();
- }
- public static org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto parseDelimitedFrom(java.io.InputStream input)
- throws java.io.IOException {
- Builder builder = newBuilder();
- if (builder.mergeDelimitedFrom(input)) {
- return builder.buildParsed();
- } else {
- return null;
- }
- }
- public static org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto parseDelimitedFrom(
- java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- Builder builder = newBuilder();
- if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
- return builder.buildParsed();
- } else {
- return null;
- }
- }
- public static org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto parseFrom(
- com.google.protobuf.CodedInputStream input)
- throws java.io.IOException {
- return newBuilder().mergeFrom(input).buildParsed();
- }
- public static org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto parseFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- return newBuilder().mergeFrom(input, extensionRegistry)
- .buildParsed();
- }
-
- public static Builder newBuilder() { return Builder.create(); }
- public Builder newBuilderForType() { return newBuilder(); }
- public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto prototype) {
- return newBuilder().mergeFrom(prototype);
- }
- public Builder toBuilder() { return newBuilder(this); }
-
- @java.lang.Override
- protected Builder newBuilderForType(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
- Builder builder = new Builder(parent);
- return builder;
- }
- public static final class Builder extends
- com.google.protobuf.GeneratedMessage.Builder<Builder>
- implements org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProtoOrBuilder {
- public static final com.google.protobuf.Descriptors.Descriptor
- getDescriptor() {
- return org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.internal_static_HadoopRpcResponseProto_descriptor;
- }
-
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
- internalGetFieldAccessorTable() {
- return org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.internal_static_HadoopRpcResponseProto_fieldAccessorTable;
- }
-
- // Construct using org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto.newBuilder()
- private Builder() {
- maybeForceBuilderInitialization();
- }
-
- private Builder(BuilderParent parent) {
- super(parent);
- maybeForceBuilderInitialization();
- }
- private void maybeForceBuilderInitialization() {
- if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
- getExceptionFieldBuilder();
- }
- }
- private static Builder create() {
- return new Builder();
- }
-
- public Builder clear() {
- super.clear();
- status_ = org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto.ResponseStatus.SUCCESS;
- bitField0_ = (bitField0_ & ~0x00000001);
- response_ = com.google.protobuf.ByteString.EMPTY;
- bitField0_ = (bitField0_ & ~0x00000002);
- if (exceptionBuilder_ == null) {
- exception_ = org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto.getDefaultInstance();
- } else {
- exceptionBuilder_.clear();
- }
- bitField0_ = (bitField0_ & ~0x00000004);
- return this;
- }
-
- public Builder clone() {
- return create().mergeFrom(buildPartial());
- }
-
- public com.google.protobuf.Descriptors.Descriptor
- getDescriptorForType() {
- return org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto.getDescriptor();
- }
-
- public org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto getDefaultInstanceForType() {
- return org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto.getDefaultInstance();
- }
-
- public org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto build() {
- org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto result = buildPartial();
- if (!result.isInitialized()) {
- throw newUninitializedMessageException(result);
- }
- return result;
- }
-
- private org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto buildParsed()
- throws com.google.protobuf.InvalidProtocolBufferException {
- org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto result = buildPartial();
- if (!result.isInitialized()) {
- throw newUninitializedMessageException(
- result).asInvalidProtocolBufferException();
- }
- return result;
- }
-
- public org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto buildPartial() {
- org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto result = new org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto(this);
- int from_bitField0_ = bitField0_;
- int to_bitField0_ = 0;
- if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
- to_bitField0_ |= 0x00000001;
- }
- result.status_ = status_;
- if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
- to_bitField0_ |= 0x00000002;
- }
- result.response_ = response_;
- if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
- to_bitField0_ |= 0x00000004;
- }
- if (exceptionBuilder_ == null) {
- result.exception_ = exception_;
- } else {
- result.exception_ = exceptionBuilder_.build();
- }
- result.bitField0_ = to_bitField0_;
- onBuilt();
- return result;
- }
-
- public Builder mergeFrom(com.google.protobuf.Message other) {
- if (other instanceof org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto) {
- return mergeFrom((org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto)other);
- } else {
- super.mergeFrom(other);
- return this;
- }
- }
-
- public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto other) {
- if (other == org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto.getDefaultInstance()) return this;
- if (other.hasStatus()) {
- setStatus(other.getStatus());
- }
- if (other.hasResponse()) {
- setResponse(other.getResponse());
- }
- if (other.hasException()) {
- mergeException(other.getException());
- }
- this.mergeUnknownFields(other.getUnknownFields());
- return this;
- }
-
- public final boolean isInitialized() {
- if (!hasStatus()) {
-
- return false;
- }
- return true;
- }
-
- public Builder mergeFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- com.google.protobuf.UnknownFieldSet.Builder unknownFields =
- com.google.protobuf.UnknownFieldSet.newBuilder(
- this.getUnknownFields());
- while (true) {
- int tag = input.readTag();
- switch (tag) {
- case 0:
- this.setUnknownFields(unknownFields.build());
- onChanged();
- return this;
- default: {
- if (!parseUnknownField(input, unknownFields,
- extensionRegistry, tag)) {
- this.setUnknownFields(unknownFields.build());
- onChanged();
- return this;
- }
- break;
- }
- case 8: {
- int rawValue = input.readEnum();
- org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto.ResponseStatus value = org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto.ResponseStatus.valueOf(rawValue);
- if (value == null) {
- unknownFields.mergeVarintField(1, rawValue);
- } else {
- bitField0_ |= 0x00000001;
- status_ = value;
- }
- break;
- }
- case 18: {
- bitField0_ |= 0x00000002;
- response_ = input.readBytes();
- break;
- }
- case 26: {
- org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto.Builder subBuilder = org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto.newBuilder();
- if (hasException()) {
- subBuilder.mergeFrom(getException());
- }
- input.readMessage(subBuilder, extensionRegistry);
- setException(subBuilder.buildPartial());
- break;
- }
- }
- }
- }
-
- private int bitField0_;
-
- // required .HadoopRpcResponseProto.ResponseStatus status = 1;
- private org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto.ResponseStatus status_ = org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto.ResponseStatus.SUCCESS;
- public boolean hasStatus() {
- return ((bitField0_ & 0x00000001) == 0x00000001);
- }
- public org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto.ResponseStatus getStatus() {
- return status_;
- }
- public Builder setStatus(org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto.ResponseStatus value) {
- if (value == null) {
- throw new NullPointerException();
- }
- bitField0_ |= 0x00000001;
- status_ = value;
- onChanged();
- return this;
- }
- public Builder clearStatus() {
- bitField0_ = (bitField0_ & ~0x00000001);
- status_ = org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto.ResponseStatus.SUCCESS;
- onChanged();
- return this;
- }
-
- // optional bytes response = 2;
- private com.google.protobuf.ByteString response_ = com.google.protobuf.ByteString.EMPTY;
- public boolean hasResponse() {
- return ((bitField0_ & 0x00000002) == 0x00000002);
- }
- public com.google.protobuf.ByteString getResponse() {
- return response_;
- }
- public Builder setResponse(com.google.protobuf.ByteString value) {
- if (value == null) {
- throw new NullPointerException();
- }
- bitField0_ |= 0x00000002;
- response_ = value;
- onChanged();
- return this;
- }
- public Builder clearResponse() {
- bitField0_ = (bitField0_ & ~0x00000002);
- response_ = getDefaultInstance().getResponse();
- onChanged();
- return this;
- }
-
- // optional .HadoopRpcExceptionProto exception = 3;
- private org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto exception_ = org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto.getDefaultInstance();
- private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto, org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto.Builder, org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProtoOrBuilder> exceptionBuilder_;
- public boolean hasException() {
- return ((bitField0_ & 0x00000004) == 0x00000004);
- }
- public org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto getException() {
- if (exceptionBuilder_ == null) {
- return exception_;
- } else {
- return exceptionBuilder_.getMessage();
- }
- }
- public Builder setException(org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto value) {
- if (exceptionBuilder_ == null) {
- if (value == null) {
- throw new NullPointerException();
- }
- exception_ = value;
- onChanged();
- } else {
- exceptionBuilder_.setMessage(value);
- }
- bitField0_ |= 0x00000004;
- return this;
- }
- public Builder setException(
- org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto.Builder builderForValue) {
- if (exceptionBuilder_ == null) {
- exception_ = builderForValue.build();
- onChanged();
- } else {
- exceptionBuilder_.setMessage(builderForValue.build());
- }
- bitField0_ |= 0x00000004;
- return this;
- }
- public Builder mergeException(org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto value) {
- if (exceptionBuilder_ == null) {
- if (((bitField0_ & 0x00000004) == 0x00000004) &&
- exception_ != org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto.getDefaultInstance()) {
- exception_ =
- org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto.newBuilder(exception_).mergeFrom(value).buildPartial();
- } else {
- exception_ = value;
- }
- onChanged();
- } else {
- exceptionBuilder_.mergeFrom(value);
- }
- bitField0_ |= 0x00000004;
- return this;
- }
- public Builder clearException() {
- if (exceptionBuilder_ == null) {
- exception_ = org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto.getDefaultInstance();
- onChanged();
- } else {
- exceptionBuilder_.clear();
- }
- bitField0_ = (bitField0_ & ~0x00000004);
- return this;
- }
- public org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto.Builder getExceptionBuilder() {
- bitField0_ |= 0x00000004;
- onChanged();
- return getExceptionFieldBuilder().getBuilder();
- }
- public org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProtoOrBuilder getExceptionOrBuilder() {
- if (exceptionBuilder_ != null) {
- return exceptionBuilder_.getMessageOrBuilder();
- } else {
- return exception_;
- }
- }
- private com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto, org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto.Builder, org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProtoOrBuilder>
- getExceptionFieldBuilder() {
- if (exceptionBuilder_ == null) {
- exceptionBuilder_ = new com.google.protobuf.SingleFieldBuilder<
- org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto, org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto.Builder, org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProtoOrBuilder>(
- exception_,
- getParentForChildren(),
- isClean());
- exception_ = null;
- }
- return exceptionBuilder_;
- }
-
- // @@protoc_insertion_point(builder_scope:HadoopRpcResponseProto)
- }
-
- static {
- defaultInstance = new HadoopRpcResponseProto(true);
- defaultInstance.initFields();
- }
-
- // @@protoc_insertion_point(class_scope:HadoopRpcResponseProto)
- }
-
- private static com.google.protobuf.Descriptors.Descriptor
- internal_static_HadoopRpcRequestProto_descriptor;
- private static
- com.google.protobuf.GeneratedMessage.FieldAccessorTable
- internal_static_HadoopRpcRequestProto_fieldAccessorTable;
- private static com.google.protobuf.Descriptors.Descriptor
- internal_static_HadoopRpcExceptionProto_descriptor;
- private static
- com.google.protobuf.GeneratedMessage.FieldAccessorTable
- internal_static_HadoopRpcExceptionProto_fieldAccessorTable;
- private static com.google.protobuf.Descriptors.Descriptor
- internal_static_HadoopRpcResponseProto_descriptor;
- private static
- com.google.protobuf.GeneratedMessage.FieldAccessorTable
- internal_static_HadoopRpcResponseProto_fieldAccessorTable;
-
- public static com.google.protobuf.Descriptors.FileDescriptor
- getDescriptor() {
- return descriptor;
- }
- private static com.google.protobuf.Descriptors.FileDescriptor
- descriptor;
- static {
- java.lang.String[] descriptorData = {
- "\n\020hadoop_rpc.proto\"\177\n\025HadoopRpcRequestPr" +
- "oto\022\022\n\nmethodName\030\001 \002(\t\022\017\n\007request\030\002 \001(\014" +
- "\022\"\n\032declaringClassProtocolName\030\003 \002(\t\022\035\n\025" +
- "clientProtocolVersion\030\004 \002(\004\"D\n\027HadoopRpc" +
- "ExceptionProto\022\025\n\rexceptionName\030\001 \001(\t\022\022\n" +
- "\nstackTrace\030\002 \001(\t\"\272\001\n\026HadoopRpcResponseP" +
- "roto\0226\n\006status\030\001 \002(\0162&.HadoopRpcResponse" +
- "Proto.ResponseStatus\022\020\n\010response\030\002 \001(\014\022+" +
- "\n\texception\030\003 \001(\0132\030.HadoopRpcExceptionPr" +
- "oto\")\n\016ResponseStatus\022\013\n\007SUCCESS\020\001\022\n\n\006ER",
- "RROR\020\002B4\n\036org.apache.hadoop.ipc.protobuf" +
- "B\017HadoopRpcProtos\240\001\001"
- };
- com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
- new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
- public com.google.protobuf.ExtensionRegistry assignDescriptors(
- com.google.protobuf.Descriptors.FileDescriptor root) {
- descriptor = root;
- internal_static_HadoopRpcRequestProto_descriptor =
- getDescriptor().getMessageTypes().get(0);
- internal_static_HadoopRpcRequestProto_fieldAccessorTable = new
- com.google.protobuf.GeneratedMessage.FieldAccessorTable(
- internal_static_HadoopRpcRequestProto_descriptor,
- new java.lang.String[] { "MethodName", "Request", "DeclaringClassProtocolName", "ClientProtocolVersion", },
- org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto.class,
- org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcRequestProto.Builder.class);
- internal_static_HadoopRpcExceptionProto_descriptor =
- getDescriptor().getMessageTypes().get(1);
- internal_static_HadoopRpcExceptionProto_fieldAccessorTable = new
- com.google.protobuf.GeneratedMessage.FieldAccessorTable(
- internal_static_HadoopRpcExceptionProto_descriptor,
- new java.lang.String[] { "ExceptionName", "StackTrace", },
- org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto.class,
- org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcExceptionProto.Builder.class);
- internal_static_HadoopRpcResponseProto_descriptor =
- getDescriptor().getMessageTypes().get(2);
- internal_static_HadoopRpcResponseProto_fieldAccessorTable = new
- com.google.protobuf.GeneratedMessage.FieldAccessorTable(
- internal_static_HadoopRpcResponseProto_descriptor,
- new java.lang.String[] { "Status", "Response", "Exception", },
- org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto.class,
- org.apache.hadoop.ipc.protobuf.HadoopRpcProtos.HadoopRpcResponseProto.Builder.class);
- return null;
- }
- };
- com.google.protobuf.Descriptors.FileDescriptor
- .internalBuildGeneratedFileFrom(descriptorData,
- new com.google.protobuf.Descriptors.FileDescriptor[] {
- }, assigner);
- }
-
- // @@protoc_insertion_point(outer_class_scope)
-}
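
The file deleted above is protoc output for hadoop_rpc.proto; removing it from version control is safe because the compiler regenerates it from the .proto source. As a minimal sketch (illustrative only, not part of the patch) of what the deleted builder boilerplate provides, the API is used roughly like this, assuming the regenerated classes are on the classpath:

    // Build a response message; status is a required field, so build()
    // throws an UninitializedMessageException if it has not been set.
    HadoopRpcProtos.HadoopRpcResponseProto resp =
        HadoopRpcProtos.HadoopRpcResponseProto.newBuilder()
            .setStatus(HadoopRpcProtos.HadoopRpcResponseProto.ResponseStatus.SUCCESS)
            .setResponse(com.google.protobuf.ByteString.copyFromUtf8("ok"))
            .build();
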
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/protobuf/package-info.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/protobuf/package-info.java
deleted file mode 100644
index 4a1591cc28..0000000000
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/protobuf/package-info.java
+++ /dev/null
@@ -1,22 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-@InterfaceAudience.LimitedPrivate({"HBase", "HDFS", "MapReduce"})
-@InterfaceStability.Evolving
-package org.apache.hadoop.ipc.protobuf;
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
diff --git a/hadoop-common-project/hadoop-common/src/proto/hadoop_rpc.proto b/hadoop-common-project/hadoop-common/src/main/proto/hadoop_rpc.proto
similarity index 100%
rename from hadoop-common-project/hadoop-common/src/proto/hadoop_rpc.proto
rename to hadoop-common-project/hadoop-common/src/main/proto/hadoop_rpc.proto
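
With hadoop_rpc.proto relocated under src/main/proto, the HadoopRpcProtos.java deleted above can be produced at build time instead of being kept in the tree; conceptually this amounts to invoking protoc with -I pointing at src/main/proto and --java_out pointing at a generated-sources directory. TestProtos.java, deleted next, is the analogous protoc output for test.proto (presumably kept in a matching test-side proto directory).
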
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/protobuf/TestProtos.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/protobuf/TestProtos.java
deleted file mode 100644
index 0029d26e84..0000000000
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/protobuf/TestProtos.java
+++ /dev/null
@@ -1,1525 +0,0 @@
-// Generated by the protocol buffer compiler. DO NOT EDIT!
-// source: test.proto
-
-package org.apache.hadoop.ipc.protobuf;
-
-public final class TestProtos {
- private TestProtos() {}
- public static void registerAllExtensions(
- com.google.protobuf.ExtensionRegistry registry) {
- }
- public interface EmptyRequestProtoOrBuilder
- extends com.google.protobuf.MessageOrBuilder {
- }
- public static final class EmptyRequestProto extends
- com.google.protobuf.GeneratedMessage
- implements EmptyRequestProtoOrBuilder {
- // Use EmptyRequestProto.newBuilder() to construct.
- private EmptyRequestProto(Builder builder) {
- super(builder);
- }
- private EmptyRequestProto(boolean noInit) {}
-
- private static final EmptyRequestProto defaultInstance;
- public static EmptyRequestProto getDefaultInstance() {
- return defaultInstance;
- }
-
- public EmptyRequestProto getDefaultInstanceForType() {
- return defaultInstance;
- }
-
- public static final com.google.protobuf.Descriptors.Descriptor
- getDescriptor() {
- return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_EmptyRequestProto_descriptor;
- }
-
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
- internalGetFieldAccessorTable() {
- return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_EmptyRequestProto_fieldAccessorTable;
- }
-
- private void initFields() {
- }
- private byte memoizedIsInitialized = -1;
- public final boolean isInitialized() {
- byte isInitialized = memoizedIsInitialized;
- if (isInitialized != -1) return isInitialized == 1;
-
- memoizedIsInitialized = 1;
- return true;
- }
-
- public void writeTo(com.google.protobuf.CodedOutputStream output)
- throws java.io.IOException {
- getSerializedSize();
- getUnknownFields().writeTo(output);
- }
-
- private int memoizedSerializedSize = -1;
- public int getSerializedSize() {
- int size = memoizedSerializedSize;
- if (size != -1) return size;
-
- size = 0;
- size += getUnknownFields().getSerializedSize();
- memoizedSerializedSize = size;
- return size;
- }
-
- private static final long serialVersionUID = 0L;
- @java.lang.Override
- protected java.lang.Object writeReplace()
- throws java.io.ObjectStreamException {
- return super.writeReplace();
- }
-
- @java.lang.Override
- public boolean equals(final java.lang.Object obj) {
- if (obj == this) {
- return true;
- }
- if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)) {
- return super.equals(obj);
- }
- org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto other = (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto) obj;
-
- boolean result = true;
- result = result &&
- getUnknownFields().equals(other.getUnknownFields());
- return result;
- }
-
- @java.lang.Override
- public int hashCode() {
- int hash = 41;
- hash = (19 * hash) + getDescriptorForType().hashCode();
- hash = (29 * hash) + getUnknownFields().hashCode();
- return hash;
- }
-
- public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseFrom(
- com.google.protobuf.ByteString data)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data).buildParsed();
- }
- public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseFrom(
- com.google.protobuf.ByteString data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data, extensionRegistry)
- .buildParsed();
- }
- public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseFrom(byte[] data)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data).buildParsed();
- }
- public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseFrom(
- byte[] data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data, extensionRegistry)
- .buildParsed();
- }
- public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseFrom(java.io.InputStream input)
- throws java.io.IOException {
- return newBuilder().mergeFrom(input).buildParsed();
- }
- public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseFrom(
- java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- return newBuilder().mergeFrom(input, extensionRegistry)
- .buildParsed();
- }
- public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseDelimitedFrom(java.io.InputStream input)
- throws java.io.IOException {
- Builder builder = newBuilder();
- if (builder.mergeDelimitedFrom(input)) {
- return builder.buildParsed();
- } else {
- return null;
- }
- }
- public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseDelimitedFrom(
- java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- Builder builder = newBuilder();
- if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
- return builder.buildParsed();
- } else {
- return null;
- }
- }
- public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseFrom(
- com.google.protobuf.CodedInputStream input)
- throws java.io.IOException {
- return newBuilder().mergeFrom(input).buildParsed();
- }
- public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto parseFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- return newBuilder().mergeFrom(input, extensionRegistry)
- .buildParsed();
- }
-
- public static Builder newBuilder() { return Builder.create(); }
- public Builder newBuilderForType() { return newBuilder(); }
- public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto prototype) {
- return newBuilder().mergeFrom(prototype);
- }
- public Builder toBuilder() { return newBuilder(this); }
-
- @java.lang.Override
- protected Builder newBuilderForType(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
- Builder builder = new Builder(parent);
- return builder;
- }
- public static final class Builder extends
- com.google.protobuf.GeneratedMessage.Builder<Builder>
- implements org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProtoOrBuilder {
- public static final com.google.protobuf.Descriptors.Descriptor
- getDescriptor() {
- return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_EmptyRequestProto_descriptor;
- }
-
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
- internalGetFieldAccessorTable() {
- return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_EmptyRequestProto_fieldAccessorTable;
- }
-
- // Construct using org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.newBuilder()
- private Builder() {
- maybeForceBuilderInitialization();
- }
-
- private Builder(BuilderParent parent) {
- super(parent);
- maybeForceBuilderInitialization();
- }
- private void maybeForceBuilderInitialization() {
- if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
- }
- }
- private static Builder create() {
- return new Builder();
- }
-
- public Builder clear() {
- super.clear();
- return this;
- }
-
- public Builder clone() {
- return create().mergeFrom(buildPartial());
- }
-
- public com.google.protobuf.Descriptors.Descriptor
- getDescriptorForType() {
- return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDescriptor();
- }
-
- public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto getDefaultInstanceForType() {
- return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
- }
-
- public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto build() {
- org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto result = buildPartial();
- if (!result.isInitialized()) {
- throw newUninitializedMessageException(result);
- }
- return result;
- }
-
- private org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto buildParsed()
- throws com.google.protobuf.InvalidProtocolBufferException {
- org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto result = buildPartial();
- if (!result.isInitialized()) {
- throw newUninitializedMessageException(
- result).asInvalidProtocolBufferException();
- }
- return result;
- }
-
- public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto buildPartial() {
- org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto result = new org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto(this);
- onBuilt();
- return result;
- }
-
- public Builder mergeFrom(com.google.protobuf.Message other) {
- if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto) {
- return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)other);
- } else {
- super.mergeFrom(other);
- return this;
- }
- }
-
- public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto other) {
- if (other == org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance()) return this;
- this.mergeUnknownFields(other.getUnknownFields());
- return this;
- }
-
- public final boolean isInitialized() {
- return true;
- }
-
- public Builder mergeFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- com.google.protobuf.UnknownFieldSet.Builder unknownFields =
- com.google.protobuf.UnknownFieldSet.newBuilder(
- this.getUnknownFields());
- while (true) {
- int tag = input.readTag();
- switch (tag) {
- case 0:
- this.setUnknownFields(unknownFields.build());
- onChanged();
- return this;
- default: {
- if (!parseUnknownField(input, unknownFields,
- extensionRegistry, tag)) {
- this.setUnknownFields(unknownFields.build());
- onChanged();
- return this;
- }
- break;
- }
- }
- }
- }
-
-
- // @@protoc_insertion_point(builder_scope:EmptyRequestProto)
- }
-
- static {
- defaultInstance = new EmptyRequestProto(true);
- defaultInstance.initFields();
- }
-
- // @@protoc_insertion_point(class_scope:EmptyRequestProto)
- }
-
- public interface EmptyResponseProtoOrBuilder
- extends com.google.protobuf.MessageOrBuilder {
- }
- public static final class EmptyResponseProto extends
- com.google.protobuf.GeneratedMessage
- implements EmptyResponseProtoOrBuilder {
- // Use EmptyResponseProto.newBuilder() to construct.
- private EmptyResponseProto(Builder builder) {
- super(builder);
- }
- private EmptyResponseProto(boolean noInit) {}
-
- private static final EmptyResponseProto defaultInstance;
- public static EmptyResponseProto getDefaultInstance() {
- return defaultInstance;
- }
-
- public EmptyResponseProto getDefaultInstanceForType() {
- return defaultInstance;
- }
-
- public static final com.google.protobuf.Descriptors.Descriptor
- getDescriptor() {
- return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_EmptyResponseProto_descriptor;
- }
-
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
- internalGetFieldAccessorTable() {
- return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_EmptyResponseProto_fieldAccessorTable;
- }
-
- private void initFields() {
- }
- private byte memoizedIsInitialized = -1;
- public final boolean isInitialized() {
- byte isInitialized = memoizedIsInitialized;
- if (isInitialized != -1) return isInitialized == 1;
-
- memoizedIsInitialized = 1;
- return true;
- }
-
- public void writeTo(com.google.protobuf.CodedOutputStream output)
- throws java.io.IOException {
- getSerializedSize();
- getUnknownFields().writeTo(output);
- }
-
- private int memoizedSerializedSize = -1;
- public int getSerializedSize() {
- int size = memoizedSerializedSize;
- if (size != -1) return size;
-
- size = 0;
- size += getUnknownFields().getSerializedSize();
- memoizedSerializedSize = size;
- return size;
- }
-
- private static final long serialVersionUID = 0L;
- @java.lang.Override
- protected java.lang.Object writeReplace()
- throws java.io.ObjectStreamException {
- return super.writeReplace();
- }
-
- @java.lang.Override
- public boolean equals(final java.lang.Object obj) {
- if (obj == this) {
- return true;
- }
- if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto)) {
- return super.equals(obj);
- }
- org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto other = (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto) obj;
-
- boolean result = true;
- result = result &&
- getUnknownFields().equals(other.getUnknownFields());
- return result;
- }
-
- @java.lang.Override
- public int hashCode() {
- int hash = 41;
- hash = (19 * hash) + getDescriptorForType().hashCode();
- hash = (29 * hash) + getUnknownFields().hashCode();
- return hash;
- }
-
- public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseFrom(
- com.google.protobuf.ByteString data)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data).buildParsed();
- }
- public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseFrom(
- com.google.protobuf.ByteString data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data, extensionRegistry)
- .buildParsed();
- }
- public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseFrom(byte[] data)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data).buildParsed();
- }
- public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseFrom(
- byte[] data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data, extensionRegistry)
- .buildParsed();
- }
- public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseFrom(java.io.InputStream input)
- throws java.io.IOException {
- return newBuilder().mergeFrom(input).buildParsed();
- }
- public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseFrom(
- java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- return newBuilder().mergeFrom(input, extensionRegistry)
- .buildParsed();
- }
- public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseDelimitedFrom(java.io.InputStream input)
- throws java.io.IOException {
- Builder builder = newBuilder();
- if (builder.mergeDelimitedFrom(input)) {
- return builder.buildParsed();
- } else {
- return null;
- }
- }
- public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseDelimitedFrom(
- java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- Builder builder = newBuilder();
- if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
- return builder.buildParsed();
- } else {
- return null;
- }
- }
- public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseFrom(
- com.google.protobuf.CodedInputStream input)
- throws java.io.IOException {
- return newBuilder().mergeFrom(input).buildParsed();
- }
- public static org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto parseFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- return newBuilder().mergeFrom(input, extensionRegistry)
- .buildParsed();
- }
-
- public static Builder newBuilder() { return Builder.create(); }
- public Builder newBuilderForType() { return newBuilder(); }
- public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto prototype) {
- return newBuilder().mergeFrom(prototype);
- }
- public Builder toBuilder() { return newBuilder(this); }
-
- @java.lang.Override
- protected Builder newBuilderForType(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
- Builder builder = new Builder(parent);
- return builder;
- }
- public static final class Builder extends
- com.google.protobuf.GeneratedMessage.Builder<Builder>
- implements org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProtoOrBuilder {
- public static final com.google.protobuf.Descriptors.Descriptor
- getDescriptor() {
- return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_EmptyResponseProto_descriptor;
- }
-
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
- internalGetFieldAccessorTable() {
- return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_EmptyResponseProto_fieldAccessorTable;
- }
-
- // Construct using org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.newBuilder()
- private Builder() {
- maybeForceBuilderInitialization();
- }
-
- private Builder(BuilderParent parent) {
- super(parent);
- maybeForceBuilderInitialization();
- }
- private void maybeForceBuilderInitialization() {
- if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
- }
- }
- private static Builder create() {
- return new Builder();
- }
-
- public Builder clear() {
- super.clear();
- return this;
- }
-
- public Builder clone() {
- return create().mergeFrom(buildPartial());
- }
-
- public com.google.protobuf.Descriptors.Descriptor
- getDescriptorForType() {
- return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDescriptor();
- }
-
- public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto getDefaultInstanceForType() {
- return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
- }
-
- public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto build() {
- org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto result = buildPartial();
- if (!result.isInitialized()) {
- throw newUninitializedMessageException(result);
- }
- return result;
- }
-
- private org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto buildParsed()
- throws com.google.protobuf.InvalidProtocolBufferException {
- org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto result = buildPartial();
- if (!result.isInitialized()) {
- throw newUninitializedMessageException(
- result).asInvalidProtocolBufferException();
- }
- return result;
- }
-
- public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto buildPartial() {
- org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto result = new org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto(this);
- onBuilt();
- return result;
- }
-
- public Builder mergeFrom(com.google.protobuf.Message other) {
- if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto) {
- return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto)other);
- } else {
- super.mergeFrom(other);
- return this;
- }
- }
-
- public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto other) {
- if (other == org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance()) return this;
- this.mergeUnknownFields(other.getUnknownFields());
- return this;
- }
-
- public final boolean isInitialized() {
- return true;
- }
-
- public Builder mergeFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- com.google.protobuf.UnknownFieldSet.Builder unknownFields =
- com.google.protobuf.UnknownFieldSet.newBuilder(
- this.getUnknownFields());
- while (true) {
- int tag = input.readTag();
- switch (tag) {
- case 0:
- this.setUnknownFields(unknownFields.build());
- onChanged();
- return this;
- default: {
- if (!parseUnknownField(input, unknownFields,
- extensionRegistry, tag)) {
- this.setUnknownFields(unknownFields.build());
- onChanged();
- return this;
- }
- break;
- }
- }
- }
- }
-
-
- // @@protoc_insertion_point(builder_scope:EmptyResponseProto)
- }
-
- static {
- defaultInstance = new EmptyResponseProto(true);
- defaultInstance.initFields();
- }
-
- // @@protoc_insertion_point(class_scope:EmptyResponseProto)
- }
-
- public interface EchoRequestProtoOrBuilder
- extends com.google.protobuf.MessageOrBuilder {
-
- // required string message = 1;
- boolean hasMessage();
- String getMessage();
- }
- public static final class EchoRequestProto extends
- com.google.protobuf.GeneratedMessage
- implements EchoRequestProtoOrBuilder {
- // Use EchoRequestProto.newBuilder() to construct.
- private EchoRequestProto(Builder builder) {
- super(builder);
- }
- private EchoRequestProto(boolean noInit) {}
-
- private static final EchoRequestProto defaultInstance;
- public static EchoRequestProto getDefaultInstance() {
- return defaultInstance;
- }
-
- public EchoRequestProto getDefaultInstanceForType() {
- return defaultInstance;
- }
-
- public static final com.google.protobuf.Descriptors.Descriptor
- getDescriptor() {
- return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_EchoRequestProto_descriptor;
- }
-
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
- internalGetFieldAccessorTable() {
- return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_EchoRequestProto_fieldAccessorTable;
- }
-
- private int bitField0_;
- // required string message = 1;
- public static final int MESSAGE_FIELD_NUMBER = 1;
- private java.lang.Object message_;
- public boolean hasMessage() {
- return ((bitField0_ & 0x00000001) == 0x00000001);
- }
- public String getMessage() {
- java.lang.Object ref = message_;
- if (ref instanceof String) {
- return (String) ref;
- } else {
- com.google.protobuf.ByteString bs =
- (com.google.protobuf.ByteString) ref;
- String s = bs.toStringUtf8();
- if (com.google.protobuf.Internal.isValidUtf8(bs)) {
- message_ = s;
- }
- return s;
- }
- }
- private com.google.protobuf.ByteString getMessageBytes() {
- java.lang.Object ref = message_;
- if (ref instanceof String) {
- com.google.protobuf.ByteString b =
- com.google.protobuf.ByteString.copyFromUtf8((String) ref);
- message_ = b;
- return b;
- } else {
- return (com.google.protobuf.ByteString) ref;
- }
- }
-
- private void initFields() {
- message_ = "";
- }
- private byte memoizedIsInitialized = -1;
- public final boolean isInitialized() {
- byte isInitialized = memoizedIsInitialized;
- if (isInitialized != -1) return isInitialized == 1;
-
- if (!hasMessage()) {
- memoizedIsInitialized = 0;
- return false;
- }
- memoizedIsInitialized = 1;
- return true;
- }
-
- public void writeTo(com.google.protobuf.CodedOutputStream output)
- throws java.io.IOException {
- getSerializedSize();
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
- output.writeBytes(1, getMessageBytes());
- }
- getUnknownFields().writeTo(output);
- }
-
- private int memoizedSerializedSize = -1;
- public int getSerializedSize() {
- int size = memoizedSerializedSize;
- if (size != -1) return size;
-
- size = 0;
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
- size += com.google.protobuf.CodedOutputStream
- .computeBytesSize(1, getMessageBytes());
- }
- size += getUnknownFields().getSerializedSize();
- memoizedSerializedSize = size;
- return size;
- }
-
- private static final long serialVersionUID = 0L;
- @java.lang.Override
- protected java.lang.Object writeReplace()
- throws java.io.ObjectStreamException {
- return super.writeReplace();
- }
-
- @java.lang.Override
- public boolean equals(final java.lang.Object obj) {
- if (obj == this) {
- return true;
- }
- if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto)) {
- return super.equals(obj);
- }
- org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto other = (org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto) obj;
-
- boolean result = true;
- result = result && (hasMessage() == other.hasMessage());
- if (hasMessage()) {
- result = result && getMessage()
- .equals(other.getMessage());
- }
- result = result &&
- getUnknownFields().equals(other.getUnknownFields());
- return result;
- }
-
- @java.lang.Override
- public int hashCode() {
- int hash = 41;
- hash = (19 * hash) + getDescriptorForType().hashCode();
- if (hasMessage()) {
- hash = (37 * hash) + MESSAGE_FIELD_NUMBER;
- hash = (53 * hash) + getMessage().hashCode();
- }
- hash = (29 * hash) + getUnknownFields().hashCode();
- return hash;
- }
-
- public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseFrom(
- com.google.protobuf.ByteString data)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data).buildParsed();
- }
- public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseFrom(
- com.google.protobuf.ByteString data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data, extensionRegistry)
- .buildParsed();
- }
- public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseFrom(byte[] data)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data).buildParsed();
- }
- public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseFrom(
- byte[] data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data, extensionRegistry)
- .buildParsed();
- }
- public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseFrom(java.io.InputStream input)
- throws java.io.IOException {
- return newBuilder().mergeFrom(input).buildParsed();
- }
- public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseFrom(
- java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- return newBuilder().mergeFrom(input, extensionRegistry)
- .buildParsed();
- }
- public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseDelimitedFrom(java.io.InputStream input)
- throws java.io.IOException {
- Builder builder = newBuilder();
- if (builder.mergeDelimitedFrom(input)) {
- return builder.buildParsed();
- } else {
- return null;
- }
- }
- public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseDelimitedFrom(
- java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- Builder builder = newBuilder();
- if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
- return builder.buildParsed();
- } else {
- return null;
- }
- }
- public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseFrom(
- com.google.protobuf.CodedInputStream input)
- throws java.io.IOException {
- return newBuilder().mergeFrom(input).buildParsed();
- }
- public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto parseFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- return newBuilder().mergeFrom(input, extensionRegistry)
- .buildParsed();
- }
-
- public static Builder newBuilder() { return Builder.create(); }
- public Builder newBuilderForType() { return newBuilder(); }
- public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto prototype) {
- return newBuilder().mergeFrom(prototype);
- }
- public Builder toBuilder() { return newBuilder(this); }
-
- @java.lang.Override
- protected Builder newBuilderForType(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
- Builder builder = new Builder(parent);
- return builder;
- }
- public static final class Builder extends
- com.google.protobuf.GeneratedMessage.Builder<Builder>
- implements org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProtoOrBuilder {
- public static final com.google.protobuf.Descriptors.Descriptor
- getDescriptor() {
- return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_EchoRequestProto_descriptor;
- }
-
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
- internalGetFieldAccessorTable() {
- return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_EchoRequestProto_fieldAccessorTable;
- }
-
- // Construct using org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto.newBuilder()
- private Builder() {
- maybeForceBuilderInitialization();
- }
-
- private Builder(BuilderParent parent) {
- super(parent);
- maybeForceBuilderInitialization();
- }
- private void maybeForceBuilderInitialization() {
- if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
- }
- }
- private static Builder create() {
- return new Builder();
- }
-
- public Builder clear() {
- super.clear();
- message_ = "";
- bitField0_ = (bitField0_ & ~0x00000001);
- return this;
- }
-
- public Builder clone() {
- return create().mergeFrom(buildPartial());
- }
-
- public com.google.protobuf.Descriptors.Descriptor
- getDescriptorForType() {
- return org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto.getDescriptor();
- }
-
- public org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto getDefaultInstanceForType() {
- return org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto.getDefaultInstance();
- }
-
- public org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto build() {
- org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto result = buildPartial();
- if (!result.isInitialized()) {
- throw newUninitializedMessageException(result);
- }
- return result;
- }
-
- private org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto buildParsed()
- throws com.google.protobuf.InvalidProtocolBufferException {
- org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto result = buildPartial();
- if (!result.isInitialized()) {
- throw newUninitializedMessageException(
- result).asInvalidProtocolBufferException();
- }
- return result;
- }
-
- public org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto buildPartial() {
- org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto result = new org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto(this);
- int from_bitField0_ = bitField0_;
- int to_bitField0_ = 0;
- if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
- to_bitField0_ |= 0x00000001;
- }
- result.message_ = message_;
- result.bitField0_ = to_bitField0_;
- onBuilt();
- return result;
- }
-
- public Builder mergeFrom(com.google.protobuf.Message other) {
- if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto) {
- return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto)other);
- } else {
- super.mergeFrom(other);
- return this;
- }
- }
-
- public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto other) {
- if (other == org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto.getDefaultInstance()) return this;
- if (other.hasMessage()) {
- setMessage(other.getMessage());
- }
- this.mergeUnknownFields(other.getUnknownFields());
- return this;
- }
-
- public final boolean isInitialized() {
- if (!hasMessage()) {
-
- return false;
- }
- return true;
- }
-
- public Builder mergeFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- com.google.protobuf.UnknownFieldSet.Builder unknownFields =
- com.google.protobuf.UnknownFieldSet.newBuilder(
- this.getUnknownFields());
- while (true) {
- int tag = input.readTag();
- switch (tag) {
- case 0:
- this.setUnknownFields(unknownFields.build());
- onChanged();
- return this;
- default: {
- if (!parseUnknownField(input, unknownFields,
- extensionRegistry, tag)) {
- this.setUnknownFields(unknownFields.build());
- onChanged();
- return this;
- }
- break;
- }
- case 10: {
- bitField0_ |= 0x00000001;
- message_ = input.readBytes();
- break;
- }
- }
- }
- }
-
- private int bitField0_;
-
- // required string message = 1;
- private java.lang.Object message_ = "";
- public boolean hasMessage() {
- return ((bitField0_ & 0x00000001) == 0x00000001);
- }
- public String getMessage() {
- java.lang.Object ref = message_;
- if (!(ref instanceof String)) {
- String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
- message_ = s;
- return s;
- } else {
- return (String) ref;
- }
- }
- public Builder setMessage(String value) {
- if (value == null) {
- throw new NullPointerException();
- }
- bitField0_ |= 0x00000001;
- message_ = value;
- onChanged();
- return this;
- }
- public Builder clearMessage() {
- bitField0_ = (bitField0_ & ~0x00000001);
- message_ = getDefaultInstance().getMessage();
- onChanged();
- return this;
- }
- void setMessage(com.google.protobuf.ByteString value) {
- bitField0_ |= 0x00000001;
- message_ = value;
- onChanged();
- }
-
- // @@protoc_insertion_point(builder_scope:EchoRequestProto)
- }
-
- static {
- defaultInstance = new EchoRequestProto(true);
- defaultInstance.initFields();
- }
-
- // @@protoc_insertion_point(class_scope:EchoRequestProto)
- }
-
- public interface EchoResponseProtoOrBuilder
- extends com.google.protobuf.MessageOrBuilder {
-
- // required string message = 1;
- boolean hasMessage();
- String getMessage();
- }
- public static final class EchoResponseProto extends
- com.google.protobuf.GeneratedMessage
- implements EchoResponseProtoOrBuilder {
- // Use EchoResponseProto.newBuilder() to construct.
- private EchoResponseProto(Builder builder) {
- super(builder);
- }
- private EchoResponseProto(boolean noInit) {}
-
- private static final EchoResponseProto defaultInstance;
- public static EchoResponseProto getDefaultInstance() {
- return defaultInstance;
- }
-
- public EchoResponseProto getDefaultInstanceForType() {
- return defaultInstance;
- }
-
- public static final com.google.protobuf.Descriptors.Descriptor
- getDescriptor() {
- return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_EchoResponseProto_descriptor;
- }
-
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
- internalGetFieldAccessorTable() {
- return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_EchoResponseProto_fieldAccessorTable;
- }
-
- private int bitField0_;
- // required string message = 1;
- public static final int MESSAGE_FIELD_NUMBER = 1;
- private java.lang.Object message_;
- public boolean hasMessage() {
- return ((bitField0_ & 0x00000001) == 0x00000001);
- }
- public String getMessage() {
- java.lang.Object ref = message_;
- if (ref instanceof String) {
- return (String) ref;
- } else {
- com.google.protobuf.ByteString bs =
- (com.google.protobuf.ByteString) ref;
- String s = bs.toStringUtf8();
- if (com.google.protobuf.Internal.isValidUtf8(bs)) {
- message_ = s;
- }
- return s;
- }
- }
- private com.google.protobuf.ByteString getMessageBytes() {
- java.lang.Object ref = message_;
- if (ref instanceof String) {
- com.google.protobuf.ByteString b =
- com.google.protobuf.ByteString.copyFromUtf8((String) ref);
- message_ = b;
- return b;
- } else {
- return (com.google.protobuf.ByteString) ref;
- }
- }
-
- private void initFields() {
- message_ = "";
- }
- private byte memoizedIsInitialized = -1;
- public final boolean isInitialized() {
- byte isInitialized = memoizedIsInitialized;
- if (isInitialized != -1) return isInitialized == 1;
-
- if (!hasMessage()) {
- memoizedIsInitialized = 0;
- return false;
- }
- memoizedIsInitialized = 1;
- return true;
- }
-
- public void writeTo(com.google.protobuf.CodedOutputStream output)
- throws java.io.IOException {
- getSerializedSize();
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
- output.writeBytes(1, getMessageBytes());
- }
- getUnknownFields().writeTo(output);
- }
-
- private int memoizedSerializedSize = -1;
- public int getSerializedSize() {
- int size = memoizedSerializedSize;
- if (size != -1) return size;
-
- size = 0;
- if (((bitField0_ & 0x00000001) == 0x00000001)) {
- size += com.google.protobuf.CodedOutputStream
- .computeBytesSize(1, getMessageBytes());
- }
- size += getUnknownFields().getSerializedSize();
- memoizedSerializedSize = size;
- return size;
- }
-
- private static final long serialVersionUID = 0L;
- @java.lang.Override
- protected java.lang.Object writeReplace()
- throws java.io.ObjectStreamException {
- return super.writeReplace();
- }
-
- @java.lang.Override
- public boolean equals(final java.lang.Object obj) {
- if (obj == this) {
- return true;
- }
- if (!(obj instanceof org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto)) {
- return super.equals(obj);
- }
- org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto other = (org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto) obj;
-
- boolean result = true;
- result = result && (hasMessage() == other.hasMessage());
- if (hasMessage()) {
- result = result && getMessage()
- .equals(other.getMessage());
- }
- result = result &&
- getUnknownFields().equals(other.getUnknownFields());
- return result;
- }
-
- @java.lang.Override
- public int hashCode() {
- int hash = 41;
- hash = (19 * hash) + getDescriptorForType().hashCode();
- if (hasMessage()) {
- hash = (37 * hash) + MESSAGE_FIELD_NUMBER;
- hash = (53 * hash) + getMessage().hashCode();
- }
- hash = (29 * hash) + getUnknownFields().hashCode();
- return hash;
- }
-
- public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseFrom(
- com.google.protobuf.ByteString data)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data).buildParsed();
- }
- public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseFrom(
- com.google.protobuf.ByteString data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data, extensionRegistry)
- .buildParsed();
- }
- public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseFrom(byte[] data)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data).buildParsed();
- }
- public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseFrom(
- byte[] data,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws com.google.protobuf.InvalidProtocolBufferException {
- return newBuilder().mergeFrom(data, extensionRegistry)
- .buildParsed();
- }
- public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseFrom(java.io.InputStream input)
- throws java.io.IOException {
- return newBuilder().mergeFrom(input).buildParsed();
- }
- public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseFrom(
- java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- return newBuilder().mergeFrom(input, extensionRegistry)
- .buildParsed();
- }
- public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseDelimitedFrom(java.io.InputStream input)
- throws java.io.IOException {
- Builder builder = newBuilder();
- if (builder.mergeDelimitedFrom(input)) {
- return builder.buildParsed();
- } else {
- return null;
- }
- }
- public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseDelimitedFrom(
- java.io.InputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- Builder builder = newBuilder();
- if (builder.mergeDelimitedFrom(input, extensionRegistry)) {
- return builder.buildParsed();
- } else {
- return null;
- }
- }
- public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseFrom(
- com.google.protobuf.CodedInputStream input)
- throws java.io.IOException {
- return newBuilder().mergeFrom(input).buildParsed();
- }
- public static org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto parseFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- return newBuilder().mergeFrom(input, extensionRegistry)
- .buildParsed();
- }
-
- public static Builder newBuilder() { return Builder.create(); }
- public Builder newBuilderForType() { return newBuilder(); }
- public static Builder newBuilder(org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto prototype) {
- return newBuilder().mergeFrom(prototype);
- }
- public Builder toBuilder() { return newBuilder(this); }
-
- @java.lang.Override
- protected Builder newBuilderForType(
- com.google.protobuf.GeneratedMessage.BuilderParent parent) {
- Builder builder = new Builder(parent);
- return builder;
- }
- public static final class Builder extends
- com.google.protobuf.GeneratedMessage.Builder<Builder>
- implements org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProtoOrBuilder {
- public static final com.google.protobuf.Descriptors.Descriptor
- getDescriptor() {
- return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_EchoResponseProto_descriptor;
- }
-
- protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
- internalGetFieldAccessorTable() {
- return org.apache.hadoop.ipc.protobuf.TestProtos.internal_static_EchoResponseProto_fieldAccessorTable;
- }
-
- // Construct using org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.newBuilder()
- private Builder() {
- maybeForceBuilderInitialization();
- }
-
- private Builder(BuilderParent parent) {
- super(parent);
- maybeForceBuilderInitialization();
- }
- private void maybeForceBuilderInitialization() {
- if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
- }
- }
- private static Builder create() {
- return new Builder();
- }
-
- public Builder clear() {
- super.clear();
- message_ = "";
- bitField0_ = (bitField0_ & ~0x00000001);
- return this;
- }
-
- public Builder clone() {
- return create().mergeFrom(buildPartial());
- }
-
- public com.google.protobuf.Descriptors.Descriptor
- getDescriptorForType() {
- return org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.getDescriptor();
- }
-
- public org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto getDefaultInstanceForType() {
- return org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.getDefaultInstance();
- }
-
- public org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto build() {
- org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto result = buildPartial();
- if (!result.isInitialized()) {
- throw newUninitializedMessageException(result);
- }
- return result;
- }
-
- private org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto buildParsed()
- throws com.google.protobuf.InvalidProtocolBufferException {
- org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto result = buildPartial();
- if (!result.isInitialized()) {
- throw newUninitializedMessageException(
- result).asInvalidProtocolBufferException();
- }
- return result;
- }
-
- public org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto buildPartial() {
- org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto result = new org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto(this);
- int from_bitField0_ = bitField0_;
- int to_bitField0_ = 0;
- if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
- to_bitField0_ |= 0x00000001;
- }
- result.message_ = message_;
- result.bitField0_ = to_bitField0_;
- onBuilt();
- return result;
- }
-
- public Builder mergeFrom(com.google.protobuf.Message other) {
- if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto) {
- return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto)other);
- } else {
- super.mergeFrom(other);
- return this;
- }
- }
-
- public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto other) {
- if (other == org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.getDefaultInstance()) return this;
- if (other.hasMessage()) {
- setMessage(other.getMessage());
- }
- this.mergeUnknownFields(other.getUnknownFields());
- return this;
- }
-
- public final boolean isInitialized() {
- if (!hasMessage()) {
-
- return false;
- }
- return true;
- }
-
- public Builder mergeFrom(
- com.google.protobuf.CodedInputStream input,
- com.google.protobuf.ExtensionRegistryLite extensionRegistry)
- throws java.io.IOException {
- com.google.protobuf.UnknownFieldSet.Builder unknownFields =
- com.google.protobuf.UnknownFieldSet.newBuilder(
- this.getUnknownFields());
- while (true) {
- int tag = input.readTag();
- switch (tag) {
- case 0:
- this.setUnknownFields(unknownFields.build());
- onChanged();
- return this;
- default: {
- if (!parseUnknownField(input, unknownFields,
- extensionRegistry, tag)) {
- this.setUnknownFields(unknownFields.build());
- onChanged();
- return this;
- }
- break;
- }
- case 10: {
- bitField0_ |= 0x00000001;
- message_ = input.readBytes();
- break;
- }
- }
- }
- }
-
- private int bitField0_;
-
- // required string message = 1;
- private java.lang.Object message_ = "";
- public boolean hasMessage() {
- return ((bitField0_ & 0x00000001) == 0x00000001);
- }
- public String getMessage() {
- java.lang.Object ref = message_;
- if (!(ref instanceof String)) {
- String s = ((com.google.protobuf.ByteString) ref).toStringUtf8();
- message_ = s;
- return s;
- } else {
- return (String) ref;
- }
- }
- public Builder setMessage(String value) {
- if (value == null) {
- throw new NullPointerException();
- }
- bitField0_ |= 0x00000001;
- message_ = value;
- onChanged();
- return this;
- }
- public Builder clearMessage() {
- bitField0_ = (bitField0_ & ~0x00000001);
- message_ = getDefaultInstance().getMessage();
- onChanged();
- return this;
- }
- void setMessage(com.google.protobuf.ByteString value) {
- bitField0_ |= 0x00000001;
- message_ = value;
- onChanged();
- }
-
- // @@protoc_insertion_point(builder_scope:EchoResponseProto)
- }
-
- static {
- defaultInstance = new EchoResponseProto(true);
- defaultInstance.initFields();
- }
-
- // @@protoc_insertion_point(class_scope:EchoResponseProto)
- }
-
- private static com.google.protobuf.Descriptors.Descriptor
- internal_static_EmptyRequestProto_descriptor;
- private static
- com.google.protobuf.GeneratedMessage.FieldAccessorTable
- internal_static_EmptyRequestProto_fieldAccessorTable;
- private static com.google.protobuf.Descriptors.Descriptor
- internal_static_EmptyResponseProto_descriptor;
- private static
- com.google.protobuf.GeneratedMessage.FieldAccessorTable
- internal_static_EmptyResponseProto_fieldAccessorTable;
- private static com.google.protobuf.Descriptors.Descriptor
- internal_static_EchoRequestProto_descriptor;
- private static
- com.google.protobuf.GeneratedMessage.FieldAccessorTable
- internal_static_EchoRequestProto_fieldAccessorTable;
- private static com.google.protobuf.Descriptors.Descriptor
- internal_static_EchoResponseProto_descriptor;
- private static
- com.google.protobuf.GeneratedMessage.FieldAccessorTable
- internal_static_EchoResponseProto_fieldAccessorTable;
-
- public static com.google.protobuf.Descriptors.FileDescriptor
- getDescriptor() {
- return descriptor;
- }
- private static com.google.protobuf.Descriptors.FileDescriptor
- descriptor;
- static {
- java.lang.String[] descriptorData = {
- "\n\ntest.proto\"\023\n\021EmptyRequestProto\"\024\n\022Emp" +
- "tyResponseProto\"#\n\020EchoRequestProto\022\017\n\007m" +
- "essage\030\001 \002(\t\"$\n\021EchoResponseProto\022\017\n\007mes" +
- "sage\030\001 \002(\tB/\n\036org.apache.hadoop.ipc.prot" +
- "obufB\nTestProtos\240\001\001"
- };
- com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
- new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
- public com.google.protobuf.ExtensionRegistry assignDescriptors(
- com.google.protobuf.Descriptors.FileDescriptor root) {
- descriptor = root;
- internal_static_EmptyRequestProto_descriptor =
- getDescriptor().getMessageTypes().get(0);
- internal_static_EmptyRequestProto_fieldAccessorTable = new
- com.google.protobuf.GeneratedMessage.FieldAccessorTable(
- internal_static_EmptyRequestProto_descriptor,
- new java.lang.String[] { },
- org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.class,
- org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.Builder.class);
- internal_static_EmptyResponseProto_descriptor =
- getDescriptor().getMessageTypes().get(1);
- internal_static_EmptyResponseProto_fieldAccessorTable = new
- com.google.protobuf.GeneratedMessage.FieldAccessorTable(
- internal_static_EmptyResponseProto_descriptor,
- new java.lang.String[] { },
- org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.class,
- org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.Builder.class);
- internal_static_EchoRequestProto_descriptor =
- getDescriptor().getMessageTypes().get(2);
- internal_static_EchoRequestProto_fieldAccessorTable = new
- com.google.protobuf.GeneratedMessage.FieldAccessorTable(
- internal_static_EchoRequestProto_descriptor,
- new java.lang.String[] { "Message", },
- org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto.class,
- org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto.Builder.class);
- internal_static_EchoResponseProto_descriptor =
- getDescriptor().getMessageTypes().get(3);
- internal_static_EchoResponseProto_fieldAccessorTable = new
- com.google.protobuf.GeneratedMessage.FieldAccessorTable(
- internal_static_EchoResponseProto_descriptor,
- new java.lang.String[] { "Message", },
- org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.class,
- org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.Builder.class);
- return null;
- }
- };
- com.google.protobuf.Descriptors.FileDescriptor
- .internalBuildGeneratedFileFrom(descriptorData,
- new com.google.protobuf.Descriptors.FileDescriptor[] {
- }, assigner);
- }
-
- // @@protoc_insertion_point(outer_class_scope)
-}
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/protobuf/TestRpcServiceProtos.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/protobuf/TestRpcServiceProtos.java
deleted file mode 100644
index 8b192971ab..0000000000
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/ipc/protobuf/TestRpcServiceProtos.java
+++ /dev/null
@@ -1,684 +0,0 @@
-// Generated by the protocol buffer compiler. DO NOT EDIT!
-// source: test_rpc_service.proto
-
-package org.apache.hadoop.ipc.protobuf;
-
-public final class TestRpcServiceProtos {
- private TestRpcServiceProtos() {}
- public static void registerAllExtensions(
- com.google.protobuf.ExtensionRegistry registry) {
- }
- public static abstract class TestProtobufRpcProto
- implements com.google.protobuf.Service {
- protected TestProtobufRpcProto() {}
-
- public interface Interface {
- public abstract void ping(
- com.google.protobuf.RpcController controller,
- org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
-        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done);
-
- public abstract void echo(
- com.google.protobuf.RpcController controller,
- org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto request,
-        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto> done);
-
- public abstract void error(
- com.google.protobuf.RpcController controller,
- org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
-        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done);
-
- }
-
- public static com.google.protobuf.Service newReflectiveService(
- final Interface impl) {
- return new TestProtobufRpcProto() {
- @java.lang.Override
- public void ping(
- com.google.protobuf.RpcController controller,
- org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
-          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done) {
- impl.ping(controller, request, done);
- }
-
- @java.lang.Override
- public void echo(
- com.google.protobuf.RpcController controller,
- org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto request,
-          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto> done) {
- impl.echo(controller, request, done);
- }
-
- @java.lang.Override
- public void error(
- com.google.protobuf.RpcController controller,
- org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
-          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done) {
- impl.error(controller, request, done);
- }
-
- };
- }
-
- public static com.google.protobuf.BlockingService
- newReflectiveBlockingService(final BlockingInterface impl) {
- return new com.google.protobuf.BlockingService() {
- public final com.google.protobuf.Descriptors.ServiceDescriptor
- getDescriptorForType() {
- return getDescriptor();
- }
-
- public final com.google.protobuf.Message callBlockingMethod(
- com.google.protobuf.Descriptors.MethodDescriptor method,
- com.google.protobuf.RpcController controller,
- com.google.protobuf.Message request)
- throws com.google.protobuf.ServiceException {
- if (method.getService() != getDescriptor()) {
- throw new java.lang.IllegalArgumentException(
- "Service.callBlockingMethod() given method descriptor for " +
- "wrong service type.");
- }
- switch(method.getIndex()) {
- case 0:
- return impl.ping(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request);
- case 1:
- return impl.echo(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto)request);
- case 2:
- return impl.error(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request);
- default:
- throw new java.lang.AssertionError("Can't get here.");
- }
- }
-
- public final com.google.protobuf.Message
- getRequestPrototype(
- com.google.protobuf.Descriptors.MethodDescriptor method) {
- if (method.getService() != getDescriptor()) {
- throw new java.lang.IllegalArgumentException(
- "Service.getRequestPrototype() given method " +
- "descriptor for wrong service type.");
- }
- switch(method.getIndex()) {
- case 0:
- return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
- case 1:
- return org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto.getDefaultInstance();
- case 2:
- return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
- default:
- throw new java.lang.AssertionError("Can't get here.");
- }
- }
-
- public final com.google.protobuf.Message
- getResponsePrototype(
- com.google.protobuf.Descriptors.MethodDescriptor method) {
- if (method.getService() != getDescriptor()) {
- throw new java.lang.IllegalArgumentException(
- "Service.getResponsePrototype() given method " +
- "descriptor for wrong service type.");
- }
- switch(method.getIndex()) {
- case 0:
- return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
- case 1:
- return org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.getDefaultInstance();
- case 2:
- return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
- default:
- throw new java.lang.AssertionError("Can't get here.");
- }
- }
-
- };
- }
-
- public abstract void ping(
- com.google.protobuf.RpcController controller,
- org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
-        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done);
-
- public abstract void echo(
- com.google.protobuf.RpcController controller,
- org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto request,
-        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto> done);
-
- public abstract void error(
- com.google.protobuf.RpcController controller,
- org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
-        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done);
-
- public static final
- com.google.protobuf.Descriptors.ServiceDescriptor
- getDescriptor() {
- return org.apache.hadoop.ipc.protobuf.TestRpcServiceProtos.getDescriptor().getServices().get(0);
- }
- public final com.google.protobuf.Descriptors.ServiceDescriptor
- getDescriptorForType() {
- return getDescriptor();
- }
-
- public final void callMethod(
- com.google.protobuf.Descriptors.MethodDescriptor method,
- com.google.protobuf.RpcController controller,
- com.google.protobuf.Message request,
- com.google.protobuf.RpcCallback<
- com.google.protobuf.Message> done) {
- if (method.getService() != getDescriptor()) {
- throw new java.lang.IllegalArgumentException(
- "Service.callMethod() given method descriptor for wrong " +
- "service type.");
- }
- switch(method.getIndex()) {
- case 0:
- this.ping(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request,
- com.google.protobuf.RpcUtil.specializeCallback(
- done));
- return;
- case 1:
- this.echo(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto)request,
- com.google.protobuf.RpcUtil.specializeCallback(
- done));
- return;
- case 2:
- this.error(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request,
- com.google.protobuf.RpcUtil.specializeCallback(
- done));
- return;
- default:
- throw new java.lang.AssertionError("Can't get here.");
- }
- }
-
- public final com.google.protobuf.Message
- getRequestPrototype(
- com.google.protobuf.Descriptors.MethodDescriptor method) {
- if (method.getService() != getDescriptor()) {
- throw new java.lang.IllegalArgumentException(
- "Service.getRequestPrototype() given method " +
- "descriptor for wrong service type.");
- }
- switch(method.getIndex()) {
- case 0:
- return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
- case 1:
- return org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto.getDefaultInstance();
- case 2:
- return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
- default:
- throw new java.lang.AssertionError("Can't get here.");
- }
- }
-
- public final com.google.protobuf.Message
- getResponsePrototype(
- com.google.protobuf.Descriptors.MethodDescriptor method) {
- if (method.getService() != getDescriptor()) {
- throw new java.lang.IllegalArgumentException(
- "Service.getResponsePrototype() given method " +
- "descriptor for wrong service type.");
- }
- switch(method.getIndex()) {
- case 0:
- return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
- case 1:
- return org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.getDefaultInstance();
- case 2:
- return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
- default:
- throw new java.lang.AssertionError("Can't get here.");
- }
- }
-
- public static Stub newStub(
- com.google.protobuf.RpcChannel channel) {
- return new Stub(channel);
- }
-
- public static final class Stub extends org.apache.hadoop.ipc.protobuf.TestRpcServiceProtos.TestProtobufRpcProto implements Interface {
- private Stub(com.google.protobuf.RpcChannel channel) {
- this.channel = channel;
- }
-
- private final com.google.protobuf.RpcChannel channel;
-
- public com.google.protobuf.RpcChannel getChannel() {
- return channel;
- }
-
- public void ping(
- com.google.protobuf.RpcController controller,
- org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
-          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done) {
- channel.callMethod(
- getDescriptor().getMethods().get(0),
- controller,
- request,
- org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance(),
- com.google.protobuf.RpcUtil.generalizeCallback(
- done,
- org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.class,
- org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance()));
- }
-
- public void echo(
- com.google.protobuf.RpcController controller,
- org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto request,
-          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto> done) {
- channel.callMethod(
- getDescriptor().getMethods().get(1),
- controller,
- request,
- org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.getDefaultInstance(),
- com.google.protobuf.RpcUtil.generalizeCallback(
- done,
- org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.class,
- org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.getDefaultInstance()));
- }
-
- public void error(
- com.google.protobuf.RpcController controller,
- org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
-          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done) {
- channel.callMethod(
- getDescriptor().getMethods().get(2),
- controller,
- request,
- org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance(),
- com.google.protobuf.RpcUtil.generalizeCallback(
- done,
- org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.class,
- org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance()));
- }
- }
-
- public static BlockingInterface newBlockingStub(
- com.google.protobuf.BlockingRpcChannel channel) {
- return new BlockingStub(channel);
- }
-
- public interface BlockingInterface {
- public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto ping(
- com.google.protobuf.RpcController controller,
- org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
- throws com.google.protobuf.ServiceException;
-
- public org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto echo(
- com.google.protobuf.RpcController controller,
- org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto request)
- throws com.google.protobuf.ServiceException;
-
- public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto error(
- com.google.protobuf.RpcController controller,
- org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
- throws com.google.protobuf.ServiceException;
- }
-
- private static final class BlockingStub implements BlockingInterface {
- private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
- this.channel = channel;
- }
-
- private final com.google.protobuf.BlockingRpcChannel channel;
-
- public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto ping(
- com.google.protobuf.RpcController controller,
- org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
- throws com.google.protobuf.ServiceException {
- return (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto) channel.callBlockingMethod(
- getDescriptor().getMethods().get(0),
- controller,
- request,
- org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance());
- }
-
-
- public org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto echo(
- com.google.protobuf.RpcController controller,
- org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto request)
- throws com.google.protobuf.ServiceException {
- return (org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto) channel.callBlockingMethod(
- getDescriptor().getMethods().get(1),
- controller,
- request,
- org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.getDefaultInstance());
- }
-
-
- public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto error(
- com.google.protobuf.RpcController controller,
- org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
- throws com.google.protobuf.ServiceException {
- return (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto) channel.callBlockingMethod(
- getDescriptor().getMethods().get(2),
- controller,
- request,
- org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance());
- }
-
- }
- }
-
- public static abstract class TestProtobufRpc2Proto
- implements com.google.protobuf.Service {
- protected TestProtobufRpc2Proto() {}
-
- public interface Interface {
- public abstract void ping2(
- com.google.protobuf.RpcController controller,
- org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
-        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done);
-
- public abstract void echo2(
- com.google.protobuf.RpcController controller,
- org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto request,
-        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto> done);
-
- }
-
- public static com.google.protobuf.Service newReflectiveService(
- final Interface impl) {
- return new TestProtobufRpc2Proto() {
- @java.lang.Override
- public void ping2(
- com.google.protobuf.RpcController controller,
- org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
-          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done) {
- impl.ping2(controller, request, done);
- }
-
- @java.lang.Override
- public void echo2(
- com.google.protobuf.RpcController controller,
- org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto request,
-          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto> done) {
- impl.echo2(controller, request, done);
- }
-
- };
- }
-
- public static com.google.protobuf.BlockingService
- newReflectiveBlockingService(final BlockingInterface impl) {
- return new com.google.protobuf.BlockingService() {
- public final com.google.protobuf.Descriptors.ServiceDescriptor
- getDescriptorForType() {
- return getDescriptor();
- }
-
- public final com.google.protobuf.Message callBlockingMethod(
- com.google.protobuf.Descriptors.MethodDescriptor method,
- com.google.protobuf.RpcController controller,
- com.google.protobuf.Message request)
- throws com.google.protobuf.ServiceException {
- if (method.getService() != getDescriptor()) {
- throw new java.lang.IllegalArgumentException(
- "Service.callBlockingMethod() given method descriptor for " +
- "wrong service type.");
- }
- switch(method.getIndex()) {
- case 0:
- return impl.ping2(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request);
- case 1:
- return impl.echo2(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto)request);
- default:
- throw new java.lang.AssertionError("Can't get here.");
- }
- }
-
- public final com.google.protobuf.Message
- getRequestPrototype(
- com.google.protobuf.Descriptors.MethodDescriptor method) {
- if (method.getService() != getDescriptor()) {
- throw new java.lang.IllegalArgumentException(
- "Service.getRequestPrototype() given method " +
- "descriptor for wrong service type.");
- }
- switch(method.getIndex()) {
- case 0:
- return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
- case 1:
- return org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto.getDefaultInstance();
- default:
- throw new java.lang.AssertionError("Can't get here.");
- }
- }
-
- public final com.google.protobuf.Message
- getResponsePrototype(
- com.google.protobuf.Descriptors.MethodDescriptor method) {
- if (method.getService() != getDescriptor()) {
- throw new java.lang.IllegalArgumentException(
- "Service.getResponsePrototype() given method " +
- "descriptor for wrong service type.");
- }
- switch(method.getIndex()) {
- case 0:
- return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
- case 1:
- return org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.getDefaultInstance();
- default:
- throw new java.lang.AssertionError("Can't get here.");
- }
- }
-
- };
- }
-
- public abstract void ping2(
- com.google.protobuf.RpcController controller,
- org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
-        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done);
-
- public abstract void echo2(
- com.google.protobuf.RpcController controller,
- org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto request,
-        com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto> done);
-
- public static final
- com.google.protobuf.Descriptors.ServiceDescriptor
- getDescriptor() {
- return org.apache.hadoop.ipc.protobuf.TestRpcServiceProtos.getDescriptor().getServices().get(1);
- }
- public final com.google.protobuf.Descriptors.ServiceDescriptor
- getDescriptorForType() {
- return getDescriptor();
- }
-
- public final void callMethod(
- com.google.protobuf.Descriptors.MethodDescriptor method,
- com.google.protobuf.RpcController controller,
- com.google.protobuf.Message request,
- com.google.protobuf.RpcCallback<
- com.google.protobuf.Message> done) {
- if (method.getService() != getDescriptor()) {
- throw new java.lang.IllegalArgumentException(
- "Service.callMethod() given method descriptor for wrong " +
- "service type.");
- }
- switch(method.getIndex()) {
- case 0:
- this.ping2(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto)request,
- com.google.protobuf.RpcUtil.specializeCallback(
- done));
- return;
- case 1:
- this.echo2(controller, (org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto)request,
- com.google.protobuf.RpcUtil.specializeCallback(
- done));
- return;
- default:
- throw new java.lang.AssertionError("Can't get here.");
- }
- }
-
- public final com.google.protobuf.Message
- getRequestPrototype(
- com.google.protobuf.Descriptors.MethodDescriptor method) {
- if (method.getService() != getDescriptor()) {
- throw new java.lang.IllegalArgumentException(
- "Service.getRequestPrototype() given method " +
- "descriptor for wrong service type.");
- }
- switch(method.getIndex()) {
- case 0:
- return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto.getDefaultInstance();
- case 1:
- return org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto.getDefaultInstance();
- default:
- throw new java.lang.AssertionError("Can't get here.");
- }
- }
-
- public final com.google.protobuf.Message
- getResponsePrototype(
- com.google.protobuf.Descriptors.MethodDescriptor method) {
- if (method.getService() != getDescriptor()) {
- throw new java.lang.IllegalArgumentException(
- "Service.getResponsePrototype() given method " +
- "descriptor for wrong service type.");
- }
- switch(method.getIndex()) {
- case 0:
- return org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance();
- case 1:
- return org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.getDefaultInstance();
- default:
- throw new java.lang.AssertionError("Can't get here.");
- }
- }
-
- public static Stub newStub(
- com.google.protobuf.RpcChannel channel) {
- return new Stub(channel);
- }
-
- public static final class Stub extends org.apache.hadoop.ipc.protobuf.TestRpcServiceProtos.TestProtobufRpc2Proto implements Interface {
- private Stub(com.google.protobuf.RpcChannel channel) {
- this.channel = channel;
- }
-
- private final com.google.protobuf.RpcChannel channel;
-
- public com.google.protobuf.RpcChannel getChannel() {
- return channel;
- }
-
- public void ping2(
- com.google.protobuf.RpcController controller,
- org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request,
-          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto> done) {
- channel.callMethod(
- getDescriptor().getMethods().get(0),
- controller,
- request,
- org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance(),
- com.google.protobuf.RpcUtil.generalizeCallback(
- done,
- org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.class,
- org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance()));
- }
-
- public void echo2(
- com.google.protobuf.RpcController controller,
- org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto request,
-          com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto> done) {
- channel.callMethod(
- getDescriptor().getMethods().get(1),
- controller,
- request,
- org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.getDefaultInstance(),
- com.google.protobuf.RpcUtil.generalizeCallback(
- done,
- org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.class,
- org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.getDefaultInstance()));
- }
- }
-
- public static BlockingInterface newBlockingStub(
- com.google.protobuf.BlockingRpcChannel channel) {
- return new BlockingStub(channel);
- }
-
- public interface BlockingInterface {
- public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto ping2(
- com.google.protobuf.RpcController controller,
- org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
- throws com.google.protobuf.ServiceException;
-
- public org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto echo2(
- com.google.protobuf.RpcController controller,
- org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto request)
- throws com.google.protobuf.ServiceException;
- }
-
- private static final class BlockingStub implements BlockingInterface {
- private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
- this.channel = channel;
- }
-
- private final com.google.protobuf.BlockingRpcChannel channel;
-
- public org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto ping2(
- com.google.protobuf.RpcController controller,
- org.apache.hadoop.ipc.protobuf.TestProtos.EmptyRequestProto request)
- throws com.google.protobuf.ServiceException {
- return (org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto) channel.callBlockingMethod(
- getDescriptor().getMethods().get(0),
- controller,
- request,
- org.apache.hadoop.ipc.protobuf.TestProtos.EmptyResponseProto.getDefaultInstance());
- }
-
-
- public org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto echo2(
- com.google.protobuf.RpcController controller,
- org.apache.hadoop.ipc.protobuf.TestProtos.EchoRequestProto request)
- throws com.google.protobuf.ServiceException {
- return (org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto) channel.callBlockingMethod(
- getDescriptor().getMethods().get(1),
- controller,
- request,
- org.apache.hadoop.ipc.protobuf.TestProtos.EchoResponseProto.getDefaultInstance());
- }
-
- }
- }
-
-
- public static com.google.protobuf.Descriptors.FileDescriptor
- getDescriptor() {
- return descriptor;
- }
- private static com.google.protobuf.Descriptors.FileDescriptor
- descriptor;
- static {
- java.lang.String[] descriptorData = {
- "\n\026test_rpc_service.proto\032\ntest.proto2\250\001\n" +
- "\024TestProtobufRpcProto\022/\n\004ping\022\022.EmptyReq" +
- "uestProto\032\023.EmptyResponseProto\022-\n\004echo\022\021" +
- ".EchoRequestProto\032\022.EchoResponseProto\0220\n" +
- "\005error\022\022.EmptyRequestProto\032\023.EmptyRespon" +
- "seProto2y\n\025TestProtobufRpc2Proto\0220\n\005ping" +
- "2\022\022.EmptyRequestProto\032\023.EmptyResponsePro" +
- "to\022.\n\005echo2\022\021.EchoRequestProto\032\022.EchoRes" +
- "ponseProtoB<\n\036org.apache.hadoop.ipc.prot" +
- "obufB\024TestRpcServiceProtos\210\001\001\240\001\001"
- };
- com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
- new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
- public com.google.protobuf.ExtensionRegistry assignDescriptors(
- com.google.protobuf.Descriptors.FileDescriptor root) {
- descriptor = root;
- return null;
- }
- };
- com.google.protobuf.Descriptors.FileDescriptor
- .internalBuildGeneratedFileFrom(descriptorData,
- new com.google.protobuf.Descriptors.FileDescriptor[] {
- org.apache.hadoop.ipc.protobuf.TestProtos.getDescriptor(),
- }, assigner);
- }
-
- // @@protoc_insertion_point(outer_class_scope)
-}