+ implements org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2OrBuilder {
+ public static final com.google.protobuf.Descriptors.Descriptor
+ getDescriptor() {
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_SleepResponseProto2_descriptor;
+ }
+
+ protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internalGetFieldAccessorTable() {
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_SleepResponseProto2_fieldAccessorTable
+ .ensureFieldAccessorsInitialized(
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2.class, org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2.Builder.class);
+ }
+
+ // Construct using org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2.newBuilder()
+ private Builder() {
+ maybeForceBuilderInitialization();
+ }
+
+ private Builder(
+ com.google.protobuf.GeneratedMessage.BuilderParent parent) {
+ super(parent);
+ maybeForceBuilderInitialization();
+ }
+ private void maybeForceBuilderInitialization() {
+ if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
+ }
+ }
+ private static Builder create() {
+ return new Builder();
+ }
+
+ public Builder clear() {
+ super.clear();
+ receiveTime_ = 0L;
+ bitField0_ = (bitField0_ & ~0x00000001);
+ responseTime_ = 0L;
+ bitField0_ = (bitField0_ & ~0x00000002);
+ return this;
+ }
+
+ public Builder clone() {
+ return create().mergeFrom(buildPartial());
+ }
+
+ public com.google.protobuf.Descriptors.Descriptor
+ getDescriptorForType() {
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.internal_static_hadoop_common_SleepResponseProto2_descriptor;
+ }
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2 getDefaultInstanceForType() {
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2.getDefaultInstance();
+ }
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2 build() {
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2 result = buildPartial();
+ if (!result.isInitialized()) {
+ throw newUninitializedMessageException(result);
+ }
+ return result;
+ }
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2 buildPartial() {
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2 result = new org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2(this);
+ int from_bitField0_ = bitField0_;
+ int to_bitField0_ = 0;
+ if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
+ to_bitField0_ |= 0x00000001;
+ }
+ result.receiveTime_ = receiveTime_;
+ if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
+ to_bitField0_ |= 0x00000002;
+ }
+ result.responseTime_ = responseTime_;
+ result.bitField0_ = to_bitField0_;
+ onBuilt();
+ return result;
+ }
+
+ public Builder mergeFrom(com.google.protobuf.Message other) {
+ if (other instanceof org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2) {
+ return mergeFrom((org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2)other);
+ } else {
+ super.mergeFrom(other);
+ return this;
+ }
+ }
+
+ public Builder mergeFrom(org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2 other) {
+ if (other == org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2.getDefaultInstance()) return this;
+ if (other.hasReceiveTime()) {
+ setReceiveTime(other.getReceiveTime());
+ }
+ if (other.hasResponseTime()) {
+ setResponseTime(other.getResponseTime());
+ }
+ this.mergeUnknownFields(other.getUnknownFields());
+ return this;
+ }
+
+ public final boolean isInitialized() {
+ return true;
+ }
+
+ public Builder mergeFrom(
+ com.google.protobuf.CodedInputStream input,
+ com.google.protobuf.ExtensionRegistryLite extensionRegistry)
+ throws java.io.IOException {
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2 parsedMessage = null;
+ try {
+ parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ parsedMessage = (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto2) e.getUnfinishedMessage();
+ throw e;
+ } finally {
+ if (parsedMessage != null) {
+ mergeFrom(parsedMessage);
+ }
+ }
+ return this;
+ }
+ private int bitField0_;
+
+ // optional int64 receive_time = 1;
+ private long receiveTime_ ;
+ /**
+ * optional int64 receive_time = 1;
+ */
+ public boolean hasReceiveTime() {
+ return ((bitField0_ & 0x00000001) == 0x00000001);
+ }
+ /**
+ * optional int64 receive_time = 1;
+ */
+ public long getReceiveTime() {
+ return receiveTime_;
+ }
+ /**
+ * optional int64 receive_time = 1;
+ */
+ public Builder setReceiveTime(long value) {
+ bitField0_ |= 0x00000001;
+ receiveTime_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * optional int64 receive_time = 1;
+ */
+ public Builder clearReceiveTime() {
+ bitField0_ = (bitField0_ & ~0x00000001);
+ receiveTime_ = 0L;
+ onChanged();
+ return this;
+ }
+
+ // optional int64 response_time = 2;
+ private long responseTime_ ;
+ /**
+ * optional int64 response_time = 2;
+ */
+ public boolean hasResponseTime() {
+ return ((bitField0_ & 0x00000002) == 0x00000002);
+ }
+ /**
+ * optional int64 response_time = 2;
+ */
+ public long getResponseTime() {
+ return responseTime_;
+ }
+ /**
+ * optional int64 response_time = 2;
+ */
+ public Builder setResponseTime(long value) {
+ bitField0_ |= 0x00000002;
+ responseTime_ = value;
+ onChanged();
+ return this;
+ }
+ /**
+ * optional int64 response_time = 2;
+ */
+ public Builder clearResponseTime() {
+ bitField0_ = (bitField0_ & ~0x00000002);
+ responseTime_ = 0L;
+ onChanged();
+ return this;
+ }
+
+ // @@protoc_insertion_point(builder_scope:hadoop.common.SleepResponseProto2)
+ }
+
+ static {
+ defaultInstance = new SleepResponseProto2(true);
+ defaultInstance.initFields();
+ }
+
+ // @@protoc_insertion_point(class_scope:hadoop.common.SleepResponseProto2)
+ }
+
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_hadoop_common_EmptyRequestProto_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_hadoop_common_EmptyRequestProto_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_hadoop_common_EmptyResponseProto_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_hadoop_common_EmptyResponseProto_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_hadoop_common_EchoRequestProto_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_hadoop_common_EchoRequestProto_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_hadoop_common_EchoResponseProto_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_hadoop_common_EchoResponseProto_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_hadoop_common_OptRequestProto_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_hadoop_common_OptRequestProto_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_hadoop_common_OptResponseProto_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_hadoop_common_OptResponseProto_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_hadoop_common_SleepRequestProto_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_hadoop_common_SleepRequestProto_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_hadoop_common_SleepResponseProto_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_hadoop_common_SleepResponseProto_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_hadoop_common_SlowPingRequestProto_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_hadoop_common_SlowPingRequestProto_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_hadoop_common_EchoRequestProto2_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_hadoop_common_EchoRequestProto2_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_hadoop_common_EchoResponseProto2_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_hadoop_common_EchoResponseProto2_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_hadoop_common_AddRequestProto_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_hadoop_common_AddRequestProto_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_hadoop_common_AddRequestProto2_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_hadoop_common_AddRequestProto2_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_hadoop_common_AddResponseProto_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_hadoop_common_AddResponseProto_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_hadoop_common_ExchangeRequestProto_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_hadoop_common_ExchangeRequestProto_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_hadoop_common_ExchangeResponseProto_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_hadoop_common_ExchangeResponseProto_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_hadoop_common_AuthMethodResponseProto_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_hadoop_common_AuthMethodResponseProto_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_hadoop_common_UserResponseProto_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_hadoop_common_UserResponseProto_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_hadoop_common_SleepRequestProto2_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_hadoop_common_SleepRequestProto2_fieldAccessorTable;
+ private static com.google.protobuf.Descriptors.Descriptor
+ internal_static_hadoop_common_SleepResponseProto2_descriptor;
+ private static
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable
+ internal_static_hadoop_common_SleepResponseProto2_fieldAccessorTable;
+
+ public static com.google.protobuf.Descriptors.FileDescriptor
+ getDescriptor() {
+ return descriptor;
+ }
+ private static com.google.protobuf.Descriptors.FileDescriptor
+ descriptor;
+ static {
+ java.lang.String[] descriptorData = {
+ "\n\021test_legacy.proto\022\rhadoop.common\"\023\n\021Em" +
+ "ptyRequestProto\"\024\n\022EmptyResponseProto\"#\n" +
+ "\020EchoRequestProto\022\017\n\007message\030\001 \002(\t\"$\n\021Ec" +
+ "hoResponseProto\022\017\n\007message\030\001 \002(\t\"\"\n\017OptR" +
+ "equestProto\022\017\n\007message\030\001 \001(\t\"#\n\020OptRespo" +
+ "nseProto\022\017\n\007message\030\001 \001(\t\")\n\021SleepReques" +
+ "tProto\022\024\n\014milliSeconds\030\001 \002(\005\"\024\n\022SleepRes" +
+ "ponseProto\"*\n\024SlowPingRequestProto\022\022\n\nsh" +
+ "ouldSlow\030\001 \002(\010\"$\n\021EchoRequestProto2\022\017\n\007m" +
+ "essage\030\001 \003(\t\"%\n\022EchoResponseProto2\022\017\n\007me",
+ "ssage\030\001 \003(\t\"1\n\017AddRequestProto\022\016\n\006param1" +
+ "\030\001 \002(\005\022\016\n\006param2\030\002 \002(\005\"\"\n\020AddRequestProt" +
+ "o2\022\016\n\006params\030\001 \003(\005\"\"\n\020AddResponseProto\022\016" +
+ "\n\006result\030\001 \002(\005\"&\n\024ExchangeRequestProto\022\016" +
+ "\n\006values\030\001 \003(\005\"\'\n\025ExchangeResponseProto\022" +
+ "\016\n\006values\030\001 \003(\005\">\n\027AuthMethodResponsePro" +
+ "to\022\014\n\004code\030\001 \002(\005\022\025\n\rmechanismName\030\002 \002(\t\"" +
+ "!\n\021UserResponseProto\022\014\n\004user\030\001 \002(\t\"(\n\022Sl" +
+ "eepRequestProto2\022\022\n\nsleep_time\030\001 \001(\003\"B\n\023" +
+ "SleepResponseProto2\022\024\n\014receive_time\030\001 \001(",
+ "\003\022\025\n\rresponse_time\030\002 \001(\003B5\n\036org.apache.h" +
+ "adoop.ipc.protobufB\020TestProtosLegacy\240\001\001"
+ };
+ com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
+ new com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner() {
+ public com.google.protobuf.ExtensionRegistry assignDescriptors(
+ com.google.protobuf.Descriptors.FileDescriptor root) {
+ descriptor = root;
+ internal_static_hadoop_common_EmptyRequestProto_descriptor =
+ getDescriptor().getMessageTypes().get(0);
+ internal_static_hadoop_common_EmptyRequestProto_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_hadoop_common_EmptyRequestProto_descriptor,
+ new java.lang.String[] { });
+ internal_static_hadoop_common_EmptyResponseProto_descriptor =
+ getDescriptor().getMessageTypes().get(1);
+ internal_static_hadoop_common_EmptyResponseProto_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_hadoop_common_EmptyResponseProto_descriptor,
+ new java.lang.String[] { });
+ internal_static_hadoop_common_EchoRequestProto_descriptor =
+ getDescriptor().getMessageTypes().get(2);
+ internal_static_hadoop_common_EchoRequestProto_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_hadoop_common_EchoRequestProto_descriptor,
+ new java.lang.String[] { "Message", });
+ internal_static_hadoop_common_EchoResponseProto_descriptor =
+ getDescriptor().getMessageTypes().get(3);
+ internal_static_hadoop_common_EchoResponseProto_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_hadoop_common_EchoResponseProto_descriptor,
+ new java.lang.String[] { "Message", });
+ internal_static_hadoop_common_OptRequestProto_descriptor =
+ getDescriptor().getMessageTypes().get(4);
+ internal_static_hadoop_common_OptRequestProto_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_hadoop_common_OptRequestProto_descriptor,
+ new java.lang.String[] { "Message", });
+ internal_static_hadoop_common_OptResponseProto_descriptor =
+ getDescriptor().getMessageTypes().get(5);
+ internal_static_hadoop_common_OptResponseProto_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_hadoop_common_OptResponseProto_descriptor,
+ new java.lang.String[] { "Message", });
+ internal_static_hadoop_common_SleepRequestProto_descriptor =
+ getDescriptor().getMessageTypes().get(6);
+ internal_static_hadoop_common_SleepRequestProto_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_hadoop_common_SleepRequestProto_descriptor,
+ new java.lang.String[] { "MilliSeconds", });
+ internal_static_hadoop_common_SleepResponseProto_descriptor =
+ getDescriptor().getMessageTypes().get(7);
+ internal_static_hadoop_common_SleepResponseProto_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_hadoop_common_SleepResponseProto_descriptor,
+ new java.lang.String[] { });
+ internal_static_hadoop_common_SlowPingRequestProto_descriptor =
+ getDescriptor().getMessageTypes().get(8);
+ internal_static_hadoop_common_SlowPingRequestProto_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_hadoop_common_SlowPingRequestProto_descriptor,
+ new java.lang.String[] { "ShouldSlow", });
+ internal_static_hadoop_common_EchoRequestProto2_descriptor =
+ getDescriptor().getMessageTypes().get(9);
+ internal_static_hadoop_common_EchoRequestProto2_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_hadoop_common_EchoRequestProto2_descriptor,
+ new java.lang.String[] { "Message", });
+ internal_static_hadoop_common_EchoResponseProto2_descriptor =
+ getDescriptor().getMessageTypes().get(10);
+ internal_static_hadoop_common_EchoResponseProto2_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_hadoop_common_EchoResponseProto2_descriptor,
+ new java.lang.String[] { "Message", });
+ internal_static_hadoop_common_AddRequestProto_descriptor =
+ getDescriptor().getMessageTypes().get(11);
+ internal_static_hadoop_common_AddRequestProto_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_hadoop_common_AddRequestProto_descriptor,
+ new java.lang.String[] { "Param1", "Param2", });
+ internal_static_hadoop_common_AddRequestProto2_descriptor =
+ getDescriptor().getMessageTypes().get(12);
+ internal_static_hadoop_common_AddRequestProto2_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_hadoop_common_AddRequestProto2_descriptor,
+ new java.lang.String[] { "Params", });
+ internal_static_hadoop_common_AddResponseProto_descriptor =
+ getDescriptor().getMessageTypes().get(13);
+ internal_static_hadoop_common_AddResponseProto_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_hadoop_common_AddResponseProto_descriptor,
+ new java.lang.String[] { "Result", });
+ internal_static_hadoop_common_ExchangeRequestProto_descriptor =
+ getDescriptor().getMessageTypes().get(14);
+ internal_static_hadoop_common_ExchangeRequestProto_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_hadoop_common_ExchangeRequestProto_descriptor,
+ new java.lang.String[] { "Values", });
+ internal_static_hadoop_common_ExchangeResponseProto_descriptor =
+ getDescriptor().getMessageTypes().get(15);
+ internal_static_hadoop_common_ExchangeResponseProto_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_hadoop_common_ExchangeResponseProto_descriptor,
+ new java.lang.String[] { "Values", });
+ internal_static_hadoop_common_AuthMethodResponseProto_descriptor =
+ getDescriptor().getMessageTypes().get(16);
+ internal_static_hadoop_common_AuthMethodResponseProto_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_hadoop_common_AuthMethodResponseProto_descriptor,
+ new java.lang.String[] { "Code", "MechanismName", });
+ internal_static_hadoop_common_UserResponseProto_descriptor =
+ getDescriptor().getMessageTypes().get(17);
+ internal_static_hadoop_common_UserResponseProto_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_hadoop_common_UserResponseProto_descriptor,
+ new java.lang.String[] { "User", });
+ internal_static_hadoop_common_SleepRequestProto2_descriptor =
+ getDescriptor().getMessageTypes().get(18);
+ internal_static_hadoop_common_SleepRequestProto2_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_hadoop_common_SleepRequestProto2_descriptor,
+ new java.lang.String[] { "SleepTime", });
+ internal_static_hadoop_common_SleepResponseProto2_descriptor =
+ getDescriptor().getMessageTypes().get(19);
+ internal_static_hadoop_common_SleepResponseProto2_fieldAccessorTable = new
+ com.google.protobuf.GeneratedMessage.FieldAccessorTable(
+ internal_static_hadoop_common_SleepResponseProto2_descriptor,
+ new java.lang.String[] { "ReceiveTime", "ResponseTime", });
+ return null;
+ }
+ };
+ com.google.protobuf.Descriptors.FileDescriptor
+ .internalBuildGeneratedFileFrom(descriptorData,
+ new com.google.protobuf.Descriptors.FileDescriptor[] {
+ }, assigner);
+ }
+
+ // @@protoc_insertion_point(outer_class_scope)
+}
diff --git a/hadoop-common-project/hadoop-common/src/test/arm-java/org/apache/hadoop/ipc/protobuf/TestRpcServiceProtosLegacy.java b/hadoop-common-project/hadoop-common/src/test/arm-java/org/apache/hadoop/ipc/protobuf/TestRpcServiceProtosLegacy.java
new file mode 100644
index 0000000000..26cef9c755
--- /dev/null
+++ b/hadoop-common-project/hadoop-common/src/test/arm-java/org/apache/hadoop/ipc/protobuf/TestRpcServiceProtosLegacy.java
@@ -0,0 +1,3313 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+// This class is added to the source tree because a protoc 2.5.0 executable
+// for ARM is not available to generate the same code.
+// Generated by the protocol buffer compiler. DO NOT EDIT!
+// source: test_rpc_service_legacy.proto
+
+package org.apache.hadoop.ipc.protobuf;
+
+public final class TestRpcServiceProtosLegacy {
+ private TestRpcServiceProtosLegacy() {}
+ public static void registerAllExtensions(
+ com.google.protobuf.ExtensionRegistry registry) {
+ }
+ /**
+ * Protobuf service {@code hadoop.common.TestProtobufRpcProto}
+ *
+ *
+ **
+ * A protobuf service for use in tests
+ *
+ */
+ public static abstract class TestProtobufRpcProto
+ implements com.google.protobuf.Service {
+ protected TestProtobufRpcProto() {}
+
+ public interface Interface {
+ /**
+ * rpc ping(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);
+ */
+ public abstract void ping(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback done);
+
+ /**
+ * rpc echo(.hadoop.common.EchoRequestProto) returns (.hadoop.common.EchoResponseProto);
+ */
+ public abstract void echo(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto request,
+ com.google.protobuf.RpcCallback done);
+
+ /**
+ * rpc error(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);
+ */
+ public abstract void error(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback done);
+
+ /**
+ * rpc error2(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);
+ */
+ public abstract void error2(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback done);
+
+ /**
+ * rpc slowPing(.hadoop.common.SlowPingRequestProto) returns (.hadoop.common.EmptyResponseProto);
+ */
+ public abstract void slowPing(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto request,
+ com.google.protobuf.RpcCallback done);
+
+ /**
+ * rpc echo2(.hadoop.common.EchoRequestProto2) returns (.hadoop.common.EchoResponseProto2);
+ */
+ public abstract void echo2(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2 request,
+ com.google.protobuf.RpcCallback done);
+
+ /**
+ * rpc add(.hadoop.common.AddRequestProto) returns (.hadoop.common.AddResponseProto);
+ */
+ public abstract void add(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto request,
+ com.google.protobuf.RpcCallback done);
+
+ /**
+ * rpc add2(.hadoop.common.AddRequestProto2) returns (.hadoop.common.AddResponseProto);
+ */
+ public abstract void add2(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2 request,
+ com.google.protobuf.RpcCallback done);
+
+ /**
+ * rpc testServerGet(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);
+ */
+ public abstract void testServerGet(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback done);
+
+ /**
+ * rpc exchange(.hadoop.common.ExchangeRequestProto) returns (.hadoop.common.ExchangeResponseProto);
+ */
+ public abstract void exchange(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto request,
+ com.google.protobuf.RpcCallback done);
+
+ /**
+ * rpc sleep(.hadoop.common.SleepRequestProto) returns (.hadoop.common.EmptyResponseProto);
+ */
+ public abstract void sleep(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto request,
+ com.google.protobuf.RpcCallback done);
+
+ /**
+ * rpc lockAndSleep(.hadoop.common.SleepRequestProto) returns (.hadoop.common.EmptyResponseProto);
+ */
+ public abstract void lockAndSleep(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto request,
+ com.google.protobuf.RpcCallback done);
+
+ /**
+ * rpc getAuthMethod(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.AuthMethodResponseProto);
+ */
+ public abstract void getAuthMethod(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback done);
+
+ /**
+ * rpc getAuthUser(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.UserResponseProto);
+ */
+ public abstract void getAuthUser(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback done);
+
+ /**
+ * rpc echoPostponed(.hadoop.common.EchoRequestProto) returns (.hadoop.common.EchoResponseProto);
+ */
+ public abstract void echoPostponed(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto request,
+ com.google.protobuf.RpcCallback done);
+
+ /**
+ * rpc sendPostponed(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);
+ */
+ public abstract void sendPostponed(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback done);
+
+ /**
+ * rpc getCurrentUser(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.UserResponseProto);
+ */
+ public abstract void getCurrentUser(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback done);
+
+ /**
+ * rpc getServerRemoteUser(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.UserResponseProto);
+ */
+ public abstract void getServerRemoteUser(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback done);
+
+ }
+
+ public static com.google.protobuf.Service newReflectiveService(
+ final Interface impl) {
+ return new TestProtobufRpcProto() {
+ @java.lang.Override
+ public void ping(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback done) {
+ impl.ping(controller, request, done);
+ }
+
+ @java.lang.Override
+ public void echo(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto request,
+ com.google.protobuf.RpcCallback done) {
+ impl.echo(controller, request, done);
+ }
+
+ @java.lang.Override
+ public void error(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback done) {
+ impl.error(controller, request, done);
+ }
+
+ @java.lang.Override
+ public void error2(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback done) {
+ impl.error2(controller, request, done);
+ }
+
+ @java.lang.Override
+ public void slowPing(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto request,
+ com.google.protobuf.RpcCallback done) {
+ impl.slowPing(controller, request, done);
+ }
+
+ @java.lang.Override
+ public void echo2(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2 request,
+ com.google.protobuf.RpcCallback done) {
+ impl.echo2(controller, request, done);
+ }
+
+ @java.lang.Override
+ public void add(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto request,
+ com.google.protobuf.RpcCallback done) {
+ impl.add(controller, request, done);
+ }
+
+ @java.lang.Override
+ public void add2(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2 request,
+ com.google.protobuf.RpcCallback done) {
+ impl.add2(controller, request, done);
+ }
+
+ @java.lang.Override
+ public void testServerGet(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback done) {
+ impl.testServerGet(controller, request, done);
+ }
+
+ @java.lang.Override
+ public void exchange(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto request,
+ com.google.protobuf.RpcCallback done) {
+ impl.exchange(controller, request, done);
+ }
+
+ @java.lang.Override
+ public void sleep(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto request,
+ com.google.protobuf.RpcCallback done) {
+ impl.sleep(controller, request, done);
+ }
+
+ @java.lang.Override
+ public void lockAndSleep(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto request,
+ com.google.protobuf.RpcCallback done) {
+ impl.lockAndSleep(controller, request, done);
+ }
+
+ @java.lang.Override
+ public void getAuthMethod(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback done) {
+ impl.getAuthMethod(controller, request, done);
+ }
+
+ @java.lang.Override
+ public void getAuthUser(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback done) {
+ impl.getAuthUser(controller, request, done);
+ }
+
+ @java.lang.Override
+ public void echoPostponed(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto request,
+ com.google.protobuf.RpcCallback done) {
+ impl.echoPostponed(controller, request, done);
+ }
+
+ @java.lang.Override
+ public void sendPostponed(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback done) {
+ impl.sendPostponed(controller, request, done);
+ }
+
+ @java.lang.Override
+ public void getCurrentUser(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback done) {
+ impl.getCurrentUser(controller, request, done);
+ }
+
+ @java.lang.Override
+ public void getServerRemoteUser(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback done) {
+ impl.getServerRemoteUser(controller, request, done);
+ }
+
+ };
+ }
+
+ // NOTE(review): protoc-generated (legacy protobuf "generic services") code —
+ // do not hand-edit; regenerate from the .proto instead. This factory adapts a
+ // BlockingInterface implementation into a reflective BlockingService whose
+ // dispatch is driven by the method's index in the service descriptor.
+ public static com.google.protobuf.BlockingService
+ newReflectiveBlockingService(final BlockingInterface impl) {
+ return new com.google.protobuf.BlockingService() {
+ public final com.google.protobuf.Descriptors.ServiceDescriptor
+ getDescriptorForType() {
+ return getDescriptor();
+ }
+
+ // Dispatches a blocking call to {@code impl}; the case order must match the
+ // rpc declaration order in the .proto file (indexes 0..17). The downcasts
+ // are unchecked by design — the framework supplies the prototype-matched
+ // request message.
+ public final com.google.protobuf.Message callBlockingMethod(
+ com.google.protobuf.Descriptors.MethodDescriptor method,
+ com.google.protobuf.RpcController controller,
+ com.google.protobuf.Message request)
+ throws com.google.protobuf.ServiceException {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.callBlockingMethod() given method descriptor for " +
+ "wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return impl.ping(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request);
+ case 1:
+ return impl.echo(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto)request);
+ case 2:
+ return impl.error(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request);
+ case 3:
+ return impl.error2(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request);
+ case 4:
+ return impl.slowPing(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto)request);
+ case 5:
+ return impl.echo2(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2)request);
+ case 6:
+ return impl.add(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto)request);
+ case 7:
+ return impl.add2(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2)request);
+ case 8:
+ return impl.testServerGet(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request);
+ case 9:
+ return impl.exchange(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto)request);
+ case 10:
+ return impl.sleep(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto)request);
+ case 11:
+ return impl.lockAndSleep(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto)request);
+ case 12:
+ return impl.getAuthMethod(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request);
+ case 13:
+ return impl.getAuthUser(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request);
+ case 14:
+ return impl.echoPostponed(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto)request);
+ case 15:
+ return impl.sendPostponed(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request);
+ case 16:
+ return impl.getCurrentUser(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request);
+ case 17:
+ return impl.getServerRemoteUser(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request);
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ // Default request message (prototype) for each method index; mirrors the
+ // request types used in callBlockingMethod above.
+ public final com.google.protobuf.Message
+ getRequestPrototype(
+ com.google.protobuf.Descriptors.MethodDescriptor method) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.getRequestPrototype() given method " +
+ "descriptor for wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+ case 1:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto.getDefaultInstance();
+ case 2:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+ case 3:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+ case 4:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto.getDefaultInstance();
+ case 5:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2.getDefaultInstance();
+ case 6:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto.getDefaultInstance();
+ case 7:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2.getDefaultInstance();
+ case 8:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+ case 9:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto.getDefaultInstance();
+ case 10:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto.getDefaultInstance();
+ case 11:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto.getDefaultInstance();
+ case 12:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+ case 13:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+ case 14:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto.getDefaultInstance();
+ case 15:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+ case 16:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+ case 17:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ // Default response message (prototype) for each method index.
+ public final com.google.protobuf.Message
+ getResponsePrototype(
+ com.google.protobuf.Descriptors.MethodDescriptor method) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.getResponsePrototype() given method " +
+ "descriptor for wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
+ case 1:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.getDefaultInstance();
+ case 2:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
+ case 3:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
+ case 4:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
+ case 5:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2.getDefaultInstance();
+ case 6:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto.getDefaultInstance();
+ case 7:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto.getDefaultInstance();
+ case 8:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
+ case 9:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto.getDefaultInstance();
+ case 10:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
+ case 11:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
+ case 12:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto.getDefaultInstance();
+ case 13:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.getDefaultInstance();
+ case 14:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.getDefaultInstance();
+ case 15:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
+ case 16:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.getDefaultInstance();
+ case 17:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.getDefaultInstance();
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ };
+ }
+
+ /**
+ * rpc ping(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);
+ */
+ public abstract void ping(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback done);
+
+ /**
+ * rpc echo(.hadoop.common.EchoRequestProto) returns (.hadoop.common.EchoResponseProto);
+ */
+ public abstract void echo(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto request,
+ com.google.protobuf.RpcCallback done);
+
+ /**
+ * rpc error(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);
+ */
+ public abstract void error(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback done);
+
+ /**
+ * rpc error2(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);
+ */
+ public abstract void error2(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback done);
+
+ /**
+ * rpc slowPing(.hadoop.common.SlowPingRequestProto) returns (.hadoop.common.EmptyResponseProto);
+ */
+ public abstract void slowPing(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto request,
+ com.google.protobuf.RpcCallback done);
+
+ /**
+ * rpc echo2(.hadoop.common.EchoRequestProto2) returns (.hadoop.common.EchoResponseProto2);
+ */
+ public abstract void echo2(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2 request,
+ com.google.protobuf.RpcCallback done);
+
+ /**
+ * rpc add(.hadoop.common.AddRequestProto) returns (.hadoop.common.AddResponseProto);
+ */
+ public abstract void add(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto request,
+ com.google.protobuf.RpcCallback done);
+
+ /**
+ * rpc add2(.hadoop.common.AddRequestProto2) returns (.hadoop.common.AddResponseProto);
+ */
+ public abstract void add2(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2 request,
+ com.google.protobuf.RpcCallback done);
+
+ /**
+ * rpc testServerGet(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);
+ */
+ public abstract void testServerGet(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback done);
+
+ /**
+ * rpc exchange(.hadoop.common.ExchangeRequestProto) returns (.hadoop.common.ExchangeResponseProto);
+ */
+ public abstract void exchange(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto request,
+ com.google.protobuf.RpcCallback done);
+
+ /**
+ * rpc sleep(.hadoop.common.SleepRequestProto) returns (.hadoop.common.EmptyResponseProto);
+ */
+ public abstract void sleep(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto request,
+ com.google.protobuf.RpcCallback done);
+
+ /**
+ * rpc lockAndSleep(.hadoop.common.SleepRequestProto) returns (.hadoop.common.EmptyResponseProto);
+ */
+ public abstract void lockAndSleep(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto request,
+ com.google.protobuf.RpcCallback done);
+
+ /**
+ * rpc getAuthMethod(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.AuthMethodResponseProto);
+ */
+ public abstract void getAuthMethod(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback done);
+
+ /**
+ * rpc getAuthUser(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.UserResponseProto);
+ */
+ public abstract void getAuthUser(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback done);
+
+ /**
+ * rpc echoPostponed(.hadoop.common.EchoRequestProto) returns (.hadoop.common.EchoResponseProto);
+ */
+ public abstract void echoPostponed(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto request,
+ com.google.protobuf.RpcCallback done);
+
+ /**
+ * rpc sendPostponed(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);
+ */
+ public abstract void sendPostponed(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback done);
+
+ /**
+ * rpc getCurrentUser(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.UserResponseProto);
+ */
+ public abstract void getCurrentUser(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback done);
+
+ /**
+ * rpc getServerRemoteUser(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.UserResponseProto);
+ */
+ public abstract void getServerRemoteUser(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback done);
+
+ // Descriptor accessors: this service is the first (index 0) service declared
+ // in TestRpcServiceProtosLegacy's file descriptor.
+ public static final
+ com.google.protobuf.Descriptors.ServiceDescriptor
+ getDescriptor() {
+ return org.apache.hadoop.ipc.protobuf.TestRpcServiceProtosLegacy.getDescriptor().getServices().get(0);
+ }
+ public final com.google.protobuf.Descriptors.ServiceDescriptor
+ getDescriptorForType() {
+ return getDescriptor();
+ }
+
+ // Reflective, callback-style dispatch: routes the invocation to the matching
+ // abstract method by its index in the service descriptor. Case order must
+ // match the rpc declaration order in the .proto file; specializeCallback
+ // narrows the Message callback to the concrete response type.
+ public final void callMethod(
+ com.google.protobuf.Descriptors.MethodDescriptor method,
+ com.google.protobuf.RpcController controller,
+ com.google.protobuf.Message request,
+ com.google.protobuf.RpcCallback<
+ com.google.protobuf.Message> done) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.callMethod() given method descriptor for wrong " +
+ "service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ this.ping(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request,
+ com.google.protobuf.RpcUtil.specializeCallback(
+ done));
+ return;
+ case 1:
+ this.echo(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto)request,
+ com.google.protobuf.RpcUtil.specializeCallback(
+ done));
+ return;
+ case 2:
+ this.error(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request,
+ com.google.protobuf.RpcUtil.specializeCallback(
+ done));
+ return;
+ case 3:
+ this.error2(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request,
+ com.google.protobuf.RpcUtil.specializeCallback(
+ done));
+ return;
+ case 4:
+ this.slowPing(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto)request,
+ com.google.protobuf.RpcUtil.specializeCallback(
+ done));
+ return;
+ case 5:
+ this.echo2(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2)request,
+ com.google.protobuf.RpcUtil.specializeCallback(
+ done));
+ return;
+ case 6:
+ this.add(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto)request,
+ com.google.protobuf.RpcUtil.specializeCallback(
+ done));
+ return;
+ case 7:
+ this.add2(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2)request,
+ com.google.protobuf.RpcUtil.specializeCallback(
+ done));
+ return;
+ case 8:
+ this.testServerGet(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request,
+ com.google.protobuf.RpcUtil.specializeCallback(
+ done));
+ return;
+ case 9:
+ this.exchange(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto)request,
+ com.google.protobuf.RpcUtil.specializeCallback(
+ done));
+ return;
+ case 10:
+ this.sleep(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto)request,
+ com.google.protobuf.RpcUtil.specializeCallback(
+ done));
+ return;
+ case 11:
+ this.lockAndSleep(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto)request,
+ com.google.protobuf.RpcUtil.specializeCallback(
+ done));
+ return;
+ case 12:
+ this.getAuthMethod(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request,
+ com.google.protobuf.RpcUtil.specializeCallback(
+ done));
+ return;
+ case 13:
+ this.getAuthUser(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request,
+ com.google.protobuf.RpcUtil.specializeCallback(
+ done));
+ return;
+ case 14:
+ this.echoPostponed(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto)request,
+ com.google.protobuf.RpcUtil.specializeCallback(
+ done));
+ return;
+ case 15:
+ this.sendPostponed(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request,
+ com.google.protobuf.RpcUtil.specializeCallback(
+ done));
+ return;
+ case 16:
+ this.getCurrentUser(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request,
+ com.google.protobuf.RpcUtil.specializeCallback(
+ done));
+ return;
+ case 17:
+ this.getServerRemoteUser(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request,
+ com.google.protobuf.RpcUtil.specializeCallback(
+ done));
+ return;
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ // Default request message (prototype) for each method index; must mirror the
+ // request types accepted by callMethod above.
+ public final com.google.protobuf.Message
+ getRequestPrototype(
+ com.google.protobuf.Descriptors.MethodDescriptor method) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.getRequestPrototype() given method " +
+ "descriptor for wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+ case 1:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto.getDefaultInstance();
+ case 2:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+ case 3:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+ case 4:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto.getDefaultInstance();
+ case 5:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2.getDefaultInstance();
+ case 6:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto.getDefaultInstance();
+ case 7:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2.getDefaultInstance();
+ case 8:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+ case 9:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto.getDefaultInstance();
+ case 10:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto.getDefaultInstance();
+ case 11:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto.getDefaultInstance();
+ case 12:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+ case 13:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+ case 14:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto.getDefaultInstance();
+ case 15:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+ case 16:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+ case 17:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ // Default response message (prototype) for each method index; must mirror the
+ // response types produced for each rpc.
+ public final com.google.protobuf.Message
+ getResponsePrototype(
+ com.google.protobuf.Descriptors.MethodDescriptor method) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.getResponsePrototype() given method " +
+ "descriptor for wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
+ case 1:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.getDefaultInstance();
+ case 2:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
+ case 3:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
+ case 4:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
+ case 5:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2.getDefaultInstance();
+ case 6:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto.getDefaultInstance();
+ case 7:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto.getDefaultInstance();
+ case 8:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
+ case 9:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto.getDefaultInstance();
+ case 10:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
+ case 11:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
+ case 12:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto.getDefaultInstance();
+ case 13:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.getDefaultInstance();
+ case 14:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.getDefaultInstance();
+ case 15:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
+ case 16:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.getDefaultInstance();
+ case 17:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.getDefaultInstance();
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ // Creates a client-side async stub that issues calls over the given channel.
+ public static Stub newStub(
+ com.google.protobuf.RpcChannel channel) {
+ return new Stub(channel);
+ }
+
+ public static final class Stub extends org.apache.hadoop.ipc.protobuf.TestRpcServiceProtosLegacy.TestProtobufRpcProto implements Interface {
+ private Stub(com.google.protobuf.RpcChannel channel) {
+ this.channel = channel;
+ }
+
+ private final com.google.protobuf.RpcChannel channel;
+
+ public com.google.protobuf.RpcChannel getChannel() {
+ return channel;
+ }
+
+ public void ping(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback done) {
+ channel.callMethod(
+ getDescriptor().getMethods().get(0),
+ controller,
+ request,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance(),
+ com.google.protobuf.RpcUtil.generalizeCallback(
+ done,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.class,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance()));
+ }
+
+ public void echo(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto request,
+ com.google.protobuf.RpcCallback done) {
+ channel.callMethod(
+ getDescriptor().getMethods().get(1),
+ controller,
+ request,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.getDefaultInstance(),
+ com.google.protobuf.RpcUtil.generalizeCallback(
+ done,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.class,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.getDefaultInstance()));
+ }
+
+ public void error(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback done) {
+ channel.callMethod(
+ getDescriptor().getMethods().get(2),
+ controller,
+ request,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance(),
+ com.google.protobuf.RpcUtil.generalizeCallback(
+ done,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.class,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance()));
+ }
+
+ public void error2(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback done) {
+ channel.callMethod(
+ getDescriptor().getMethods().get(3),
+ controller,
+ request,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance(),
+ com.google.protobuf.RpcUtil.generalizeCallback(
+ done,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.class,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance()));
+ }
+
+ public void slowPing(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto request,
+ com.google.protobuf.RpcCallback done) {
+ channel.callMethod(
+ getDescriptor().getMethods().get(4),
+ controller,
+ request,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance(),
+ com.google.protobuf.RpcUtil.generalizeCallback(
+ done,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.class,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance()));
+ }
+
+ public void echo2(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2 request,
+ com.google.protobuf.RpcCallback done) {
+ channel.callMethod(
+ getDescriptor().getMethods().get(5),
+ controller,
+ request,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2.getDefaultInstance(),
+ com.google.protobuf.RpcUtil.generalizeCallback(
+ done,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2.class,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2.getDefaultInstance()));
+ }
+
+ public void add(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto request,
+ com.google.protobuf.RpcCallback done) {
+ channel.callMethod(
+ getDescriptor().getMethods().get(6),
+ controller,
+ request,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto.getDefaultInstance(),
+ com.google.protobuf.RpcUtil.generalizeCallback(
+ done,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto.class,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto.getDefaultInstance()));
+ }
+
+ public void add2(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2 request,
+ com.google.protobuf.RpcCallback done) {
+ channel.callMethod(
+ getDescriptor().getMethods().get(7),
+ controller,
+ request,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto.getDefaultInstance(),
+ com.google.protobuf.RpcUtil.generalizeCallback(
+ done,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto.class,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto.getDefaultInstance()));
+ }
+
+ public void testServerGet(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback done) {
+ channel.callMethod(
+ getDescriptor().getMethods().get(8),
+ controller,
+ request,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance(),
+ com.google.protobuf.RpcUtil.generalizeCallback(
+ done,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.class,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance()));
+ }
+
+ public void exchange(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto request,
+ com.google.protobuf.RpcCallback done) {
+ channel.callMethod(
+ getDescriptor().getMethods().get(9),
+ controller,
+ request,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto.getDefaultInstance(),
+ com.google.protobuf.RpcUtil.generalizeCallback(
+ done,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto.class,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto.getDefaultInstance()));
+ }
+
+ public void sleep(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto request,
+ com.google.protobuf.RpcCallback done) {
+ channel.callMethod(
+ getDescriptor().getMethods().get(10),
+ controller,
+ request,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance(),
+ com.google.protobuf.RpcUtil.generalizeCallback(
+ done,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.class,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance()));
+ }
+
+ public void lockAndSleep(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto request,
+ com.google.protobuf.RpcCallback done) {
+ channel.callMethod(
+ getDescriptor().getMethods().get(11),
+ controller,
+ request,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance(),
+ com.google.protobuf.RpcUtil.generalizeCallback(
+ done,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.class,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance()));
+ }
+
+ public void getAuthMethod(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback done) {
+ channel.callMethod(
+ getDescriptor().getMethods().get(12),
+ controller,
+ request,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto.getDefaultInstance(),
+ com.google.protobuf.RpcUtil.generalizeCallback(
+ done,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto.class,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto.getDefaultInstance()));
+ }
+
+ // Async RPC getAuthUser (service method index 13): EmptyRequestProto in,
+ // UserResponseProto out, delivered through the generalized callback.
+ public void getAuthUser(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback done) {
+ channel.callMethod(
+ getDescriptor().getMethods().get(13),
+ controller,
+ request,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.getDefaultInstance(),
+ com.google.protobuf.RpcUtil.generalizeCallback(
+ done,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.class,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.getDefaultInstance()));
+ }
+
+ // Async RPC echoPostponed (service method index 14): EchoRequestProto in,
+ // EchoResponseProto out, delivered through the generalized callback.
+ public void echoPostponed(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto request,
+ com.google.protobuf.RpcCallback done) {
+ channel.callMethod(
+ getDescriptor().getMethods().get(14),
+ controller,
+ request,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.getDefaultInstance(),
+ com.google.protobuf.RpcUtil.generalizeCallback(
+ done,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.class,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.getDefaultInstance()));
+ }
+
+ // Async RPC sendPostponed (service method index 15): EmptyRequestProto in,
+ // EmptyResponseProto out, delivered through the generalized callback.
+ public void sendPostponed(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback done) {
+ channel.callMethod(
+ getDescriptor().getMethods().get(15),
+ controller,
+ request,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance(),
+ com.google.protobuf.RpcUtil.generalizeCallback(
+ done,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.class,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance()));
+ }
+
+ // Async RPC getCurrentUser (service method index 16): EmptyRequestProto in,
+ // UserResponseProto out, delivered through the generalized callback.
+ public void getCurrentUser(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback done) {
+ channel.callMethod(
+ getDescriptor().getMethods().get(16),
+ controller,
+ request,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.getDefaultInstance(),
+ com.google.protobuf.RpcUtil.generalizeCallback(
+ done,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.class,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.getDefaultInstance()));
+ }
+
+ // Async RPC getServerRemoteUser (service method index 17, the last method of
+ // this service): EmptyRequestProto in, UserResponseProto out.
+ public void getServerRemoteUser(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback done) {
+ channel.callMethod(
+ getDescriptor().getMethods().get(17),
+ controller,
+ request,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.getDefaultInstance(),
+ com.google.protobuf.RpcUtil.generalizeCallback(
+ done,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.class,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.getDefaultInstance()));
+ }
+ }
+
+ // Creates a synchronous client stub for this service bound to the given
+ // blocking channel.
+ public static BlockingInterface newBlockingStub(
+ com.google.protobuf.BlockingRpcChannel channel) {
+ return new BlockingStub(channel);
+ }
+
+ // Synchronous client-side contract for this service: one method per RPC
+ // (18 methods, mirroring descriptor indices 0-17). Each returns the concrete
+ // response message directly and reports transport/handler failures as
+ // ServiceException.
+ public interface BlockingInterface {
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto ping(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
+ throws com.google.protobuf.ServiceException;
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto echo(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto request)
+ throws com.google.protobuf.ServiceException;
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto error(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
+ throws com.google.protobuf.ServiceException;
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto error2(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
+ throws com.google.protobuf.ServiceException;
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto slowPing(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto request)
+ throws com.google.protobuf.ServiceException;
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2 echo2(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2 request)
+ throws com.google.protobuf.ServiceException;
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto add(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto request)
+ throws com.google.protobuf.ServiceException;
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto add2(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2 request)
+ throws com.google.protobuf.ServiceException;
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto testServerGet(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
+ throws com.google.protobuf.ServiceException;
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto exchange(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto request)
+ throws com.google.protobuf.ServiceException;
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto sleep(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto request)
+ throws com.google.protobuf.ServiceException;
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto lockAndSleep(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto request)
+ throws com.google.protobuf.ServiceException;
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto getAuthMethod(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
+ throws com.google.protobuf.ServiceException;
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto getAuthUser(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
+ throws com.google.protobuf.ServiceException;
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto echoPostponed(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto request)
+ throws com.google.protobuf.ServiceException;
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto sendPostponed(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
+ throws com.google.protobuf.ServiceException;
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto getCurrentUser(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
+ throws com.google.protobuf.ServiceException;
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto getServerRemoteUser(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
+ throws com.google.protobuf.ServiceException;
+ }
+
+ // BlockingInterface implementation: each method invokes
+ // channel.callBlockingMethod with its method descriptor (indices 0-17 in
+ // declaration order), passing the response prototype, and casts the returned
+ // Message to the concrete response type.
+ private static final class BlockingStub implements BlockingInterface {
+ private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
+ this.channel = channel;
+ }
+
+ // Transport over which all blocking calls are made.
+ private final com.google.protobuf.BlockingRpcChannel channel;
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto ping(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
+ throws com.google.protobuf.ServiceException {
+ return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto) channel.callBlockingMethod(
+ getDescriptor().getMethods().get(0),
+ controller,
+ request,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance());
+ }
+
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto echo(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto request)
+ throws com.google.protobuf.ServiceException {
+ return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto) channel.callBlockingMethod(
+ getDescriptor().getMethods().get(1),
+ controller,
+ request,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.getDefaultInstance());
+ }
+
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto error(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
+ throws com.google.protobuf.ServiceException {
+ return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto) channel.callBlockingMethod(
+ getDescriptor().getMethods().get(2),
+ controller,
+ request,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance());
+ }
+
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto error2(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
+ throws com.google.protobuf.ServiceException {
+ return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto) channel.callBlockingMethod(
+ getDescriptor().getMethods().get(3),
+ controller,
+ request,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance());
+ }
+
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto slowPing(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SlowPingRequestProto request)
+ throws com.google.protobuf.ServiceException {
+ return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto) channel.callBlockingMethod(
+ getDescriptor().getMethods().get(4),
+ controller,
+ request,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance());
+ }
+
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2 echo2(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto2 request)
+ throws com.google.protobuf.ServiceException {
+ return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2) channel.callBlockingMethod(
+ getDescriptor().getMethods().get(5),
+ controller,
+ request,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto2.getDefaultInstance());
+ }
+
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto add(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto request)
+ throws com.google.protobuf.ServiceException {
+ return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto) channel.callBlockingMethod(
+ getDescriptor().getMethods().get(6),
+ controller,
+ request,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto.getDefaultInstance());
+ }
+
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto add2(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddRequestProto2 request)
+ throws com.google.protobuf.ServiceException {
+ return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto) channel.callBlockingMethod(
+ getDescriptor().getMethods().get(7),
+ controller,
+ request,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AddResponseProto.getDefaultInstance());
+ }
+
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto testServerGet(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
+ throws com.google.protobuf.ServiceException {
+ return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto) channel.callBlockingMethod(
+ getDescriptor().getMethods().get(8),
+ controller,
+ request,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance());
+ }
+
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto exchange(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeRequestProto request)
+ throws com.google.protobuf.ServiceException {
+ return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto) channel.callBlockingMethod(
+ getDescriptor().getMethods().get(9),
+ controller,
+ request,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.ExchangeResponseProto.getDefaultInstance());
+ }
+
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto sleep(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto request)
+ throws com.google.protobuf.ServiceException {
+ return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto) channel.callBlockingMethod(
+ getDescriptor().getMethods().get(10),
+ controller,
+ request,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance());
+ }
+
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto lockAndSleep(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto request)
+ throws com.google.protobuf.ServiceException {
+ return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto) channel.callBlockingMethod(
+ getDescriptor().getMethods().get(11),
+ controller,
+ request,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance());
+ }
+
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto getAuthMethod(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
+ throws com.google.protobuf.ServiceException {
+ return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto) channel.callBlockingMethod(
+ getDescriptor().getMethods().get(12),
+ controller,
+ request,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.AuthMethodResponseProto.getDefaultInstance());
+ }
+
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto getAuthUser(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
+ throws com.google.protobuf.ServiceException {
+ return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto) channel.callBlockingMethod(
+ getDescriptor().getMethods().get(13),
+ controller,
+ request,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.getDefaultInstance());
+ }
+
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto echoPostponed(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto request)
+ throws com.google.protobuf.ServiceException {
+ return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto) channel.callBlockingMethod(
+ getDescriptor().getMethods().get(14),
+ controller,
+ request,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.getDefaultInstance());
+ }
+
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto sendPostponed(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
+ throws com.google.protobuf.ServiceException {
+ return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto) channel.callBlockingMethod(
+ getDescriptor().getMethods().get(15),
+ controller,
+ request,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance());
+ }
+
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto getCurrentUser(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
+ throws com.google.protobuf.ServiceException {
+ return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto) channel.callBlockingMethod(
+ getDescriptor().getMethods().get(16),
+ controller,
+ request,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.getDefaultInstance());
+ }
+
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto getServerRemoteUser(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
+ throws com.google.protobuf.ServiceException {
+ return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto) channel.callBlockingMethod(
+ getDescriptor().getMethods().get(17),
+ controller,
+ request,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.UserResponseProto.getDefaultInstance());
+ }
+
+ }
+
+ // @@protoc_insertion_point(class_scope:hadoop.common.TestProtobufRpcProto)
+ }
+
+ /**
+ * Protobuf service {@code hadoop.common.TestProtobufRpc2Proto}
+ */
+ public static abstract class TestProtobufRpc2Proto
+ implements com.google.protobuf.Service {
+ protected TestProtobufRpc2Proto() {}
+
+ // Async (callback-style) contract: one method per RPC, dispatched by
+ // descriptor index (ping2=0, echo2=1, sleep=2).
+ public interface Interface {
+ /**
+ * rpc ping2(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);
+ */
+ public abstract void ping2(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback done);
+
+ /**
+ * rpc echo2(.hadoop.common.EchoRequestProto) returns (.hadoop.common.EchoResponseProto);
+ */
+ public abstract void echo2(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto request,
+ com.google.protobuf.RpcCallback done);
+
+ /**
+ * rpc sleep(.hadoop.common.SleepRequestProto) returns (.hadoop.common.SleepResponseProto);
+ */
+ public abstract void sleep(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto request,
+ com.google.protobuf.RpcCallback done);
+
+ }
+
+ // Adapts an Interface implementation into a protobuf Service by delegating
+ // each abstract method to the supplied impl.
+ public static com.google.protobuf.Service newReflectiveService(
+ final Interface impl) {
+ return new TestProtobufRpc2Proto() {
+ @java.lang.Override
+ public void ping2(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback done) {
+ impl.ping2(controller, request, done);
+ }
+
+ @java.lang.Override
+ public void echo2(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto request,
+ com.google.protobuf.RpcCallback done) {
+ impl.echo2(controller, request, done);
+ }
+
+ @java.lang.Override
+ public void sleep(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto request,
+ com.google.protobuf.RpcCallback done) {
+ impl.sleep(controller, request, done);
+ }
+
+ };
+ }
+
+ // Adapts a BlockingInterface implementation into a BlockingService:
+ // callBlockingMethod switches on the method descriptor index and forwards
+ // to the matching impl method after casting the request.
+ public static com.google.protobuf.BlockingService
+ newReflectiveBlockingService(final BlockingInterface impl) {
+ return new com.google.protobuf.BlockingService() {
+ public final com.google.protobuf.Descriptors.ServiceDescriptor
+ getDescriptorForType() {
+ return getDescriptor();
+ }
+
+ public final com.google.protobuf.Message callBlockingMethod(
+ com.google.protobuf.Descriptors.MethodDescriptor method,
+ com.google.protobuf.RpcController controller,
+ com.google.protobuf.Message request)
+ throws com.google.protobuf.ServiceException {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.callBlockingMethod() given method descriptor for " +
+ "wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return impl.ping2(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request);
+ case 1:
+ return impl.echo2(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto)request);
+ case 2:
+ return impl.sleep(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto)request);
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ public final com.google.protobuf.Message
+ getRequestPrototype(
+ com.google.protobuf.Descriptors.MethodDescriptor method) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.getRequestPrototype() given method " +
+ "descriptor for wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+ case 1:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto.getDefaultInstance();
+ case 2:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto.getDefaultInstance();
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ public final com.google.protobuf.Message
+ getResponsePrototype(
+ com.google.protobuf.Descriptors.MethodDescriptor method) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.getResponsePrototype() given method " +
+ "descriptor for wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
+ case 1:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.getDefaultInstance();
+ case 2:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto.getDefaultInstance();
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ };
+ }
+
+ /**
+ * rpc ping2(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);
+ */
+ public abstract void ping2(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback done);
+
+ /**
+ * rpc echo2(.hadoop.common.EchoRequestProto) returns (.hadoop.common.EchoResponseProto);
+ */
+ public abstract void echo2(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto request,
+ com.google.protobuf.RpcCallback done);
+
+ /**
+ * rpc sleep(.hadoop.common.SleepRequestProto) returns (.hadoop.common.SleepResponseProto);
+ */
+ public abstract void sleep(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto request,
+ com.google.protobuf.RpcCallback done);
+
+ // This service is the second (index 1) service declared in the file's
+ // descriptor.
+ public static final
+ com.google.protobuf.Descriptors.ServiceDescriptor
+ getDescriptor() {
+ return org.apache.hadoop.ipc.protobuf.TestRpcServiceProtosLegacy.getDescriptor().getServices().get(1);
+ }
+ public final com.google.protobuf.Descriptors.ServiceDescriptor
+ getDescriptorForType() {
+ return getDescriptor();
+ }
+
+ // Server-side async dispatch: routes an incoming call to the matching
+ // abstract method by descriptor index, specializing the generic callback.
+ public final void callMethod(
+ com.google.protobuf.Descriptors.MethodDescriptor method,
+ com.google.protobuf.RpcController controller,
+ com.google.protobuf.Message request,
+ com.google.protobuf.RpcCallback<
+ com.google.protobuf.Message> done) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.callMethod() given method descriptor for wrong " +
+ "service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ this.ping2(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request,
+ com.google.protobuf.RpcUtil.specializeCallback(
+ done));
+ return;
+ case 1:
+ this.echo2(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto)request,
+ com.google.protobuf.RpcUtil.specializeCallback(
+ done));
+ return;
+ case 2:
+ this.sleep(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto)request,
+ com.google.protobuf.RpcUtil.specializeCallback(
+ done));
+ return;
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ public final com.google.protobuf.Message
+ getRequestPrototype(
+ com.google.protobuf.Descriptors.MethodDescriptor method) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.getRequestPrototype() given method " +
+ "descriptor for wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+ case 1:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto.getDefaultInstance();
+ case 2:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto.getDefaultInstance();
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ public final com.google.protobuf.Message
+ getResponsePrototype(
+ com.google.protobuf.Descriptors.MethodDescriptor method) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.getResponsePrototype() given method " +
+ "descriptor for wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
+ case 1:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.getDefaultInstance();
+ case 2:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto.getDefaultInstance();
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ public static Stub newStub(
+ com.google.protobuf.RpcChannel channel) {
+ return new Stub(channel);
+ }
+
+ // Async client stub: each method forwards over the (possibly non-blocking)
+ // channel with the matching descriptor index and a generalized callback.
+ public static final class Stub extends org.apache.hadoop.ipc.protobuf.TestRpcServiceProtosLegacy.TestProtobufRpc2Proto implements Interface {
+ private Stub(com.google.protobuf.RpcChannel channel) {
+ this.channel = channel;
+ }
+
+ private final com.google.protobuf.RpcChannel channel;
+
+ public com.google.protobuf.RpcChannel getChannel() {
+ return channel;
+ }
+
+ public void ping2(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback done) {
+ channel.callMethod(
+ getDescriptor().getMethods().get(0),
+ controller,
+ request,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance(),
+ com.google.protobuf.RpcUtil.generalizeCallback(
+ done,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.class,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance()));
+ }
+
+ public void echo2(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto request,
+ com.google.protobuf.RpcCallback done) {
+ channel.callMethod(
+ getDescriptor().getMethods().get(1),
+ controller,
+ request,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.getDefaultInstance(),
+ com.google.protobuf.RpcUtil.generalizeCallback(
+ done,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.class,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.getDefaultInstance()));
+ }
+
+ public void sleep(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto request,
+ com.google.protobuf.RpcCallback done) {
+ channel.callMethod(
+ getDescriptor().getMethods().get(2),
+ controller,
+ request,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto.getDefaultInstance(),
+ com.google.protobuf.RpcUtil.generalizeCallback(
+ done,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto.class,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto.getDefaultInstance()));
+ }
+ }
+
+ public static BlockingInterface newBlockingStub(
+ com.google.protobuf.BlockingRpcChannel channel) {
+ return new BlockingStub(channel);
+ }
+
+ // Synchronous client-side contract for this service.
+ public interface BlockingInterface {
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto ping2(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
+ throws com.google.protobuf.ServiceException;
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto echo2(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto request)
+ throws com.google.protobuf.ServiceException;
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto sleep(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto request)
+ throws com.google.protobuf.ServiceException;
+ }
+
+ // Blocking client stub: forwards each call via callBlockingMethod and casts
+ // the returned Message to the concrete response type.
+ private static final class BlockingStub implements BlockingInterface {
+ private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
+ this.channel = channel;
+ }
+
+ private final com.google.protobuf.BlockingRpcChannel channel;
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto ping2(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
+ throws com.google.protobuf.ServiceException {
+ return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto) channel.callBlockingMethod(
+ getDescriptor().getMethods().get(0),
+ controller,
+ request,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance());
+ }
+
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto echo2(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoRequestProto request)
+ throws com.google.protobuf.ServiceException {
+ return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto) channel.callBlockingMethod(
+ getDescriptor().getMethods().get(1),
+ controller,
+ request,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EchoResponseProto.getDefaultInstance());
+ }
+
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto sleep(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepRequestProto request)
+ throws com.google.protobuf.ServiceException {
+ return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto) channel.callBlockingMethod(
+ getDescriptor().getMethods().get(2),
+ controller,
+ request,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.SleepResponseProto.getDefaultInstance());
+ }
+
+ }
+
+ // @@protoc_insertion_point(class_scope:hadoop.common.TestProtobufRpc2Proto)
+ }
+
+ /**
+ * Protobuf service {@code hadoop.common.OldProtobufRpcProto}
+ */
+ public static abstract class OldProtobufRpcProto
+ implements com.google.protobuf.Service {
+ protected OldProtobufRpcProto() {}
+
+ public interface Interface {
+ /**
+ * rpc ping(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);
+ */
+ public abstract void ping(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback done);
+
+ /**
+ * rpc echo(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);
+ */
+ public abstract void echo(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback done);
+
+ }
+
+ public static com.google.protobuf.Service newReflectiveService(
+ final Interface impl) {
+ return new OldProtobufRpcProto() {
+ @java.lang.Override
+ public void ping(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback done) {
+ impl.ping(controller, request, done);
+ }
+
+ @java.lang.Override
+ public void echo(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback done) {
+ impl.echo(controller, request, done);
+ }
+
+ };
+ }
+
+ public static com.google.protobuf.BlockingService
+ newReflectiveBlockingService(final BlockingInterface impl) {
+ return new com.google.protobuf.BlockingService() {
+ public final com.google.protobuf.Descriptors.ServiceDescriptor
+ getDescriptorForType() {
+ return getDescriptor();
+ }
+
+ public final com.google.protobuf.Message callBlockingMethod(
+ com.google.protobuf.Descriptors.MethodDescriptor method,
+ com.google.protobuf.RpcController controller,
+ com.google.protobuf.Message request)
+ throws com.google.protobuf.ServiceException {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.callBlockingMethod() given method descriptor for " +
+ "wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return impl.ping(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request);
+ case 1:
+ return impl.echo(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request);
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ public final com.google.protobuf.Message
+ getRequestPrototype(
+ com.google.protobuf.Descriptors.MethodDescriptor method) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.getRequestPrototype() given method " +
+ "descriptor for wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+ case 1:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ public final com.google.protobuf.Message
+ getResponsePrototype(
+ com.google.protobuf.Descriptors.MethodDescriptor method) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.getResponsePrototype() given method " +
+ "descriptor for wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
+ case 1:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ };
+ }
+
+ /**
+ * rpc ping(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);
+ */
+ public abstract void ping(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback done);
+
+ /**
+ * rpc echo(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);
+ */
+ public abstract void echo(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback done);
+
+ public static final
+ com.google.protobuf.Descriptors.ServiceDescriptor
+ getDescriptor() {
+ return org.apache.hadoop.ipc.protobuf.TestRpcServiceProtosLegacy.getDescriptor().getServices().get(2);
+ }
+ public final com.google.protobuf.Descriptors.ServiceDescriptor
+ getDescriptorForType() {
+ return getDescriptor();
+ }
+
+ public final void callMethod(
+ com.google.protobuf.Descriptors.MethodDescriptor method,
+ com.google.protobuf.RpcController controller,
+ com.google.protobuf.Message request,
+ com.google.protobuf.RpcCallback<
+ com.google.protobuf.Message> done) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.callMethod() given method descriptor for wrong " +
+ "service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ this.ping(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request,
+ com.google.protobuf.RpcUtil.specializeCallback(
+ done));
+ return;
+ case 1:
+ this.echo(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request,
+ com.google.protobuf.RpcUtil.specializeCallback(
+ done));
+ return;
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ public final com.google.protobuf.Message
+ getRequestPrototype(
+ com.google.protobuf.Descriptors.MethodDescriptor method) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.getRequestPrototype() given method " +
+ "descriptor for wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+ case 1:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ public final com.google.protobuf.Message
+ getResponsePrototype(
+ com.google.protobuf.Descriptors.MethodDescriptor method) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.getResponsePrototype() given method " +
+ "descriptor for wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
+ case 1:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ public static Stub newStub(
+ com.google.protobuf.RpcChannel channel) {
+ return new Stub(channel);
+ }
+
+ public static final class Stub extends org.apache.hadoop.ipc.protobuf.TestRpcServiceProtosLegacy.OldProtobufRpcProto implements Interface {
+ private Stub(com.google.protobuf.RpcChannel channel) {
+ this.channel = channel;
+ }
+
+ private final com.google.protobuf.RpcChannel channel;
+
+ public com.google.protobuf.RpcChannel getChannel() {
+ return channel;
+ }
+
+ public void ping(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback done) {
+ channel.callMethod(
+ getDescriptor().getMethods().get(0),
+ controller,
+ request,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance(),
+ com.google.protobuf.RpcUtil.generalizeCallback(
+ done,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.class,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance()));
+ }
+
+ public void echo(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback done) {
+ channel.callMethod(
+ getDescriptor().getMethods().get(1),
+ controller,
+ request,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance(),
+ com.google.protobuf.RpcUtil.generalizeCallback(
+ done,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.class,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance()));
+ }
+ }
+
+ public static BlockingInterface newBlockingStub(
+ com.google.protobuf.BlockingRpcChannel channel) {
+ return new BlockingStub(channel);
+ }
+
+ public interface BlockingInterface {
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto ping(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
+ throws com.google.protobuf.ServiceException;
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto echo(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
+ throws com.google.protobuf.ServiceException;
+ }
+
+ private static final class BlockingStub implements BlockingInterface {
+ private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
+ this.channel = channel;
+ }
+
+ private final com.google.protobuf.BlockingRpcChannel channel;
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto ping(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
+ throws com.google.protobuf.ServiceException {
+ return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto) channel.callBlockingMethod(
+ getDescriptor().getMethods().get(0),
+ controller,
+ request,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance());
+ }
+
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto echo(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
+ throws com.google.protobuf.ServiceException {
+ return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto) channel.callBlockingMethod(
+ getDescriptor().getMethods().get(1),
+ controller,
+ request,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance());
+ }
+
+ }
+
+ // @@protoc_insertion_point(class_scope:hadoop.common.OldProtobufRpcProto)
+ }
+
+ /**
+ * Protobuf service {@code hadoop.common.NewProtobufRpcProto}
+ */
+ public static abstract class NewProtobufRpcProto
+ implements com.google.protobuf.Service {
+ protected NewProtobufRpcProto() {}
+
+ public interface Interface {
+ /**
+ * rpc ping(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);
+ */
+ public abstract void ping(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback done);
+
+ /**
+ * rpc echo(.hadoop.common.OptRequestProto) returns (.hadoop.common.OptResponseProto);
+ */
+ public abstract void echo(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto request,
+ com.google.protobuf.RpcCallback done);
+
+ }
+
+ public static com.google.protobuf.Service newReflectiveService(
+ final Interface impl) {
+ return new NewProtobufRpcProto() {
+ @java.lang.Override
+ public void ping(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback done) {
+ impl.ping(controller, request, done);
+ }
+
+ @java.lang.Override
+ public void echo(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto request,
+ com.google.protobuf.RpcCallback done) {
+ impl.echo(controller, request, done);
+ }
+
+ };
+ }
+
+ public static com.google.protobuf.BlockingService
+ newReflectiveBlockingService(final BlockingInterface impl) {
+ return new com.google.protobuf.BlockingService() {
+ public final com.google.protobuf.Descriptors.ServiceDescriptor
+ getDescriptorForType() {
+ return getDescriptor();
+ }
+
+ public final com.google.protobuf.Message callBlockingMethod(
+ com.google.protobuf.Descriptors.MethodDescriptor method,
+ com.google.protobuf.RpcController controller,
+ com.google.protobuf.Message request)
+ throws com.google.protobuf.ServiceException {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.callBlockingMethod() given method descriptor for " +
+ "wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return impl.ping(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request);
+ case 1:
+ return impl.echo(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto)request);
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ public final com.google.protobuf.Message
+ getRequestPrototype(
+ com.google.protobuf.Descriptors.MethodDescriptor method) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.getRequestPrototype() given method " +
+ "descriptor for wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+ case 1:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto.getDefaultInstance();
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ public final com.google.protobuf.Message
+ getResponsePrototype(
+ com.google.protobuf.Descriptors.MethodDescriptor method) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.getResponsePrototype() given method " +
+ "descriptor for wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
+ case 1:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto.getDefaultInstance();
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ };
+ }
+
+ /**
+ * rpc ping(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);
+ */
+ public abstract void ping(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback done);
+
+ /**
+ * rpc echo(.hadoop.common.OptRequestProto) returns (.hadoop.common.OptResponseProto);
+ */
+ public abstract void echo(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto request,
+ com.google.protobuf.RpcCallback done);
+
+ public static final
+ com.google.protobuf.Descriptors.ServiceDescriptor
+ getDescriptor() {
+ return org.apache.hadoop.ipc.protobuf.TestRpcServiceProtosLegacy.getDescriptor().getServices().get(3);
+ }
+ public final com.google.protobuf.Descriptors.ServiceDescriptor
+ getDescriptorForType() {
+ return getDescriptor();
+ }
+
+ public final void callMethod(
+ com.google.protobuf.Descriptors.MethodDescriptor method,
+ com.google.protobuf.RpcController controller,
+ com.google.protobuf.Message request,
+ com.google.protobuf.RpcCallback<
+ com.google.protobuf.Message> done) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.callMethod() given method descriptor for wrong " +
+ "service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ this.ping(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request,
+ com.google.protobuf.RpcUtil.specializeCallback(
+ done));
+ return;
+ case 1:
+ this.echo(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto)request,
+ com.google.protobuf.RpcUtil.specializeCallback(
+ done));
+ return;
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ public final com.google.protobuf.Message
+ getRequestPrototype(
+ com.google.protobuf.Descriptors.MethodDescriptor method) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.getRequestPrototype() given method " +
+ "descriptor for wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+ case 1:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto.getDefaultInstance();
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ public final com.google.protobuf.Message
+ getResponsePrototype(
+ com.google.protobuf.Descriptors.MethodDescriptor method) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.getResponsePrototype() given method " +
+ "descriptor for wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
+ case 1:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto.getDefaultInstance();
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ public static Stub newStub(
+ com.google.protobuf.RpcChannel channel) {
+ return new Stub(channel);
+ }
+
+ public static final class Stub extends org.apache.hadoop.ipc.protobuf.TestRpcServiceProtosLegacy.NewProtobufRpcProto implements Interface {
+ private Stub(com.google.protobuf.RpcChannel channel) {
+ this.channel = channel;
+ }
+
+ private final com.google.protobuf.RpcChannel channel;
+
+ public com.google.protobuf.RpcChannel getChannel() {
+ return channel;
+ }
+
+ public void ping(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback done) {
+ channel.callMethod(
+ getDescriptor().getMethods().get(0),
+ controller,
+ request,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance(),
+ com.google.protobuf.RpcUtil.generalizeCallback(
+ done,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.class,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance()));
+ }
+
+ public void echo(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto request,
+ com.google.protobuf.RpcCallback done) {
+ channel.callMethod(
+ getDescriptor().getMethods().get(1),
+ controller,
+ request,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto.getDefaultInstance(),
+ com.google.protobuf.RpcUtil.generalizeCallback(
+ done,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto.class,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto.getDefaultInstance()));
+ }
+ }
+
+ public static BlockingInterface newBlockingStub(
+ com.google.protobuf.BlockingRpcChannel channel) {
+ return new BlockingStub(channel);
+ }
+
+ public interface BlockingInterface {
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto ping(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
+ throws com.google.protobuf.ServiceException;
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto echo(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto request)
+ throws com.google.protobuf.ServiceException;
+ }
+
+ private static final class BlockingStub implements BlockingInterface {
+ private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
+ this.channel = channel;
+ }
+
+ private final com.google.protobuf.BlockingRpcChannel channel;
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto ping(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
+ throws com.google.protobuf.ServiceException {
+ return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto) channel.callBlockingMethod(
+ getDescriptor().getMethods().get(0),
+ controller,
+ request,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance());
+ }
+
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto echo(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptRequestProto request)
+ throws com.google.protobuf.ServiceException {
+ return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto) channel.callBlockingMethod(
+ getDescriptor().getMethods().get(1),
+ controller,
+ request,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.OptResponseProto.getDefaultInstance());
+ }
+
+ }
+
+ // @@protoc_insertion_point(class_scope:hadoop.common.NewProtobufRpcProto)
+ }
+
+ /**
+ * Protobuf service {@code hadoop.common.NewerProtobufRpcProto}
+ */
+ public static abstract class NewerProtobufRpcProto
+ implements com.google.protobuf.Service {
+ protected NewerProtobufRpcProto() {}
+
+ public interface Interface {
+ /**
+ * rpc ping(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);
+ */
+ public abstract void ping(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback done);
+
+ /**
+ * rpc echo(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);
+ */
+ public abstract void echo(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback done);
+
+ }
+
+ public static com.google.protobuf.Service newReflectiveService(
+ final Interface impl) {
+ return new NewerProtobufRpcProto() {
+ @java.lang.Override
+ public void ping(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback done) {
+ impl.ping(controller, request, done);
+ }
+
+ @java.lang.Override
+ public void echo(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback done) {
+ impl.echo(controller, request, done);
+ }
+
+ };
+ }
+
+ public static com.google.protobuf.BlockingService
+ newReflectiveBlockingService(final BlockingInterface impl) {
+ return new com.google.protobuf.BlockingService() {
+ public final com.google.protobuf.Descriptors.ServiceDescriptor
+ getDescriptorForType() {
+ return getDescriptor();
+ }
+
+ public final com.google.protobuf.Message callBlockingMethod(
+ com.google.protobuf.Descriptors.MethodDescriptor method,
+ com.google.protobuf.RpcController controller,
+ com.google.protobuf.Message request)
+ throws com.google.protobuf.ServiceException {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.callBlockingMethod() given method descriptor for " +
+ "wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return impl.ping(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request);
+ case 1:
+ return impl.echo(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request);
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ public final com.google.protobuf.Message
+ getRequestPrototype(
+ com.google.protobuf.Descriptors.MethodDescriptor method) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.getRequestPrototype() given method " +
+ "descriptor for wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+ case 1:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ public final com.google.protobuf.Message
+ getResponsePrototype(
+ com.google.protobuf.Descriptors.MethodDescriptor method) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.getResponsePrototype() given method " +
+ "descriptor for wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
+ case 1:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ };
+ }
+
+ /**
+ * rpc ping(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);
+ */
+ public abstract void ping(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback done);
+
+ /**
+ * rpc echo(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);
+ */
+ public abstract void echo(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback done);
+
+ public static final
+ com.google.protobuf.Descriptors.ServiceDescriptor
+ getDescriptor() {
+ return org.apache.hadoop.ipc.protobuf.TestRpcServiceProtosLegacy.getDescriptor().getServices().get(4);
+ }
+ public final com.google.protobuf.Descriptors.ServiceDescriptor
+ getDescriptorForType() {
+ return getDescriptor();
+ }
+
+ public final void callMethod(
+ com.google.protobuf.Descriptors.MethodDescriptor method,
+ com.google.protobuf.RpcController controller,
+ com.google.protobuf.Message request,
+ com.google.protobuf.RpcCallback<
+ com.google.protobuf.Message> done) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.callMethod() given method descriptor for wrong " +
+ "service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ this.ping(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request,
+ com.google.protobuf.RpcUtil.specializeCallback(
+ done));
+ return;
+ case 1:
+ this.echo(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request,
+ com.google.protobuf.RpcUtil.specializeCallback(
+ done));
+ return;
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ public final com.google.protobuf.Message
+ getRequestPrototype(
+ com.google.protobuf.Descriptors.MethodDescriptor method) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.getRequestPrototype() given method " +
+ "descriptor for wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+ case 1:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ public final com.google.protobuf.Message
+ getResponsePrototype(
+ com.google.protobuf.Descriptors.MethodDescriptor method) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.getResponsePrototype() given method " +
+ "descriptor for wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
+ case 1:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ public static Stub newStub(
+ com.google.protobuf.RpcChannel channel) {
+ return new Stub(channel);
+ }
+
+ public static final class Stub extends org.apache.hadoop.ipc.protobuf.TestRpcServiceProtosLegacy.NewerProtobufRpcProto implements Interface {
+ private Stub(com.google.protobuf.RpcChannel channel) {
+ this.channel = channel;
+ }
+
+ private final com.google.protobuf.RpcChannel channel;
+
+ public com.google.protobuf.RpcChannel getChannel() {
+ return channel;
+ }
+
+ public void ping(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback done) {
+ channel.callMethod(
+ getDescriptor().getMethods().get(0),
+ controller,
+ request,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance(),
+ com.google.protobuf.RpcUtil.generalizeCallback(
+ done,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.class,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance()));
+ }
+
+ public void echo(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback done) {
+ channel.callMethod(
+ getDescriptor().getMethods().get(1),
+ controller,
+ request,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance(),
+ com.google.protobuf.RpcUtil.generalizeCallback(
+ done,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.class,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance()));
+ }
+ }
+
+ public static BlockingInterface newBlockingStub(
+ com.google.protobuf.BlockingRpcChannel channel) {
+ return new BlockingStub(channel);
+ }
+
+ public interface BlockingInterface {
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto ping(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
+ throws com.google.protobuf.ServiceException;
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto echo(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
+ throws com.google.protobuf.ServiceException;
+ }
+
+ private static final class BlockingStub implements BlockingInterface {
+ private BlockingStub(com.google.protobuf.BlockingRpcChannel channel) {
+ this.channel = channel;
+ }
+
+ private final com.google.protobuf.BlockingRpcChannel channel;
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto ping(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
+ throws com.google.protobuf.ServiceException {
+ return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto) channel.callBlockingMethod(
+ getDescriptor().getMethods().get(0),
+ controller,
+ request,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance());
+ }
+
+
+ public org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto echo(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request)
+ throws com.google.protobuf.ServiceException {
+ return (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto) channel.callBlockingMethod(
+ getDescriptor().getMethods().get(1),
+ controller,
+ request,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance());
+ }
+
+ }
+
+ // @@protoc_insertion_point(class_scope:hadoop.common.NewerProtobufRpcProto)
+ }
+
+ /**
+ * Protobuf service {@code hadoop.common.CustomProto}
+ */
+ public static abstract class CustomProto
+ implements com.google.protobuf.Service {
+ protected CustomProto() {}
+
+ public interface Interface {
+ /**
+ * rpc ping(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);
+ */
+ public abstract void ping(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback done);
+
+ }
+
+ // Adapts an Interface implementation into a protobuf Service by
+ // delegating each generated rpc method to the impl.
+ public static com.google.protobuf.Service newReflectiveService(
+ final Interface impl) {
+ return new CustomProto() {
+ @java.lang.Override
+ public void ping(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+ // NOTE(review): raw RpcCallback here; protoc normally emits
+ // RpcCallback<EmptyResponseProto>. Left raw so this anonymous method
+ // still overrides the (raw) abstract ping — confirm against the
+ // generator output before parameterizing.
+ com.google.protobuf.RpcCallback done) {
+ impl.ping(controller, request, done);
+ }
+
+ };
+ }
+
+ // Adapts a BlockingInterface implementation into a BlockingService that
+ // dispatches on the method descriptor's index and rejects descriptors
+ // belonging to any other service.
+ public static com.google.protobuf.BlockingService
+ newReflectiveBlockingService(final BlockingInterface impl) {
+ return new com.google.protobuf.BlockingService() {
+ public final com.google.protobuf.Descriptors.ServiceDescriptor
+ getDescriptorForType() {
+ return getDescriptor();
+ }
+
+ // Routes a blocking call to impl; identity comparison against this
+ // service's descriptor guards against cross-service dispatch.
+ public final com.google.protobuf.Message callBlockingMethod(
+ com.google.protobuf.Descriptors.MethodDescriptor method,
+ com.google.protobuf.RpcController controller,
+ com.google.protobuf.Message request)
+ throws com.google.protobuf.ServiceException {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.callBlockingMethod() given method descriptor for " +
+ "wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return impl.ping(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request);
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ // Default request instance for the given method, used by the RPC layer
+ // to parse incoming request bytes.
+ public final com.google.protobuf.Message
+ getRequestPrototype(
+ com.google.protobuf.Descriptors.MethodDescriptor method) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.getRequestPrototype() given method " +
+ "descriptor for wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ // Default response instance for the given method.
+ public final com.google.protobuf.Message
+ getResponsePrototype(
+ com.google.protobuf.Descriptors.MethodDescriptor method) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.getResponsePrototype() given method " +
+ "descriptor for wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ };
+ }
+
+ /**
+ * rpc ping(.hadoop.common.EmptyRequestProto) returns (.hadoop.common.EmptyResponseProto);
+ */
+ // Fixed: the callback was declared with the raw RpcCallback type; protoc
+ // parameterizes it with the rpc's response type (EmptyResponseProto per
+ // the signature above). Subclasses overriding with the raw type remain
+ // valid overrides (same erasure), so existing implementations compile
+ // unchanged, and RpcUtil.specializeCallback in callMethod still infers
+ // the correct target type.
+ public abstract void ping(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback<org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto> done);
+
+ // Descriptor for this service: entry 5 of the services registered in
+ // TestRpcServiceProtosLegacy's file descriptor.
+ public static final
+ com.google.protobuf.Descriptors.ServiceDescriptor
+ getDescriptor() {
+ return org.apache.hadoop.ipc.protobuf.TestRpcServiceProtosLegacy.getDescriptor().getServices().get(5);
+ }
+ // Instance-level accessor required by com.google.protobuf.Service;
+ // simply defers to the static descriptor.
+ public final com.google.protobuf.Descriptors.ServiceDescriptor
+ getDescriptorForType() {
+ return getDescriptor();
+ }
+
+ // Async dispatch entry point: routes the request to this.ping based on
+ // the method descriptor's index, after verifying (by identity) that the
+ // descriptor belongs to this service. The generic Message callback is
+ // narrowed to the method's response type via RpcUtil.specializeCallback.
+ public final void callMethod(
+ com.google.protobuf.Descriptors.MethodDescriptor method,
+ com.google.protobuf.RpcController controller,
+ com.google.protobuf.Message request,
+ com.google.protobuf.RpcCallback<
+ com.google.protobuf.Message> done) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.callMethod() given method descriptor for wrong " +
+ "service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ this.ping(controller, (org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto)request,
+ com.google.protobuf.RpcUtil.specializeCallback(
+ done));
+ return;
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ // Default request instance for the given method, used by the RPC layer
+ // to parse incoming request bytes; rejects foreign method descriptors.
+ public final com.google.protobuf.Message
+ getRequestPrototype(
+ com.google.protobuf.Descriptors.MethodDescriptor method) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.getRequestPrototype() given method " +
+ "descriptor for wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto.getDefaultInstance();
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ // Default response instance for the given method; rejects foreign
+ // method descriptors.
+ public final com.google.protobuf.Message
+ getResponsePrototype(
+ com.google.protobuf.Descriptors.MethodDescriptor method) {
+ if (method.getService() != getDescriptor()) {
+ throw new java.lang.IllegalArgumentException(
+ "Service.getResponsePrototype() given method " +
+ "descriptor for wrong service type.");
+ }
+ switch(method.getIndex()) {
+ case 0:
+ return org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyResponseProto.getDefaultInstance();
+ default:
+ throw new java.lang.AssertionError("Can't get here.");
+ }
+ }
+
+ // Factory for the async client stub bound to the given RpcChannel.
+ public static Stub newStub(
+ com.google.protobuf.RpcChannel channel) {
+ return new Stub(channel);
+ }
+
+ public static final class Stub extends org.apache.hadoop.ipc.protobuf.TestRpcServiceProtosLegacy.CustomProto implements Interface {
+ private Stub(com.google.protobuf.RpcChannel channel) {
+ this.channel = channel;
+ }
+
+ private final com.google.protobuf.RpcChannel channel;
+
+ public com.google.protobuf.RpcChannel getChannel() {
+ return channel;
+ }
+
+ public void ping(
+ com.google.protobuf.RpcController controller,
+ org.apache.hadoop.ipc.protobuf.TestProtosLegacy.EmptyRequestProto request,
+ com.google.protobuf.RpcCallback