HADOOP-18487. Make protobuf 2.5 an optional runtime dependency. (#4996)
Protobuf 2.5 JAR is no longer needed at runtime. The option common.protobuf2.scope defines whether the protobuf 2.5.0 dependency is marked as "provided" or not.

* New package org.apache.hadoop.ipc.internal for internal-only protobuf classes, with a ShadedProtobufHelper class in there which has shaded protobuf references only, so is guaranteed not to need protobuf-2.5 on the classpath.
* All uses of org.apache.hadoop.ipc.ProtobufHelper have been replaced by uses of org.apache.hadoop.ipc.internal.ShadedProtobufHelper.
* The scope of protobuf-2.5 is set by the option common.protobuf2.scope; in this patch it is still "compile".
* There is an explicit reference to it in modules where it may be needed.
* The maven scope of the dependency can be set with the common.protobuf2.scope option; it can be set to "provided" in a build: -Dcommon.protobuf2.scope=provided
* A new ipc(callable) method catches and converts shaded protobuf exceptions raised during invocation of the supplied lambda expression.
* This is adopted in the code where the migration is not traumatically over-complex; RouterAdminProtocolTranslatorPB is left alone for this reason.

Contributed by Steve Loughran
This commit is contained in:
parent 81edbebdd8
commit 9bc159f4ac
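The refactoring pattern behind most of the file diffs below is mechanical: replace each hand-written try/catch around a proxy call with the new ipc() wrapper. A minimal compilable sketch of the before/after shape, assuming hadoop-common is on the classpath; the Echo interface is a hypothetical stand-in for a generated protobuf service proxy:

import java.io.IOException;

import org.apache.hadoop.thirdparty.protobuf.ServiceException;

import static org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc;

/** Sketch of the translator migration; Echo is hypothetical. */
public class IpcMigrationSketch {

  /** Hypothetical protobuf service proxy; throws the shaded exception. */
  interface Echo {
    String echo(String text) throws ServiceException;
  }

  private final Echo rpcProxy;

  IpcMigrationSketch(Echo rpcProxy) {
    this.rpcProxy = rpcProxy;
  }

  // Before: the call site unwraps the shaded ServiceException by hand,
  // via the deprecated ProtobufHelper class.
  public String echoOld(String text) throws IOException {
    try {
      return rpcProxy.echo(text);
    } catch (ServiceException e) {
      throw org.apache.hadoop.ipc.ProtobufHelper.getRemoteException(e);
    }
  }

  // After: ipc() evaluates the lambda and converts any ServiceException
  // into the IOException wrapped as its cause.
  public String echoNew(String text) throws IOException {
    return ipc(() -> rpcProxy.echo(text));
  }
}

The old form keeps a reference to the deprecated ProtobufHelper class, which in turn references unshaded protobuf 2.5; the new form touches only shaded classes.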
BUILDING.txt (24 changed lines)
@@ -311,6 +311,30 @@ Maven build goals:
 package. This option requires that -Dpmdk.lib is specified. With -Dbundle.pmdk provided,
 the build will fail if -Dpmdk.lib is not specified.
+
+Controlling the redistribution of the protobuf-2.5 dependency
+
+The protobuf 2.5.0 library is used at compile time to compile the class
+org.apache.hadoop.ipc.ProtobufHelper; this class is known to have been used by
+external projects in the past. Protobuf 2.5 is not used elsewhere in
+the Hadoop codebase; alongside the move to Protobuf 3.x a private successor
+class, org.apache.hadoop.ipc.internal.ShadedProtobufHelper, is now used.
+
+The hadoop-common JAR still declares a dependency on protobuf-2.5, but this
+is likely to change in the future. The maven scope of the dependency can be
+set with the common.protobuf2.scope option.
+It can be set to "provided" in a build:
+  -Dcommon.protobuf2.scope=provided
+If this is done then protobuf-2.5.0.jar will no longer be exported as a dependency,
+and will then be omitted from the share/hadoop/common/lib/ directory of
+any Hadoop distribution built. Any application declaring a dependency on hadoop-common
+will no longer get the dependency; if they need it then they must explicitly declare it:
+
+  <dependency>
+    <groupId>com.google.protobuf</groupId>
+    <artifactId>protobuf-java</artifactId>
+    <version>2.5.0</version>
+  </dependency>
+
 ----------------------------------------------------------------------------------
 Building components separately
@@ -451,8 +451,7 @@
   </Match>

   <Match>
-    <Class name="org.apache.hadoop.ipc.ProtobufHelper" />
-    <Method name="getFixedByteString" />
+    <Class name="org.apache.hadoop.ipc.internal.ShadedProtobufHelper" />
     <Bug pattern="AT_OPERATION_SEQUENCE_ON_CONCURRENT_ABSTRACTION" />
   </Match>
 </FindBugsFilter>
@@ -263,10 +263,11 @@
       <artifactId>re2j</artifactId>
       <scope>compile</scope>
     </dependency>
+    <!-- Needed for compilation, though no longer in production. -->
     <dependency>
       <groupId>com.google.protobuf</groupId>
       <artifactId>protobuf-java</artifactId>
-      <scope>compile</scope>
+      <scope>${common.protobuf2.scope}</scope>
     </dependency>
     <dependency>
       <groupId>com.google.code.gson</groupId>
@@ -504,11 +505,11 @@
             <!--These classes have direct Protobuf references for backward compatibility reasons-->
             <excludes>
               <exclude>**/ProtobufHelper.java</exclude>
               <exclude>**/RpcWritable.java</exclude>
               <exclude>**/ProtobufRpcEngineCallback.java</exclude>
               <exclude>**/ProtobufRpcEngine.java</exclude>
               <exclude>**/ProtobufRpcEngine2.java</exclude>
               <exclude>**/ProtobufRpcEngineProtos.java</exclude>
+              <exclude>**/ProtobufWrapperLegacy.java</exclude>
             </excludes>
           </configuration>
         </execution>
@@ -37,14 +37,13 @@
 import org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToActiveRequestProto;
 import org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToStandbyRequestProto;
 import org.apache.hadoop.ha.proto.HAServiceProtocolProtos.TransitionToObserverRequestProto;
-import org.apache.hadoop.ipc.ProtobufHelper;
 import org.apache.hadoop.ipc.ProtobufRpcEngine2;
 import org.apache.hadoop.ipc.ProtocolTranslator;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.security.UserGroupInformation;

 import org.apache.hadoop.thirdparty.protobuf.RpcController;
-import org.apache.hadoop.thirdparty.protobuf.ServiceException;
+
+import static org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc;

 /**
  * This class is the client side translator to translate the requests made on
@@ -84,60 +83,39 @@ public HAServiceProtocolClientSideTranslatorPB(

   @Override
   public void monitorHealth() throws IOException {
-    try {
-      rpcProxy.monitorHealth(NULL_CONTROLLER, MONITOR_HEALTH_REQ);
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    ipc(() -> rpcProxy.monitorHealth(NULL_CONTROLLER, MONITOR_HEALTH_REQ));
   }

   @Override
   public void transitionToActive(StateChangeRequestInfo reqInfo) throws IOException {
-    try {
       TransitionToActiveRequestProto req =
           TransitionToActiveRequestProto.newBuilder()
               .setReqInfo(convert(reqInfo)).build();
-
-      rpcProxy.transitionToActive(NULL_CONTROLLER, req);
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    ipc(() -> rpcProxy.transitionToActive(NULL_CONTROLLER, req));
   }

   @Override
   public void transitionToStandby(StateChangeRequestInfo reqInfo) throws IOException {
-    try {
       TransitionToStandbyRequestProto req =
           TransitionToStandbyRequestProto.newBuilder()
               .setReqInfo(convert(reqInfo)).build();
-      rpcProxy.transitionToStandby(NULL_CONTROLLER, req);
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    ipc(() -> rpcProxy.transitionToStandby(NULL_CONTROLLER, req));
   }

   @Override
   public void transitionToObserver(StateChangeRequestInfo reqInfo)
       throws IOException {
-    try {
       TransitionToObserverRequestProto req =
           TransitionToObserverRequestProto.newBuilder()
               .setReqInfo(convert(reqInfo)).build();
-      rpcProxy.transitionToObserver(NULL_CONTROLLER, req);
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    ipc(() -> rpcProxy.transitionToObserver(NULL_CONTROLLER, req));
   }

   @Override
   public HAServiceStatus getServiceStatus() throws IOException {
     GetServiceStatusResponseProto status;
-    try {
-      status = rpcProxy.getServiceStatus(NULL_CONTROLLER,
-          GET_SERVICE_STATUS_REQ);
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    status = ipc(() -> rpcProxy.getServiceStatus(NULL_CONTROLLER,
+        GET_SERVICE_STATUS_REQ));

     HAServiceStatus ret = new HAServiceStatus(
         convert(status.getState()));
@@ -27,15 +27,14 @@
 import org.apache.hadoop.ha.ZKFCProtocol;
 import org.apache.hadoop.ha.proto.ZKFCProtocolProtos.CedeActiveRequestProto;
 import org.apache.hadoop.ha.proto.ZKFCProtocolProtos.GracefulFailoverRequestProto;
-import org.apache.hadoop.ipc.ProtobufHelper;
 import org.apache.hadoop.ipc.ProtobufRpcEngine2;
 import org.apache.hadoop.ipc.ProtocolTranslator;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.security.AccessControlException;
 import org.apache.hadoop.security.UserGroupInformation;

 import org.apache.hadoop.thirdparty.protobuf.RpcController;
-import org.apache.hadoop.thirdparty.protobuf.ServiceException;
+
+import static org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc;


 public class ZKFCProtocolClientSideTranslatorPB implements
@@ -57,24 +56,16 @@ public ZKFCProtocolClientSideTranslatorPB(
   @Override
   public void cedeActive(int millisToCede) throws IOException,
       AccessControlException {
-    try {
       CedeActiveRequestProto req = CedeActiveRequestProto.newBuilder()
           .setMillisToCede(millisToCede)
           .build();
-      rpcProxy.cedeActive(NULL_CONTROLLER, req);
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    ipc(() -> rpcProxy.cedeActive(NULL_CONTROLLER, req));
   }

   @Override
   public void gracefulFailover() throws IOException, AccessControlException {
-    try {
-      rpcProxy.gracefulFailover(NULL_CONTROLLER,
-          GracefulFailoverRequestProto.getDefaultInstance());
-    } catch (ServiceException e) {
-      throw ProtobufHelper.getRemoteException(e);
-    }
+    ipc(() -> rpcProxy.gracefulFailover(NULL_CONTROLLER,
+        GracefulFailoverRequestProto.getDefaultInstance()));
   }
@@ -18,10 +18,10 @@
 package org.apache.hadoop.ipc;

 import java.io.IOException;
-import java.util.concurrent.ConcurrentHashMap;

 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.io.Text;
+import org.apache.hadoop.ipc.internal.ShadedProtobufHelper;
 import org.apache.hadoop.security.proto.SecurityProtos.TokenProto;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
@@ -30,10 +30,18 @@
 import org.apache.hadoop.thirdparty.protobuf.ServiceException;

 /**
- * Helper methods for protobuf related RPC implementation
+ * Helper methods for protobuf related RPC implementation.
+ * This is deprecated because it references protobuf 2.5 classes
+ * as well as the shaded ones -and so needs an unshaded protobuf-2.5
+ * JAR on the classpath during execution.
+ * It MUST NOT be used internally; it is retained in case existing,
+ * external applications already use it.
+ * @deprecated hadoop code MUST use {@link ShadedProtobufHelper}.
  */
 @InterfaceAudience.Private
+@Deprecated
 public class ProtobufHelper {

   private ProtobufHelper() {
     // Hidden constructor for class with only static helper methods
   }
@@ -46,15 +54,13 @@ private ProtobufHelper() {
    * a new IOException that wraps the unexpected ServiceException.
    */
   public static IOException getRemoteException(ServiceException se) {
-    Throwable e = se.getCause();
-    if (e == null) {
-      return new IOException(se);
-    }
-    return e instanceof IOException ? (IOException) e : new IOException(se);
+    return ShadedProtobufHelper.getRemoteException(se);
   }

   /**
-   * Kept for backward compatible.
+   * Extract the remote exception from an unshaded version of the protobuf
+   * libraries.
+   * Kept for backward compatibility.
    * Return the IOException thrown by the remote server wrapped in
    * ServiceException as cause.
    * @param se ServiceException that wraps IO exception thrown by the server
@@ -71,29 +77,13 @@ public static IOException getRemoteException(
     return e instanceof IOException ? (IOException) e : new IOException(se);
   }

-  /**
-   * Map used to cache fixed strings to ByteStrings. Since there is no
-   * automatic expiration policy, only use this for strings from a fixed, small
-   * set.
-   * <p/>
-   * This map should not be accessed directly. Used the getFixedByteString
-   * methods instead.
-   */
-  private final static ConcurrentHashMap<Object, ByteString>
-      FIXED_BYTESTRING_CACHE = new ConcurrentHashMap<>();
-
   /**
    * Get the ByteString for frequently used fixed and small set strings.
    * @param key string
    * @return the ByteString for frequently used fixed and small set strings.
    */
   public static ByteString getFixedByteString(Text key) {
-    ByteString value = FIXED_BYTESTRING_CACHE.get(key);
-    if (value == null) {
-      value = ByteString.copyFromUtf8(key.toString());
-      FIXED_BYTESTRING_CACHE.put(new Text(key.copyBytes()), value);
-    }
-    return value;
+    return ShadedProtobufHelper.getFixedByteString(key);
   }

   /**
@@ -102,34 +92,40 @@ public static ByteString getFixedByteString(Text key) {
    * @return ByteString for frequently used fixed and small set strings.
    */
   public static ByteString getFixedByteString(String key) {
-    ByteString value = FIXED_BYTESTRING_CACHE.get(key);
-    if (value == null) {
-      value = ByteString.copyFromUtf8(key);
-      FIXED_BYTESTRING_CACHE.put(key, value);
-    }
-    return value;
+    return ShadedProtobufHelper.getFixedByteString(key);
   }

   /**
    * Get the byte string of a non-null byte array.
    * If the array is 0 bytes long, return a singleton to reduce object allocation.
    * @param bytes bytes to convert.
    * @return a value
    */
   public static ByteString getByteString(byte[] bytes) {
-    // return singleton to reduce object allocation
-    return (bytes.length == 0) ? ByteString.EMPTY : ByteString.copyFrom(bytes);
+    return ShadedProtobufHelper.getByteString(bytes);
   }

   /**
    * Get a token from a TokenProto payload.
    * @param tokenProto marshalled token
    * @return the token.
    */
   public static Token<? extends TokenIdentifier> tokenFromProto(
       TokenProto tokenProto) {
-    Token<? extends TokenIdentifier> token = new Token<>(
-        tokenProto.getIdentifier().toByteArray(),
-        tokenProto.getPassword().toByteArray(), new Text(tokenProto.getKind()),
-        new Text(tokenProto.getService()));
-    return token;
+    return ShadedProtobufHelper.tokenFromProto(tokenProto);
   }

   /**
    * Create a {@code TokenProto} instance
    * from a hadoop token.
    * This builds and caches the fields
    * (identifier, password, kind, service) but not
    * renewer or any payload.
    * @param tok token
    * @return a marshallable protobuf class.
    */
   public static TokenProto protoFromToken(Token<?> tok) {
-    TokenProto.Builder builder = TokenProto.newBuilder().
-        setIdentifier(getByteString(tok.getIdentifier())).
-        setPassword(getByteString(tok.getPassword())).
-        setKindBytes(getFixedByteString(tok.getKind())).
-        setServiceBytes(getFixedByteString(tok.getService()));
-    return builder.build();
+    return ShadedProtobufHelper.protoFromToken(tok);
   }
 }
@@ -0,0 +1,125 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.ipc;

import java.io.IOException;
import java.nio.ByteBuffer;
import java.util.concurrent.atomic.AtomicBoolean;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.util.Preconditions;

/**
 * A RpcWritable wrapper for unshaded protobuf messages.
 * This class isolates unshaded protobuf classes from
 * the rest of the RPC codebase, so it can operate without
 * needing that on the classpath <i>at runtime</i>.
 * The classes are needed at compile time; and if
 * unshaded protobuf messages are to be marshalled, they
 * will need to be on the classpath then.
 * That is implicit: it is impossible to pass in a class
 * which is a protobuf message unless that condition is met.
 */
@InterfaceAudience.Private
public class ProtobufWrapperLegacy extends RpcWritable {

  private com.google.protobuf.Message message;

  /**
   * Construct.
   * The type of the parameter is Object so as to keep the casting internal
   * to this class.
   * @param message message to wrap.
   * @throws IllegalArgumentException if the class is not a protobuf message.
   */
  public ProtobufWrapperLegacy(Object message) {
    Preconditions.checkArgument(isUnshadedProtobufMessage(message),
        "message class is not an unshaded protobuf message %s",
        message.getClass());
    this.message = (com.google.protobuf.Message) message;
  }

  public com.google.protobuf.Message getMessage() {
    return message;
  }

  @Override
  public void writeTo(ResponseBuffer out) throws IOException {
    int length = message.getSerializedSize();
    length += com.google.protobuf.CodedOutputStream.
        computeUInt32SizeNoTag(length);
    out.ensureCapacity(length);
    message.writeDelimitedTo(out);
  }

  @SuppressWarnings("unchecked")
  @Override
  protected <T> T readFrom(ByteBuffer bb) throws IOException {
    // using the parser with a byte[]-backed coded input stream is the
    // most efficient way to deserialize a protobuf. it has a direct
    // path to the PB ctor that doesn't create multi-layered streams
    // that internally buffer.
    com.google.protobuf.CodedInputStream cis =
        com.google.protobuf.CodedInputStream.newInstance(
            bb.array(), bb.position() + bb.arrayOffset(), bb.remaining());
    try {
      cis.pushLimit(cis.readRawVarint32());
      message = message.getParserForType().parseFrom(cis);
      cis.checkLastTagWas(0);
    } finally {
      // advance over the bytes read.
      bb.position(bb.position() + cis.getTotalBytesRead());
    }
    return (T) message;
  }

  /**
   * Has protobuf been looked for and is known as absent?
   * Saves a check on every message.
   */
  private static final AtomicBoolean PROTOBUF_KNOWN_NOT_FOUND =
      new AtomicBoolean(false);

  /**
   * Is a message an unshaded protobuf message?
   * @param payload payload
   * @return true if protobuf.jar is on the classpath and the payload is a Message
   */
  public static boolean isUnshadedProtobufMessage(Object payload) {
    if (PROTOBUF_KNOWN_NOT_FOUND.get()) {
      // protobuf is known to be absent. fail fast without examining
      // jars or generating exceptions.
      return false;
    }
    // load the protobuf message class.
    // if it does not load, then the payload is guaranteed not to be
    // an unshaded protobuf message
    // this relies on classloader caching for performance
    try {
      Class<?> protobufMessageClazz =
          Class.forName("com.google.protobuf.Message");
      return protobufMessageClazz.isAssignableFrom(payload.getClass());
    } catch (ClassNotFoundException e) {
      PROTOBUF_KNOWN_NOT_FOUND.set(true);
      return false;
    }
  }
}
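The isUnshadedProtobufMessage() probe replaces a compile-time instanceof test with a reflective lookup, so the dispatch code can run without com.google.protobuf.Message being loadable at all. The probe-and-cache idiom generalizes to any optional dependency; a self-contained sketch of the same technique (OptionalClassProbe is illustrative, not part of the patch):

import java.util.concurrent.atomic.AtomicBoolean;

/** Sketch: probe for an optional class once, cache a negative result. */
final class OptionalClassProbe {

  /** The optional class; this name mirrors the one probed in the patch. */
  private static final String OPTIONAL_CLASS = "com.google.protobuf.Message";

  /** Set once the class is known to be absent; saves later lookups. */
  private static final AtomicBoolean KNOWN_ABSENT = new AtomicBoolean(false);

  private OptionalClassProbe() {
  }

  /** Is the payload an instance of the (possibly absent) class? */
  static boolean isInstanceOf(Object payload) {
    if (KNOWN_ABSENT.get()) {
      // the class failed to load earlier; fail fast, no reflection.
      return false;
    }
    try {
      // classloader caching keeps repeated successful lookups cheap.
      return Class.forName(OPTIONAL_CLASS)
          .isAssignableFrom(payload.getClass());
    } catch (ClassNotFoundException e) {
      KNOWN_ABSENT.set(true);
      return false;
    }
  }
}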
@@ -31,9 +31,9 @@
 import org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.GetProtocolSignatureResponseProto;
 import org.apache.hadoop.ipc.protobuf.ProtocolInfoProtos.ProtocolSignatureProto;
 import org.apache.hadoop.net.NetUtils;

 import org.apache.hadoop.thirdparty.protobuf.RpcController;
-import org.apache.hadoop.thirdparty.protobuf.ServiceException;
+
+import static org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc;

 /**
  * This class maintains a cache of protocol versions and corresponding protocol
@@ -122,12 +122,8 @@ public static boolean isMethodSupported(Object rpcProxy, Class<?> protocol,
     builder.setProtocol(protocol.getName());
     builder.setRpcKind(rpcKind.toString());
     GetProtocolSignatureResponseProto resp;
-    try {
-      resp = protocolInfoProxy.getProtocolSignature(NULL_CONTROLLER,
-          builder.build());
-    } catch (ServiceException se) {
-      throw ProtobufHelper.getRemoteException(se);
-    }
+    resp = ipc(() -> protocolInfoProxy.getProtocolSignature(NULL_CONTROLLER,
+        builder.build()));
     versionMap = convertProtocolSignatureProtos(resp
         .getProtocolSignatureList());
     putVersionSignatureMap(serverAddress, protocol.getName(),
@@ -41,9 +41,11 @@ static RpcWritable wrap(Object o) {
     if (o instanceof RpcWritable) {
       return (RpcWritable)o;
     } else if (o instanceof Message) {
+      // hadoop shaded protobuf
       return new ProtobufWrapper((Message)o);
-    } else if (o instanceof com.google.protobuf.Message) {
-      return new ProtobufWrapperLegacy((com.google.protobuf.Message) o);
+    } else if (ProtobufWrapperLegacy.isUnshadedProtobufMessage(o)) {
+      // unshaded protobuf
+      return new ProtobufWrapperLegacy(o);
     } else if (o instanceof Writable) {
       return new WritableWrapper((Writable)o);
     }
@@ -134,49 +136,6 @@ <T> T readFrom(ByteBuffer bb) throws IOException {
     }
   }

-  // adapter for Protobufs.
-  static class ProtobufWrapperLegacy extends RpcWritable {
-    private com.google.protobuf.Message message;
-
-    ProtobufWrapperLegacy(com.google.protobuf.Message message) {
-      this.message = message;
-    }
-
-    com.google.protobuf.Message getMessage() {
-      return message;
-    }
-
-    @Override
-    void writeTo(ResponseBuffer out) throws IOException {
-      int length = message.getSerializedSize();
-      length += com.google.protobuf.CodedOutputStream.
-          computeUInt32SizeNoTag(length);
-      out.ensureCapacity(length);
-      message.writeDelimitedTo(out);
-    }
-
-    @SuppressWarnings("unchecked")
-    @Override
-    <T> T readFrom(ByteBuffer bb) throws IOException {
-      // using the parser with a byte[]-backed coded input stream is the
-      // most efficient way to deserialize a protobuf. it has a direct
-      // path to the PB ctor that doesn't create multi-layered streams
-      // that internally buffer.
-      com.google.protobuf.CodedInputStream cis =
-          com.google.protobuf.CodedInputStream.newInstance(
-              bb.array(), bb.position() + bb.arrayOffset(), bb.remaining());
-      try {
-        cis.pushLimit(cis.readRawVarint32());
-        message = message.getParserForType().parseFrom(cis);
-        cis.checkLastTagWas(0);
-      } finally {
-        // advance over the bytes read.
-        bb.position(bb.position() + cis.getTotalBytesRead());
-      }
-      return (T)message;
-    }
-  }
-
 /**
  * adapter to allow decoding of writables and protobufs from a byte buffer.
  */
@ -0,0 +1,170 @@
|
||||
/**
|
||||
* Licensed to the Apache Software Foundation (ASF) under one
|
||||
* or more contributor license agreements. See the NOTICE file
|
||||
* distributed with this work for additional information
|
||||
* regarding copyright ownership. The ASF licenses this file
|
||||
* to you under the Apache License, Version 2.0 (the
|
||||
* "License"); you may not use this file except in compliance
|
||||
* with the License. You may obtain a copy of the License at
|
||||
*
|
||||
* http://www.apache.org/licenses/LICENSE-2.0
|
||||
*
|
||||
* Unless required by applicable law or agreed to in writing, software
|
||||
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.hadoop.ipc.internal;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.concurrent.ConcurrentHashMap;
|
||||
|
||||
import org.apache.hadoop.classification.InterfaceAudience;
|
||||
import org.apache.hadoop.classification.InterfaceStability;
|
||||
import org.apache.hadoop.io.Text;
|
||||
import org.apache.hadoop.security.proto.SecurityProtos.TokenProto;
|
||||
import org.apache.hadoop.security.token.Token;
|
||||
import org.apache.hadoop.security.token.TokenIdentifier;
|
||||
import org.apache.hadoop.thirdparty.protobuf.ByteString;
|
||||
import org.apache.hadoop.thirdparty.protobuf.ServiceException;
|
||||
|
||||
/**
|
||||
* Helper methods for protobuf related RPC implementation using the
|
||||
* hadoop {@code org.apache.hadoop.thirdparty.protobuf} shaded version.
|
||||
* This is <i>absolutely private to hadoop-* modules</i>.
|
||||
*/
|
||||
@InterfaceAudience.Private
|
||||
@InterfaceStability.Unstable
|
||||
public final class ShadedProtobufHelper {
|
||||
|
||||
private ShadedProtobufHelper() {
|
||||
// Hidden constructor for class with only static helper methods
|
||||
}
|
||||
|
||||
/**
|
||||
* Return the IOException thrown by the remote server wrapped in
|
||||
* ServiceException as cause.
|
||||
* The signature of this method changes with updates to the hadoop-thirdparty
|
||||
* shaded protobuf library.
|
||||
* @param se ServiceException that wraps IO exception thrown by the server
|
||||
* @return Exception wrapped in ServiceException or
|
||||
* a new IOException that wraps the unexpected ServiceException.
|
||||
*/
|
||||
@InterfaceAudience.Private
|
||||
@InterfaceStability.Unstable
|
||||
public static IOException getRemoteException(ServiceException se) {
|
||||
Throwable e = se.getCause();
|
||||
if (e == null) {
|
||||
return new IOException(se);
|
||||
}
|
||||
return e instanceof IOException
|
||||
? (IOException) e
|
||||
: new IOException(se);
|
||||
}
|
||||
|
||||
/**
|
||||
* Map used to cache fixed strings to ByteStrings. Since there is no
|
||||
* automatic expiration policy, only use this for strings from a fixed, small
|
||||
* set.
|
||||
* <p>
|
||||
* This map should not be accessed directly. Used the getFixedByteString
|
||||
* methods instead.
|
||||
*/
|
||||
private static final ConcurrentHashMap<Object, ByteString>
|
||||
FIXED_BYTESTRING_CACHE = new ConcurrentHashMap<>();
|
||||
|
||||
/**
|
||||
* Get the ByteString for frequently used fixed and small set strings.
|
||||
* @param key Hadoop Writable Text string
|
||||
* @return the ByteString for frequently used fixed and small set strings.
|
||||
*/
|
||||
public static ByteString getFixedByteString(Text key) {
|
||||
ByteString value = FIXED_BYTESTRING_CACHE.get(key);
|
||||
if (value == null) {
|
||||
value = ByteString.copyFromUtf8(key.toString());
|
||||
FIXED_BYTESTRING_CACHE.put(new Text(key.copyBytes()), value);
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the ByteString for frequently used fixed and small set strings.
|
||||
* @param key string
|
||||
* @return ByteString for frequently used fixed and small set strings.
|
||||
*/
|
||||
public static ByteString getFixedByteString(String key) {
|
||||
ByteString value = FIXED_BYTESTRING_CACHE.get(key);
|
||||
if (value == null) {
|
||||
value = ByteString.copyFromUtf8(key);
|
||||
FIXED_BYTESTRING_CACHE.put(key, value);
|
||||
}
|
||||
return value;
|
||||
}
|
||||
|
||||
/**
|
||||
* Get the byte string of a non-null byte array.
|
||||
* If the array is 0 bytes long, return a singleton to reduce object allocation.
|
||||
* @param bytes bytes to convert.
|
||||
* @return the protobuf byte string representation of the array.
|
||||
*/
|
||||
public static ByteString getByteString(byte[] bytes) {
|
||||
// return singleton to reduce object allocation
|
||||
return (bytes.length == 0)
|
||||
? ByteString.EMPTY
|
||||
: ByteString.copyFrom(bytes);
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a hadoop token from a protobuf token.
|
||||
* @param tokenProto token
|
||||
* @return a new token
|
||||
*/
|
||||
public static Token<? extends TokenIdentifier> tokenFromProto(
|
||||
TokenProto tokenProto) {
|
||||
Token<? extends TokenIdentifier> token = new Token<>(
|
||||
tokenProto.getIdentifier().toByteArray(),
|
||||
tokenProto.getPassword().toByteArray(),
|
||||
new Text(tokenProto.getKind()),
|
||||
new Text(tokenProto.getService()));
|
||||
return token;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create a {@code TokenProto} instance
|
||||
* from a hadoop token.
|
||||
* This builds and caches the fields
|
||||
* (identifier, password, kind, service) but not
|
||||
* renewer or any payload.
|
||||
* @param tok token
|
||||
* @return a marshallable protobuf class.
|
||||
*/
|
||||
public static TokenProto protoFromToken(Token<?> tok) {
|
||||
TokenProto.Builder builder = TokenProto.newBuilder().
|
||||
setIdentifier(getByteString(tok.getIdentifier())).
|
||||
setPassword(getByteString(tok.getPassword())).
|
||||
setKindBytes(getFixedByteString(tok.getKind())).
|
||||
setServiceBytes(getFixedByteString(tok.getService()));
|
||||
return builder.build();
|
||||
}
|
||||
|
||||
/**
|
||||
* Evaluate a protobuf call, converting any ServiceException to an IOException.
|
||||
* @param call invocation to make
|
||||
* @return the result of the call
|
||||
* @param <T> type of the result
|
||||
* @throws IOException any translated protobuf exception
|
||||
*/
|
||||
public static <T> T ipc(IpcCall<T> call) throws IOException {
|
||||
try {
|
||||
return call.call();
|
||||
} catch (ServiceException e) {
|
||||
throw getRemoteException(e);
|
||||
}
|
||||
}
|
||||
|
||||
@FunctionalInterface
|
||||
public interface IpcCall<T> {
|
||||
T call() throws ServiceException;
|
||||
}
|
||||
}
|
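The token helpers above are a straight lift of the deprecated ProtobufHelper logic, so marshalling behaviour is unchanged. A hedged round-trip sketch using only methods defined in this file (the TokenRoundTripSketch class and the token field values are illustrative):

import org.apache.hadoop.io.Text;
import org.apache.hadoop.ipc.internal.ShadedProtobufHelper;
import org.apache.hadoop.security.proto.SecurityProtos.TokenProto;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;

public class TokenRoundTripSketch {
  public static void main(String[] args) {
    Token<TokenIdentifier> token = new Token<>(
        new byte[]{1, 2}, new byte[]{3, 4},
        new Text("kind"), new Text("service"));
    // identifier, password, kind and service are marshalled;
    // renewer and any extra payload are not.
    TokenProto proto = ShadedProtobufHelper.protoFromToken(token);
    Token<? extends TokenIdentifier> copy =
        ShadedProtobufHelper.tokenFromProto(proto);
    System.out.println(copy.getKind() + "@" + copy.getService());
  }
}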
@@ -0,0 +1,28 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

/**
 * IPC internal classes not for any use by libraries outside
 * the apache hadoop source tree.
 */
@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce", "YARN"})
@InterfaceStability.Unstable
package org.apache.hadoop.ipc.internal;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
@@ -18,8 +18,12 @@

 /**
  * Tools to help define network clients and servers.
+ * Other ASF projects use this package, often with their own shaded/unshaded
+ * versions of protobuf messages.
+ * Changes to the API signatures will break things, especially changes to
+ * {@link org.apache.hadoop.ipc.RPC} and {@link org.apache.hadoop.ipc.RpcEngine}.
  */
-@InterfaceAudience.LimitedPrivate({"HBase", "HDFS", "MapReduce"})
+@InterfaceAudience.LimitedPrivate({"HBase", "HDFS", "MapReduce", "YARN", "Hive", "Ozone"})
 @InterfaceStability.Evolving
 package org.apache.hadoop.ipc;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -25,7 +25,6 @@
 import java.util.Collection;
 import java.util.List;

-import org.apache.hadoop.ipc.ProtobufHelper;
 import org.apache.hadoop.ipc.ProtocolMetaInterface;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.ipc.RefreshResponse;
@@ -34,9 +33,9 @@
 import org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshRequestProto;
 import org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseProto;
 import org.apache.hadoop.ipc.proto.GenericRefreshProtocolProtos.GenericRefreshResponseCollectionProto;

 import org.apache.hadoop.thirdparty.protobuf.RpcController;
-import org.apache.hadoop.thirdparty.protobuf.ServiceException;
+
+import static org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc;

 public class GenericRefreshProtocolClientSideTranslatorPB implements
     ProtocolMetaInterface, GenericRefreshProtocol, Closeable {
@@ -59,17 +58,13 @@ public void close() throws IOException {
   public Collection<RefreshResponse> refresh(String identifier, String[] args) throws IOException {
     List<String> argList = Arrays.asList(args);

-    try {
       GenericRefreshRequestProto request = GenericRefreshRequestProto.newBuilder()
-          .setIdentifier(identifier)
-          .addAllArgs(argList)
-          .build();
+          .setIdentifier(identifier).addAllArgs(argList).build();

-      GenericRefreshResponseCollectionProto resp = rpcProxy.refresh(NULL_CONTROLLER, request);
+      GenericRefreshResponseCollectionProto resp = ipc(() ->
+          rpcProxy.refresh(NULL_CONTROLLER, request));
       return unpack(resp);
-    } catch (ServiceException se) {
-      throw ProtobufHelper.getRemoteException(se);
-    }
   }

   private Collection<RefreshResponse> unpack(GenericRefreshResponseCollectionProto collection) {
@@ -21,16 +21,14 @@
 import java.io.Closeable;
 import java.io.IOException;

-import org.apache.hadoop.ipc.ProtobufHelper;
 import org.apache.hadoop.ipc.ProtocolMetaInterface;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.ipc.RpcClientUtil;
 import org.apache.hadoop.ipc.RefreshCallQueueProtocol;
 import org.apache.hadoop.ipc.proto.RefreshCallQueueProtocolProtos.RefreshCallQueueRequestProto;
 import org.apache.hadoop.ipc.protocolPB.RefreshCallQueueProtocolPB;

 import org.apache.hadoop.thirdparty.protobuf.RpcController;
-import org.apache.hadoop.thirdparty.protobuf.ServiceException;
+
+import static org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc;

 public class RefreshCallQueueProtocolClientSideTranslatorPB implements
     ProtocolMetaInterface, RefreshCallQueueProtocol, Closeable {
@@ -55,12 +53,8 @@ public void close() throws IOException {

   @Override
   public void refreshCallQueue() throws IOException {
-    try {
-      rpcProxy.refreshCallQueue(NULL_CONTROLLER,
-          VOID_REFRESH_CALL_QUEUE_REQUEST);
-    } catch (ServiceException se) {
-      throw ProtobufHelper.getRemoteException(se);
-    }
+    ipc(() -> rpcProxy.refreshCallQueue(NULL_CONTROLLER,
+        VOID_REFRESH_CALL_QUEUE_REQUEST));
   }

   @Override
@@ -18,6 +18,7 @@

 package org.apache.hadoop.security;

+import org.apache.hadoop.ipc.internal.ShadedProtobufHelper;
 import org.apache.hadoop.thirdparty.protobuf.ByteString;

 import java.io.BufferedInputStream;
@@ -46,7 +47,6 @@
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.io.WritableUtils;
-import org.apache.hadoop.ipc.ProtobufHelper;
 import org.apache.hadoop.security.token.Token;
 import org.apache.hadoop.security.token.TokenIdentifier;
 import org.apache.hadoop.security.proto.SecurityProtos.CredentialsKVProto;
@@ -382,7 +382,7 @@ void writeProto(DataOutput out) throws IOException {
       CredentialsKVProto.Builder kv = CredentialsKVProto.newBuilder().
           setAliasBytes(ByteString.copyFrom(
               e.getKey().getBytes(), 0, e.getKey().getLength())).
-          setToken(ProtobufHelper.protoFromToken(e.getValue()));
+          setToken(ShadedProtobufHelper.protoFromToken(e.getValue()));
       storage.addTokens(kv.build());
     }
@@ -404,7 +404,7 @@ void readProto(DataInput in) throws IOException {
     CredentialsProto storage = CredentialsProto.parseDelimitedFrom((DataInputStream)in);
     for (CredentialsKVProto kv : storage.getTokensList()) {
       addToken(new Text(kv.getAliasBytes().toByteArray()),
-          ProtobufHelper.tokenFromProto(kv.getToken()));
+          ShadedProtobufHelper.tokenFromProto(kv.getToken()));
     }
     for (CredentialsKVProto kv : storage.getSecretsList()) {
       addSecretKey(new Text(kv.getAliasBytes().toByteArray()),
@@ -21,16 +21,14 @@
 import java.io.Closeable;
 import java.io.IOException;

-import org.apache.hadoop.ipc.ProtobufHelper;
 import org.apache.hadoop.ipc.ProtocolMetaInterface;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.ipc.RpcClientUtil;
 import org.apache.hadoop.security.authorize.RefreshAuthorizationPolicyProtocol;
 import org.apache.hadoop.security.proto.RefreshAuthorizationPolicyProtocolProtos.RefreshServiceAclRequestProto;
 import org.apache.hadoop.security.protocolPB.RefreshAuthorizationPolicyProtocolPB;

 import org.apache.hadoop.thirdparty.protobuf.RpcController;
-import org.apache.hadoop.thirdparty.protobuf.ServiceException;
+
+import static org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc;

 public class RefreshAuthorizationPolicyProtocolClientSideTranslatorPB implements
     ProtocolMetaInterface, RefreshAuthorizationPolicyProtocol, Closeable {
@@ -55,12 +53,8 @@ public void close() throws IOException {

   @Override
   public void refreshServiceAcl() throws IOException {
-    try {
-      rpcProxy.refreshServiceAcl(NULL_CONTROLLER,
-          VOID_REFRESH_SERVICE_ACL_REQUEST);
-    } catch (ServiceException se) {
-      throw ProtobufHelper.getRemoteException(se);
-    }
+    ipc(() -> rpcProxy.refreshServiceAcl(NULL_CONTROLLER,
+        VOID_REFRESH_SERVICE_ACL_REQUEST));
   }

   @Override
@@ -21,16 +21,15 @@
 import java.io.Closeable;
 import java.io.IOException;

-import org.apache.hadoop.ipc.ProtobufHelper;
 import org.apache.hadoop.ipc.ProtocolMetaInterface;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.ipc.RpcClientUtil;
 import org.apache.hadoop.security.RefreshUserMappingsProtocol;
 import org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos.RefreshSuperUserGroupsConfigurationRequestProto;
 import org.apache.hadoop.security.proto.RefreshUserMappingsProtocolProtos.RefreshUserToGroupsMappingsRequestProto;

 import org.apache.hadoop.thirdparty.protobuf.RpcController;
-import org.apache.hadoop.thirdparty.protobuf.ServiceException;
+
+import static org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc;

 public class RefreshUserMappingsProtocolClientSideTranslatorPB implements
     ProtocolMetaInterface, RefreshUserMappingsProtocol, Closeable {
@@ -59,22 +58,14 @@ public void close() throws IOException {

   @Override
   public void refreshUserToGroupsMappings() throws IOException {
-    try {
-      rpcProxy.refreshUserToGroupsMappings(NULL_CONTROLLER,
-          VOID_REFRESH_USER_TO_GROUPS_MAPPING_REQUEST);
-    } catch (ServiceException se) {
-      throw ProtobufHelper.getRemoteException(se);
-    }
+    ipc(() -> rpcProxy.refreshUserToGroupsMappings(NULL_CONTROLLER,
+        VOID_REFRESH_USER_TO_GROUPS_MAPPING_REQUEST));
   }

   @Override
   public void refreshSuperUserGroupsConfiguration() throws IOException {
-    try {
-      rpcProxy.refreshSuperUserGroupsConfiguration(NULL_CONTROLLER,
-          VOID_REFRESH_SUPERUSER_GROUPS_CONFIGURATION_REQUEST);
-    } catch (ServiceException se) {
-      throw ProtobufHelper.getRemoteException(se);
-    }
+    ipc(() -> rpcProxy.refreshSuperUserGroupsConfiguration(NULL_CONTROLLER,
+        VOID_REFRESH_SUPERUSER_GROUPS_CONFIGURATION_REQUEST));
   }

   @Override
@@ -20,7 +20,7 @@

 import java.io.Closeable;
 import java.io.IOException;
-import org.apache.hadoop.ipc.ProtobufHelper;

 import org.apache.hadoop.ipc.ProtocolMetaInterface;
 import org.apache.hadoop.ipc.RPC;
 import org.apache.hadoop.ipc.RpcClientUtil;
@@ -29,7 +29,8 @@
 import org.apache.hadoop.tools.proto.GetUserMappingsProtocolProtos.GetGroupsForUserResponseProto;

 import org.apache.hadoop.thirdparty.protobuf.RpcController;
 import org.apache.hadoop.thirdparty.protobuf.ServiceException;

+import static org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc;

 public class GetUserMappingsProtocolClientSideTranslatorPB implements
     ProtocolMetaInterface, GetUserMappingsProtocol, Closeable {
@@ -53,11 +54,7 @@ public String[] getGroupsForUser(String user) throws IOException {
     GetGroupsForUserRequestProto request = GetGroupsForUserRequestProto
         .newBuilder().setUser(user).build();
     GetGroupsForUserResponseProto resp;
-    try {
-      resp = rpcProxy.getGroupsForUser(NULL_CONTROLLER, request);
-    } catch (ServiceException se) {
-      throw ProtobufHelper.getRemoteException(se);
-    }
+    resp = ipc(() -> rpcProxy.getGroupsForUser(NULL_CONTROLLER, request));
     return resp.getGroupsList().toArray(new String[resp.getGroupsCount()]);
   }
@@ -0,0 +1,89 @@
/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.ipc;

import java.io.IOException;

import org.junit.Test;

import org.apache.hadoop.ipc.internal.ShadedProtobufHelper;
import org.apache.hadoop.test.AbstractHadoopTestBase;
import org.apache.hadoop.thirdparty.protobuf.ServiceException;

import static org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc;
import static org.apache.hadoop.test.LambdaTestUtils.intercept;
import static org.apache.hadoop.test.LambdaTestUtils.verifyCause;

/**
 * Test methods in {@link ShadedProtobufHelper}.
 */
public class TestShadedProtobufHelper extends AbstractHadoopTestBase {

  @Test
  public void testExtractRemoteExceptionNoCause() throws Throwable {
    ServiceException source = new ServiceException("empty");

    IOException ex = ShadedProtobufHelper.getRemoteException(source);
    verifyCause(ServiceException.class, ex);
  }

  @Test
  public void testExtractRemoteExceptionIOECause() throws Throwable {
    IOException source = new IOException("ioe");

    IOException ex = ShadedProtobufHelper.getRemoteException(
        new ServiceException(source));
    // if not the same, throw
    if (!(ex == source)) {
      throw ex;
    }
  }

  @Test
  public void testExtractRemoteExceptionOtherCause() throws Throwable {
    NullPointerException source = new NullPointerException("npe");

    IOException ex = ShadedProtobufHelper.getRemoteException(
        new ServiceException(source));
    // the wrapped NPE must surface through the ServiceException cause chain
    ServiceException c1 = verifyCause(ServiceException.class, ex);
    verifyCause(NullPointerException.class, c1);
  }

  @Test
  public void testIPCWrapperServiceException() throws Throwable {
    intercept(IOException.class, "expected", () -> {
      ipc(() -> {
        throw new ServiceException("expected");
      });
    });
  }

  @Test
  public void testIPCWrapperNPE() throws Throwable {
    final IOException ex = intercept(IOException.class, "npe", () -> {
      ipc(() -> {
        throw new ServiceException(new NullPointerException("npe"));
      });
    });
    ServiceException c1 = verifyCause(ServiceException.class, ex);
    verifyCause(NullPointerException.class, c1);
  }
}
@@ -819,7 +819,7 @@ public static <E extends Throwable> E verifyCause(
     if (cause == null || !clazz.isAssignableFrom(cause.getClass())) {
       throw caught;
     } else {
-      return (E) caught;
+      return (E) cause;
     }
   }
@ -67,7 +67,6 @@
|
||||
import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
|
||||
import org.apache.hadoop.hdfs.server.datanode.DiskBalancerWorkStatus;
|
||||
import org.apache.hadoop.hdfs.server.datanode.DiskBalancerWorkStatus.Result;
|
||||
import org.apache.hadoop.ipc.ProtobufHelper;
|
||||
import org.apache.hadoop.ipc.ProtobufRpcEngine2;
|
||||
import org.apache.hadoop.ipc.ProtocolMetaInterface;
|
||||
import org.apache.hadoop.ipc.ProtocolTranslator;
|
||||
@ -76,13 +75,13 @@
|
||||
import org.apache.hadoop.net.NetUtils;
|
||||
import org.apache.hadoop.security.UserGroupInformation;
|
||||
import org.apache.hadoop.security.token.Token;
|
||||
|
||||
import org.apache.hadoop.thirdparty.protobuf.RpcController;
|
||||
import org.apache.hadoop.thirdparty.protobuf.ServiceException;
|
||||
|
||||
import org.slf4j.Logger;
|
||||
import org.slf4j.LoggerFactory;
|
||||
|
||||
import static org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc;
|
||||
|
||||
/**
|
||||
* This class is the client side translator to translate the requests made on
|
||||
* {@link ClientDatanodeProtocol} interfaces to the RPC server implementing
|
||||
@ -197,31 +196,19 @@ public long getReplicaVisibleLength(ExtendedBlock b) throws IOException {
|
||||
GetReplicaVisibleLengthRequestProto req =
|
||||
GetReplicaVisibleLengthRequestProto.newBuilder()
|
||||
.setBlock(PBHelperClient.convert(b)).build();
|
||||
try {
|
||||
return rpcProxy.getReplicaVisibleLength(NULL_CONTROLLER, req).getLength();
|
||||
} catch (ServiceException e) {
|
||||
throw ProtobufHelper.getRemoteException(e);
|
||||
}
|
||||
return ipc(() -> rpcProxy.getReplicaVisibleLength(NULL_CONTROLLER, req).getLength());
|
||||
}
|
||||
|
||||
@Override
|
||||
public void refreshNamenodes() throws IOException {
|
||||
try {
|
||||
rpcProxy.refreshNamenodes(NULL_CONTROLLER, VOID_REFRESH_NAMENODES);
|
||||
} catch (ServiceException e) {
|
||||
throw ProtobufHelper.getRemoteException(e);
|
||||
}
|
||||
ipc(() -> rpcProxy.refreshNamenodes(NULL_CONTROLLER, VOID_REFRESH_NAMENODES));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void deleteBlockPool(String bpid, boolean force) throws IOException {
|
||||
DeleteBlockPoolRequestProto req = DeleteBlockPoolRequestProto.newBuilder()
|
||||
.setBlockPool(bpid).setForce(force).build();
|
||||
try {
|
||||
rpcProxy.deleteBlockPool(NULL_CONTROLLER, req);
|
||||
} catch (ServiceException e) {
|
||||
throw ProtobufHelper.getRemoteException(e);
|
||||
}
|
||||
ipc(() -> rpcProxy.deleteBlockPool(NULL_CONTROLLER, req));
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -232,11 +219,7 @@ public BlockLocalPathInfo getBlockLocalPathInfo(ExtendedBlock block,
|
||||
.setBlock(PBHelperClient.convert(block))
|
||||
.setToken(PBHelperClient.convert(token)).build();
|
||||
GetBlockLocalPathInfoResponseProto resp;
|
||||
try {
|
||||
resp = rpcProxy.getBlockLocalPathInfo(NULL_CONTROLLER, req);
|
||||
} catch (ServiceException e) {
|
||||
throw ProtobufHelper.getRemoteException(e);
|
||||
}
|
||||
resp = ipc(() -> rpcProxy.getBlockLocalPathInfo(NULL_CONTROLLER, req));
|
||||
return new BlockLocalPathInfo(PBHelperClient.convert(resp.getBlock()),
|
||||
resp.getLocalPath(), resp.getLocalMetaPath());
|
||||
}
|
||||
@ -257,94 +240,61 @@ public Object getUnderlyingProxyObject() {
|
||||
public void shutdownDatanode(boolean forUpgrade) throws IOException {
|
||||
ShutdownDatanodeRequestProto request = ShutdownDatanodeRequestProto
|
||||
.newBuilder().setForUpgrade(forUpgrade).build();
|
||||
try {
|
||||
rpcProxy.shutdownDatanode(NULL_CONTROLLER, request);
|
||||
} catch (ServiceException e) {
|
||||
throw ProtobufHelper.getRemoteException(e);
|
||||
}
|
||||
ipc(() -> rpcProxy.shutdownDatanode(NULL_CONTROLLER, request));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void evictWriters() throws IOException {
|
||||
try {
|
||||
rpcProxy.evictWriters(NULL_CONTROLLER, VOID_EVICT_WRITERS);
|
||||
} catch (ServiceException e) {
|
||||
throw ProtobufHelper.getRemoteException(e);
|
||||
}
|
||||
ipc(() -> rpcProxy.evictWriters(NULL_CONTROLLER, VOID_EVICT_WRITERS));
|
||||
}
|
||||
|
||||
@Override
|
||||
public DatanodeLocalInfo getDatanodeInfo() throws IOException {
|
||||
GetDatanodeInfoResponseProto response;
|
||||
try {
|
||||
response = rpcProxy.getDatanodeInfo(NULL_CONTROLLER,
|
||||
VOID_GET_DATANODE_INFO);
|
||||
response = ipc(() -> rpcProxy.getDatanodeInfo(NULL_CONTROLLER,
|
||||
VOID_GET_DATANODE_INFO));
|
||||
return PBHelperClient.convert(response.getLocalInfo());
|
||||
} catch (ServiceException e) {
|
||||
throw ProtobufHelper.getRemoteException(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void startReconfiguration() throws IOException {
|
||||
try {
|
||||
rpcProxy.startReconfiguration(NULL_CONTROLLER, VOID_START_RECONFIG);
|
||||
} catch (ServiceException e) {
|
||||
throw ProtobufHelper.getRemoteException(e);
|
||||
}
|
||||
ipc(() -> rpcProxy.startReconfiguration(NULL_CONTROLLER, VOID_START_RECONFIG));
|
||||
}
|
||||
|
||||
@Override
|
||||
public ReconfigurationTaskStatus getReconfigurationStatus()
|
||||
throws IOException {
|
||||
try {
|
||||
return ReconfigurationProtocolUtils.getReconfigurationStatus(
|
||||
rpcProxy
|
||||
.getReconfigurationStatus(
|
||||
ipc(() -> rpcProxy.getReconfigurationStatus(
|
||||
NULL_CONTROLLER,
|
||||
VOID_GET_RECONFIG_STATUS));
|
||||
} catch (ServiceException e) {
|
||||
throw ProtobufHelper.getRemoteException(e);
|
||||
}
|
||||
VOID_GET_RECONFIG_STATUS)));
|
||||
}
|
||||
|
||||
@Override
|
||||
public List<String> listReconfigurableProperties() throws IOException {
|
||||
ListReconfigurablePropertiesResponseProto response;
|
||||
try {
|
||||
response = rpcProxy.listReconfigurableProperties(NULL_CONTROLLER,
|
||||
VOID_LIST_RECONFIGURABLE_PROPERTIES);
|
||||
response = ipc(() -> rpcProxy.listReconfigurableProperties(NULL_CONTROLLER,
|
||||
VOID_LIST_RECONFIGURABLE_PROPERTIES));
|
||||
return response.getNameList();
|
||||
} catch (ServiceException e) {
|
||||
throw ProtobufHelper.getRemoteException(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public void triggerBlockReport(BlockReportOptions options)
|
||||
throws IOException {
|
||||
try {
|
||||
TriggerBlockReportRequestProto.Builder builder = TriggerBlockReportRequestProto.newBuilder().
|
||||
setIncremental(options.isIncremental());
|
||||
if (options.getNamenodeAddr() != null) {
|
||||
builder.setNnAddress(NetUtils.getHostPortString(options.getNamenodeAddr()));
|
||||
}
|
||||
rpcProxy.triggerBlockReport(NULL_CONTROLLER, builder.build());
|
||||
} catch (ServiceException e) {
|
||||
throw ProtobufHelper.getRemoteException(e);
|
||||
}
|
||||
ipc(() -> rpcProxy.triggerBlockReport(NULL_CONTROLLER, builder.build()));
}

@Override
public long getBalancerBandwidth() throws IOException {
GetBalancerBandwidthResponseProto response;
try {
response = rpcProxy.getBalancerBandwidth(NULL_CONTROLLER,
VOID_GET_BALANCER_BANDWIDTH);
response = ipc(() -> rpcProxy.getBalancerBandwidth(NULL_CONTROLLER,
VOID_GET_BALANCER_BANDWIDTH));
return response.getBandwidth();
} catch (ServiceException e) {
throw ProtobufHelper.getRemoteException(e);
}
}

/**
@ -363,7 +313,6 @@ public long getBalancerBandwidth() throws IOException {
public void submitDiskBalancerPlan(String planID, long planVersion,
String planFile, String planData, boolean skipDateCheck)
throws IOException {
try {
SubmitDiskBalancerPlanRequestProto request =
SubmitDiskBalancerPlanRequestProto.newBuilder()
.setPlanID(planID)
@ -372,10 +321,7 @@ public void submitDiskBalancerPlan(String planID, long planVersion,
.setPlan(planData)
.setIgnoreDateCheck(skipDateCheck)
.build();
rpcProxy.submitDiskBalancerPlan(NULL_CONTROLLER, request);
} catch (ServiceException e) {
throw ProtobufHelper.getRemoteException(e);
}
ipc(() -> rpcProxy.submitDiskBalancerPlan(NULL_CONTROLLER, request));
}

/**
@ -387,13 +333,9 @@ public void submitDiskBalancerPlan(String planID, long planVersion,
@Override
public void cancelDiskBalancePlan(String planID)
throws IOException {
try {
CancelPlanRequestProto request = CancelPlanRequestProto.newBuilder()
.setPlanID(planID).build();
rpcProxy.cancelDiskBalancerPlan(NULL_CONTROLLER, request);
} catch (ServiceException e) {
throw ProtobufHelper.getRemoteException(e);
}
ipc(() -> rpcProxy.cancelDiskBalancerPlan(NULL_CONTROLLER, request));
}

/**
@ -401,11 +343,10 @@ public void cancelDiskBalancePlan(String planID)
*/
@Override
public DiskBalancerWorkStatus queryDiskBalancerPlan() throws IOException {
try {
QueryPlanStatusRequestProto request =
QueryPlanStatusRequestProto.newBuilder().build();
QueryPlanStatusResponseProto response =
rpcProxy.queryDiskBalancerPlan(NULL_CONTROLLER, request);
ipc(() -> rpcProxy.queryDiskBalancerPlan(NULL_CONTROLLER, request));
DiskBalancerWorkStatus.Result result = Result.NO_PLAN;
if(response.hasResult()) {
result = DiskBalancerWorkStatus.Result.values()[
@ -416,30 +357,22 @@ public DiskBalancerWorkStatus queryDiskBalancerPlan() throws IOException {
response.hasPlanID() ? response.getPlanID() : null,
response.hasPlanFile() ? response.getPlanFile() : null,
response.hasCurrentStatus() ? response.getCurrentStatus() : null);
} catch (ServiceException e) {
throw ProtobufHelper.getRemoteException(e);
}
}

@Override
public String getDiskBalancerSetting(String key) throws IOException {
try {
DiskBalancerSettingRequestProto request =
DiskBalancerSettingRequestProto.newBuilder().setKey(key).build();
DiskBalancerSettingResponseProto response =
rpcProxy.getDiskBalancerSetting(NULL_CONTROLLER, request);
ipc(() -> rpcProxy.getDiskBalancerSetting(NULL_CONTROLLER, request));
return response.hasValue() ? response.getValue() : null;
} catch (ServiceException e) {
throw ProtobufHelper.getRemoteException(e);
}
}

@Override
public List<DatanodeVolumeInfo> getVolumeReport() throws IOException {
try {
List<DatanodeVolumeInfo> volumeInfoList = new ArrayList<>();
GetVolumeReportResponseProto volumeReport = rpcProxy.getVolumeReport(
NULL_CONTROLLER, VOID_GET_DATANODE_STORAGE_INFO);
GetVolumeReportResponseProto volumeReport = ipc(() -> rpcProxy.getVolumeReport(
NULL_CONTROLLER, VOID_GET_DATANODE_STORAGE_INFO));
List<DatanodeVolumeInfoProto> volumeProtoList = volumeReport
.getVolumeInfoList();
for (DatanodeVolumeInfoProto proto : volumeProtoList) {
@ -449,8 +382,5 @@ public List<DatanodeVolumeInfo> getVolumeReport() throws IOException {
PBHelperClient.convertStorageType(proto.getStorageType())));
}
return volumeInfoList;
} catch (ServiceException e) {
throw ProtobufHelper.getRemoteException(e);
}
}
}
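The hunks above show the migration pattern used throughout this commit: a try/catch over the shaded ServiceException collapses into a single ipc(() -> ...) call. As a rough sketch of what such a wrapper can look like (the real declaration lives in org.apache.hadoop.ipc.internal.ShadedProtobufHelper; the IpcCall name and exact signature here are assumptions, not a quote of the actual class):

    // Sketch only, assuming a functional interface named IpcCall.
    @FunctionalInterface
    public interface IpcCall<T> {
      T call() throws ServiceException; // the shaded ServiceException
    }

    public static <T> T ipc(IpcCall<T> call) throws IOException {
      try {
        return call.call();
      } catch (ServiceException e) {
        // convert the shaded exception into the IOException it carries
        throw getRemoteException(e);
      }
    }

Because the lambda is evaluated inside ipc(), each translator method keeps a single statement per RPC and no longer needs its own catch block.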

File diff suppressed because it is too large
@ -209,7 +209,7 @@
import org.apache.hadoop.io.EnumSetWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.erasurecode.ECSchema;
import org.apache.hadoop.ipc.ProtobufHelper;
import org.apache.hadoop.ipc.internal.ShadedProtobufHelper;
import org.apache.hadoop.security.proto.SecurityProtos.TokenProto;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.util.ChunkedArrayList;
@ -237,7 +237,7 @@ public class PBHelperClient {
FsAction.values();

private static ByteString getFixedByteString(String key) {
return ProtobufHelper.getFixedByteString(key);
return ShadedProtobufHelper.getFixedByteString(key);
}

/**
@ -260,7 +260,8 @@ private PBHelperClient() {

public static ByteString getByteString(byte[] bytes) {
// return singleton to reduce object allocation
return ProtobufHelper.getByteString(bytes);
// return singleton to reduce object allocation
return ShadedProtobufHelper.getByteString(bytes);
}

public static ShmId convert(ShortCircuitShmIdProto shmId) {
@ -328,7 +329,7 @@ public static ExtendedBlockProto convert(final ExtendedBlock b) {
}

public static TokenProto convert(Token<?> tok) {
return ProtobufHelper.protoFromToken(tok);
return ShadedProtobufHelper.protoFromToken(tok);
}

public static ShortCircuitShmIdProto convert(ShmId shmId) {
@ -814,8 +815,8 @@ public static StorageType[] convertStorageTypes(

public static Token<BlockTokenIdentifier> convert(
TokenProto blockToken) {
return (Token<BlockTokenIdentifier>) ProtobufHelper
.tokenFromProto(blockToken);
return (Token<BlockTokenIdentifier>) ShadedProtobufHelper.tokenFromProto(
blockToken);
}

// DatanodeId

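The PBHelperClient changes above are pure delegation: the public signatures stay put while the ByteString helpers now live beside the shaded protobuf types. As an illustration of the kind of caching getFixedByteString implies, here is a minimal sketch assuming a simple concurrent map; the shaded helper's actual implementation may well differ:

    // Illustrative sketch: cache one shaded ByteString per distinct key.
    private static final ConcurrentHashMap<String, ByteString> FIXED_BYTESTRING_CACHE =
        new ConcurrentHashMap<>();

    public static ByteString getFixedByteString(String key) {
      // reuse a single ByteString per key instead of allocating on every call
      return FIXED_BYTESTRING_CACHE.computeIfAbsent(key, ByteString::copyFromUtf8);
    }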
@ -33,7 +33,6 @@
import org.apache.hadoop.hdfs.protocol.proto.ReconfigurationProtocolProtos.ListReconfigurablePropertiesRequestProto;
import org.apache.hadoop.hdfs.protocol.proto.ReconfigurationProtocolProtos.ListReconfigurablePropertiesResponseProto;
import org.apache.hadoop.hdfs.protocol.proto.ReconfigurationProtocolProtos.StartReconfigurationRequestProto;
import org.apache.hadoop.ipc.ProtobufHelper;
import org.apache.hadoop.ipc.ProtobufRpcEngine2;
import org.apache.hadoop.ipc.ProtocolMetaInterface;
import org.apache.hadoop.ipc.ProtocolTranslator;
@ -44,7 +43,8 @@
import org.slf4j.LoggerFactory;

import org.apache.hadoop.thirdparty.protobuf.RpcController;
import org.apache.hadoop.thirdparty.protobuf.ServiceException;

import static org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc;

/**
* This class is the client side translator to translate the requests made on
@ -102,37 +102,25 @@ public Object getUnderlyingProxyObject() {

@Override
public void startReconfiguration() throws IOException {
try {
rpcProxy.startReconfiguration(NULL_CONTROLLER, VOID_START_RECONFIG);
} catch (ServiceException e) {
throw ProtobufHelper.getRemoteException(e);
}
ipc(() -> rpcProxy.startReconfiguration(NULL_CONTROLLER, VOID_START_RECONFIG));
}

@Override
public ReconfigurationTaskStatus getReconfigurationStatus()
throws IOException {
try {
return ReconfigurationProtocolUtils.getReconfigurationStatus(
rpcProxy
ipc(() -> rpcProxy
.getReconfigurationStatus(
NULL_CONTROLLER,
VOID_GET_RECONFIG_STATUS));
} catch (ServiceException e) {
throw ProtobufHelper.getRemoteException(e);
}
VOID_GET_RECONFIG_STATUS)));
}

@Override
public List<String> listReconfigurableProperties() throws IOException {
ListReconfigurablePropertiesResponseProto response;
try {
response = rpcProxy.listReconfigurableProperties(NULL_CONTROLLER,
VOID_LIST_RECONFIGURABLE_PROPERTIES);
response = ipc(() -> rpcProxy.listReconfigurableProperties(NULL_CONTROLLER,
VOID_LIST_RECONFIGURABLE_PROPERTIES));
return response.getNameList();
} catch (ServiceException e) {
throw ProtobufHelper.getRemoteException(e);
}
}

@Override

@ -146,7 +146,7 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd">
<dependency>
<groupId>com.google.protobuf</groupId>
<artifactId>protobuf-java</artifactId>
<scope>compile</scope>
<scope>${transient.protobuf2.scope}</scope>
</dependency>
<dependency>
<groupId>javax.servlet</groupId>

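Both this POM and the hadoop-hdfs-rbf one later in the diff replace the hard-coded compile scope with the ${transient.protobuf2.scope} property. One plausible wiring, assuming the property is derived in a parent POM from the common.protobuf2.scope build option (the surrounding properties block is illustrative and is not part of this diff):

    <properties>
      <!-- assumption: follows whatever common.protobuf2.scope is set to -->
      <transient.protobuf2.scope>${common.protobuf2.scope}</transient.protobuf2.scope>
    </properties>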
@ -103,7 +103,6 @@
import org.apache.hadoop.hdfs.server.federation.store.protocol.impl.pb.RemoveMountTableEntryResponsePBImpl;
import org.apache.hadoop.hdfs.server.federation.store.protocol.impl.pb.UpdateMountTableEntryRequestPBImpl;
import org.apache.hadoop.hdfs.server.federation.store.protocol.impl.pb.UpdateMountTableEntryResponsePBImpl;
import org.apache.hadoop.ipc.ProtobufHelper;
import org.apache.hadoop.ipc.ProtocolMetaInterface;
import org.apache.hadoop.ipc.ProtocolTranslator;
import org.apache.hadoop.ipc.RPC;
@ -111,6 +110,8 @@

import org.apache.hadoop.thirdparty.protobuf.ServiceException;

import static org.apache.hadoop.ipc.internal.ShadedProtobufHelper.getRemoteException;

/**
* This class forwards RouterAdminProtocol calls as RPC calls to the RouterAdmin server
* while translating from the parameter types used in RouterAdminProtocol to the
@ -156,7 +157,8 @@ public AddMountTableEntryResponse addMountTableEntry(
rpcProxy.addMountTableEntry(null, proto);
return new AddMountTableEntryResponsePBImpl(response);
} catch (ServiceException e) {
throw new IOException(ProtobufHelper.getRemoteException(e).getMessage());

throw new IOException(getRemoteException(e).getMessage());
}
}

@ -169,7 +171,7 @@ public AddMountTableEntriesResponse addMountTableEntries(AddMountTableEntriesReq
AddMountTableEntriesResponseProto response = rpcProxy.addMountTableEntries(null, proto);
return new AddMountTableEntriesResponsePBImpl(response);
} catch (ServiceException e) {
throw new IOException(ProtobufHelper.getRemoteException(e).getMessage());
throw new IOException(getRemoteException(e).getMessage());
}
}

@ -184,7 +186,8 @@ public UpdateMountTableEntryResponse updateMountTableEntry(
rpcProxy.updateMountTableEntry(null, proto);
return new UpdateMountTableEntryResponsePBImpl(response);
} catch (ServiceException e) {
throw new IOException(ProtobufHelper.getRemoteException(e).getMessage());

throw new IOException(getRemoteException(e).getMessage());
}
}

@ -199,7 +202,8 @@ public RemoveMountTableEntryResponse removeMountTableEntry(
rpcProxy.removeMountTableEntry(null, proto);
return new RemoveMountTableEntryResponsePBImpl(responseProto);
} catch (ServiceException e) {
throw new IOException(ProtobufHelper.getRemoteException(e).getMessage());

throw new IOException(getRemoteException(e).getMessage());
}
}

@ -214,7 +218,8 @@ public GetMountTableEntriesResponse getMountTableEntries(
rpcProxy.getMountTableEntries(null, proto);
return new GetMountTableEntriesResponsePBImpl(response);
} catch (ServiceException e) {
throw new IOException(ProtobufHelper.getRemoteException(e).getMessage());

throw new IOException(getRemoteException(e).getMessage());
}
}

@ -228,7 +233,8 @@ public EnterSafeModeResponse enterSafeMode(EnterSafeModeRequest request)
rpcProxy.enterSafeMode(null, proto);
return new EnterSafeModeResponsePBImpl(response);
} catch (ServiceException e) {
throw new IOException(ProtobufHelper.getRemoteException(e).getMessage());

throw new IOException(getRemoteException(e).getMessage());
}
}

@ -242,7 +248,8 @@ public LeaveSafeModeResponse leaveSafeMode(LeaveSafeModeRequest request)
rpcProxy.leaveSafeMode(null, proto);
return new LeaveSafeModeResponsePBImpl(response);
} catch (ServiceException e) {
throw new IOException(ProtobufHelper.getRemoteException(e).getMessage());

throw new IOException(getRemoteException(e).getMessage());
}
}

@ -256,7 +263,8 @@ public GetSafeModeResponse getSafeMode(GetSafeModeRequest request)
rpcProxy.getSafeMode(null, proto);
return new GetSafeModeResponsePBImpl(response);
} catch (ServiceException e) {
throw new IOException(ProtobufHelper.getRemoteException(e).getMessage());

throw new IOException(getRemoteException(e).getMessage());
}
}

@ -271,7 +279,8 @@ public DisableNameserviceResponse disableNameservice(
rpcProxy.disableNameservice(null, proto);
return new DisableNameserviceResponsePBImpl(response);
} catch (ServiceException e) {
throw new IOException(ProtobufHelper.getRemoteException(e).getMessage());

throw new IOException(getRemoteException(e).getMessage());
}
}

@ -286,7 +295,8 @@ public EnableNameserviceResponse enableNameservice(
rpcProxy.enableNameservice(null, proto);
return new EnableNameserviceResponsePBImpl(response);
} catch (ServiceException e) {
throw new IOException(ProtobufHelper.getRemoteException(e).getMessage());

throw new IOException(getRemoteException(e).getMessage());
}
}

@ -300,7 +310,8 @@ public GetDisabledNameservicesResponse getDisabledNameservices(
rpcProxy.getDisabledNameservices(null, proto);
return new GetDisabledNameservicesResponsePBImpl(response);
} catch (ServiceException e) {
throw new IOException(ProtobufHelper.getRemoteException(e).getMessage());

throw new IOException(getRemoteException(e).getMessage());
}
}

@ -315,7 +326,8 @@ public RefreshMountTableEntriesResponse refreshMountTableEntries(
rpcProxy.refreshMountTableEntries(null, proto);
return new RefreshMountTableEntriesResponsePBImpl(response);
} catch (ServiceException e) {
throw new IOException(ProtobufHelper.getRemoteException(e).getMessage());

throw new IOException(getRemoteException(e).getMessage());
}
}

@ -330,7 +342,8 @@ public GetDestinationResponse getDestination(
rpcProxy.getDestination(null, proto);
return new GetDestinationResponsePBImpl(response);
} catch (ServiceException e) {
throw new IOException(ProtobufHelper.getRemoteException(e).getMessage());

throw new IOException(getRemoteException(e).getMessage());
}
}

@ -344,7 +357,8 @@ public boolean refreshSuperUserGroupsConfiguration() throws IOException {
return new RefreshSuperUserGroupsConfigurationResponsePBImpl(response)
.getStatus();
} catch (ServiceException e) {
throw new IOException(ProtobufHelper.getRemoteException(e).getMessage());

throw new IOException(getRemoteException(e).getMessage());
}
}
}

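RouterAdminProtocolTranslatorPB is the one translator deliberately left on the try/catch style; only the helper moves, from ProtobufHelper to a static import of ShadedProtobufHelper.getRemoteException. For reference, the conventional behaviour of such a helper is to unwrap the IOException that the RPC engine stored as the ServiceException's cause; this is a sketch, not necessarily the exact ShadedProtobufHelper code:

    public static IOException getRemoteException(ServiceException e) {
      Throwable cause = e.getCause();
      // the RPC layer wraps the server-side IOException as the cause
      return cause instanceof IOException
          ? (IOException) cause
          : new IOException(e);
    }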
@ -211,9 +211,9 @@ public RouterAdminServer(Configuration conf, Router router)
RefreshCallQueueProtocolProtos.RefreshCallQueueProtocolService.
newReflectiveBlockingService(refreshCallQueueXlator);

DFSUtil.addPBProtocol(conf, GenericRefreshProtocolPB.class,
DFSUtil.addInternalPBProtocol(conf, GenericRefreshProtocolPB.class,
genericRefreshService, adminServer);
DFSUtil.addPBProtocol(conf, RefreshCallQueueProtocolPB.class,
DFSUtil.addInternalPBProtocol(conf, RefreshCallQueueProtocolPB.class,
refreshCallQueueService, adminServer);

registerRefreshFairnessPolicyControllerHandler();

@ -341,11 +341,11 @@ public RouterRpcServer(Configuration conf, Router router,
.build();

// Add all the RPC protocols that the Router implements
DFSUtil.addPBProtocol(
DFSUtil.addInternalPBProtocol(
conf, NamenodeProtocolPB.class, nnPbService, this.rpcServer);
DFSUtil.addPBProtocol(conf, RefreshUserMappingsProtocolPB.class,
DFSUtil.addInternalPBProtocol(conf, RefreshUserMappingsProtocolPB.class,
refreshUserMappingService, this.rpcServer);
DFSUtil.addPBProtocol(conf, GetUserMappingsProtocolPB.class,
DFSUtil.addInternalPBProtocol(conf, GetUserMappingsProtocolPB.class,
getUserMappingService, this.rpcServer);

// Set service-level authorization security policy

@ -196,7 +196,7 @@ private void setupRPCServer(final Configuration conf) throws IOException {
BlockingService nnProtoPbService =
NamenodeProtocolService.newReflectiveBlockingService(
nnProtoXlator);
DFSUtil.addPBProtocol(
DFSUtil.addInternalPBProtocol(
conf, NamenodeProtocolPB.class, nnProtoPbService, rpcServer);

DatanodeProtocolServerSideTranslatorPB dnProtoPbXlator =
@ -204,7 +204,7 @@ private void setupRPCServer(final Configuration conf) throws IOException {
BlockingService dnProtoPbService =
DatanodeProtocolService.newReflectiveBlockingService(
dnProtoPbXlator);
DFSUtil.addPBProtocol(
DFSUtil.addInternalPBProtocol(
conf, DatanodeProtocolPB.class, dnProtoPbService, rpcServer);

HAServiceProtocolServerSideTranslatorPB haServiceProtoXlator =
@ -212,7 +212,7 @@ private void setupRPCServer(final Configuration conf) throws IOException {
BlockingService haProtoPbService =
HAServiceProtocolService.newReflectiveBlockingService(
haServiceProtoXlator);
DFSUtil.addPBProtocol(
DFSUtil.addInternalPBProtocol(
conf, HAServiceProtocolPB.class, haProtoPbService, rpcServer);

this.rpcServer.addTerseExceptions(

@ -130,7 +130,7 @@ https://maven.apache.org/xsd/maven-4.0.0.xsd">
<dependency>
<groupId>com.google.protobuf</groupId>
<artifactId>protobuf-java</artifactId>
<scope>compile</scope>
<scope>${transient.protobuf2.scope}</scope>
</dependency>
<dependency>
<groupId>javax.servlet</groupId>

@ -67,6 +67,7 @@
import org.apache.commons.cli.ParseException;
import org.apache.commons.cli.PosixParser;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.ParentNotDirectoryException;
import org.apache.hadoop.fs.UnresolvedLinkException;
import org.apache.hadoop.hdfs.server.datanode.metrics.DataNodeMetrics;
@ -1361,7 +1362,30 @@ static URI trimUri(URI uri) {
}

/**
* Add protobuf based protocol to the {@link org.apache.hadoop.ipc.RPC.Server}
* Add protobuf based protocol to the {@link org.apache.hadoop.ipc.RPC.Server}.
* This method is for exclusive use by the hadoop libraries, as its signature
* changes with the version of the shaded protobuf library it has been built with.
* @param conf configuration
* @param protocol Protocol interface
* @param service service that implements the protocol
* @param server RPC server to which the protocol & implementation is
* added to
* @throws IOException failure
*/
@InterfaceAudience.Private
@InterfaceStability.Unstable
public static void addInternalPBProtocol(Configuration conf,
Class<?> protocol,
BlockingService service,
RPC.Server server) throws IOException {
RPC.setProtocolEngine(conf, protocol, ProtobufRpcEngine2.class);
server.addProtocol(RPC.RpcKind.RPC_PROTOCOL_BUFFER, protocol, service);
}

/**
* Add protobuf based protocol to the {@link org.apache.hadoop.ipc.RPC.Server}.
* Deprecated as it will only reliably compile if an unshaded protobuf library
* is also on the classpath.
* @param conf configuration
* @param protocol Protocol interface
* @param service service that implements the protocol
@ -1369,17 +1393,17 @@ static URI trimUri(URI uri) {
* added to
* @throws IOException
*/
@Deprecated
public static void addPBProtocol(Configuration conf, Class<?> protocol,
BlockingService service, RPC.Server server) throws IOException {
RPC.setProtocolEngine(conf, protocol, ProtobufRpcEngine2.class);
server.addProtocol(RPC.RpcKind.RPC_PROTOCOL_BUFFER, protocol, service);
addInternalPBProtocol(conf, protocol, service, server);
}

/**
* Add protobuf based protocol to the {@link RPC.Server}.
* This engine uses Protobuf 2.5.0. Recommended to upgrade to
* Protobuf 3.x from hadoop-thirdparty and use
* {@link DFSUtil#addPBProtocol(Configuration, Class, BlockingService,
* {@link DFSUtil#addInternalPBProtocol(Configuration, Class, BlockingService,
* RPC.Server)}.
* @param conf configuration
* @param protocol Protocol interface

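For code inside the HDFS tree, switching to the new entry point is mechanical: build the BlockingService exactly as before and register it through addInternalPBProtocol. An illustrative registration, where MyProtocolPB and its generated service are hypothetical stand-ins for a real protocol:

    // Hypothetical caller of the new internal registration method.
    BlockingService service =
        MyProtocolProtos.MyProtocolService.newReflectiveBlockingService(translator);
    DFSUtil.addInternalPBProtocol(conf, MyProtocolPB.class, service, rpcServer);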
@ -29,7 +29,6 @@
import org.apache.hadoop.hdfs.server.protocol.DatanodeRegistration;
import org.apache.hadoop.hdfs.server.protocol.StorageReport;
import org.apache.hadoop.hdfs.server.protocol.VolumeFailureSummary;
import org.apache.hadoop.ipc.ProtobufHelper;
import org.apache.hadoop.ipc.ProtobufRpcEngine2;
import org.apache.hadoop.ipc.ProtocolMetaInterface;
import org.apache.hadoop.ipc.RPC;
@ -38,7 +37,8 @@
import org.apache.hadoop.security.UserGroupInformation;

import org.apache.hadoop.thirdparty.protobuf.RpcController;
import org.apache.hadoop.thirdparty.protobuf.ServiceException;

import static org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc;

/**
* This class is the client side translator to translate the requests made on
@ -96,11 +96,7 @@ public void sendLifeline(DatanodeRegistration registration,
builder.setVolumeFailureSummary(PBHelper.convertVolumeFailureSummary(
volumeFailureSummary));
}
try {
rpcProxy.sendLifeline(NULL_CONTROLLER, builder.build());
} catch (ServiceException se) {
throw ProtobufHelper.getRemoteException(se);
}
ipc(() -> rpcProxy.sendLifeline(NULL_CONTROLLER, builder.build()));
}

@Override // ProtocolMetaInterface

@ -61,7 +61,6 @@
import org.apache.hadoop.hdfs.server.protocol.StorageReceivedDeletedBlocks;
import org.apache.hadoop.hdfs.server.protocol.StorageReport;
import org.apache.hadoop.hdfs.server.protocol.VolumeFailureSummary;
import org.apache.hadoop.ipc.ProtobufHelper;
import org.apache.hadoop.ipc.ProtobufRpcEngine2;
import org.apache.hadoop.ipc.ProtocolMetaInterface;
import org.apache.hadoop.ipc.RPC;
@ -71,10 +70,11 @@

import org.apache.hadoop.classification.VisibleForTesting;
import org.apache.hadoop.thirdparty.protobuf.RpcController;
import org.apache.hadoop.thirdparty.protobuf.ServiceException;

import javax.annotation.Nonnull;

import static org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc;

/**
* This class is the client side translator to translate the requests made on
* {@link DatanodeProtocol} interfaces to the RPC server implementing
@ -123,11 +123,8 @@ public DatanodeRegistration registerDatanode(DatanodeRegistration registration
RegisterDatanodeRequestProto.Builder builder = RegisterDatanodeRequestProto
.newBuilder().setRegistration(PBHelper.convert(registration));
RegisterDatanodeResponseProto resp;
try {
resp = rpcProxy.registerDatanode(NULL_CONTROLLER, builder.build());
} catch (ServiceException se) {
throw ProtobufHelper.getRemoteException(se);
}
resp = ipc(() -> rpcProxy.registerDatanode(NULL_CONTROLLER, builder.build()));

return PBHelper.convert(resp.getRegistration());
}

@ -164,11 +161,8 @@ public HeartbeatResponse sendHeartbeat(DatanodeRegistration registration,
}

HeartbeatResponseProto resp;
try {
resp = rpcProxy.sendHeartbeat(NULL_CONTROLLER, builder.build());
} catch (ServiceException se) {
throw ProtobufHelper.getRemoteException(se);
}
resp = ipc(() -> rpcProxy.sendHeartbeat(NULL_CONTROLLER, builder.build()));

DatanodeCommand[] cmds = new DatanodeCommand[resp.getCmdsList().size()];
int index = 0;
for (DatanodeCommandProto p : resp.getCmdsList()) {
@ -215,11 +209,7 @@ public DatanodeCommand blockReport(DatanodeRegistration registration,
}
builder.setContext(PBHelper.convert(context));
BlockReportResponseProto resp;
try {
resp = rpcProxy.blockReport(NULL_CONTROLLER, builder.build());
} catch (ServiceException se) {
throw ProtobufHelper.getRemoteException(se);
}
resp = ipc(() -> rpcProxy.blockReport(NULL_CONTROLLER, builder.build()));
return resp.hasCmd() ? PBHelper.convert(resp.getCmd()) : null;
}

@ -235,11 +225,7 @@ public DatanodeCommand cacheReport(DatanodeRegistration registration,
}

CacheReportResponseProto resp;
try {
resp = rpcProxy.cacheReport(NULL_CONTROLLER, builder.build());
} catch (ServiceException se) {
throw ProtobufHelper.getRemoteException(se);
}
resp = ipc(() -> rpcProxy.cacheReport(NULL_CONTROLLER, builder.build()));
if (resp.hasCmd()) {
return PBHelper.convert(resp.getCmd());
}
@ -264,11 +250,7 @@ public void blockReceivedAndDeleted(DatanodeRegistration registration,
}
builder.addBlocks(repBuilder.build());
}
try {
rpcProxy.blockReceivedAndDeleted(NULL_CONTROLLER, builder.build());
} catch (ServiceException se) {
throw ProtobufHelper.getRemoteException(se);
}
ipc(() -> rpcProxy.blockReceivedAndDeleted(NULL_CONTROLLER, builder.build()));
}

@Override
@ -277,21 +259,13 @@ public void errorReport(DatanodeRegistration registration, int errorCode,
ErrorReportRequestProto req = ErrorReportRequestProto.newBuilder()
.setRegistartion(PBHelper.convert(registration))
.setErrorCode(errorCode).setMsg(msg).build();
try {
rpcProxy.errorReport(NULL_CONTROLLER, req);
} catch (ServiceException se) {
throw ProtobufHelper.getRemoteException(se);
}
ipc(() -> rpcProxy.errorReport(NULL_CONTROLLER, req));
}

@Override
public NamespaceInfo versionRequest() throws IOException {
try {
return PBHelper.convert(rpcProxy.versionRequest(NULL_CONTROLLER,
VOID_VERSION_REQUEST).getInfo());
} catch (ServiceException e) {
throw ProtobufHelper.getRemoteException(e);
}
return PBHelper.convert(ipc(() -> rpcProxy.versionRequest(NULL_CONTROLLER,
VOID_VERSION_REQUEST).getInfo()));
}

@Override
@ -302,11 +276,7 @@ public void reportBadBlocks(LocatedBlock[] blocks) throws IOException {
builder.addBlocks(i, PBHelperClient.convertLocatedBlock(blocks[i]));
}
ReportBadBlocksRequestProto req = builder.build();
try {
rpcProxy.reportBadBlocks(NULL_CONTROLLER, req);
} catch (ServiceException se) {
throw ProtobufHelper.getRemoteException(se);
}
ipc(() -> rpcProxy.reportBadBlocks(NULL_CONTROLLER, req));
}

@Override
@ -327,11 +297,7 @@ public void commitBlockSynchronization(ExtendedBlock block,
builder.addNewTargetStorages(newtargetstorages[i]);
}
CommitBlockSynchronizationRequestProto req = builder.build();
try {
rpcProxy.commitBlockSynchronization(NULL_CONTROLLER, req);
} catch (ServiceException se) {
throw ProtobufHelper.getRemoteException(se);
}
ipc(() -> rpcProxy.commitBlockSynchronization(NULL_CONTROLLER, req));
}

@Override // ProtocolMetaInterface

@ -16,7 +16,6 @@
*/
package org.apache.hadoop.hdfs.protocolPB;

import org.apache.hadoop.thirdparty.protobuf.ServiceException;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
@ -30,7 +29,6 @@
import org.apache.hadoop.hdfs.server.common.FileRegion;
import org.apache.hadoop.hdfs.server.namenode.ha.AbstractNNFailoverProxyProvider;
import org.apache.hadoop.hdfs.server.namenode.ha.InMemoryAliasMapFailoverProxyProvider;
import org.apache.hadoop.ipc.ProtobufHelper;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.net.NetUtils;
import org.slf4j.Logger;
@ -54,6 +52,7 @@
import static org.apache.hadoop.hdfs.client.HdfsClientConfigKeys.Failover.PROXY_PROVIDER_KEY_PREFIX;
import static org.apache.hadoop.hdfs.protocol.proto.AliasMapProtocolProtos.*;
import static org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.*;
import static org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc;

/**
* This class is the client side translator to translate requests made to the
@ -136,8 +135,7 @@ public InMemoryAliasMap.IterationResult list(Optional<Block> marker)
builder.setMarker(PBHelperClient.convert(marker.get()));
}
ListRequestProto request = builder.build();
try {
ListResponseProto response = rpcProxy.list(null, request);
ListResponseProto response = ipc(() -> rpcProxy.list(null, request));
List<KeyValueProto> fileRegionsList = response.getFileRegionsList();

List<FileRegion> fileRegions = fileRegionsList
@ -156,10 +154,6 @@ public InMemoryAliasMap.IterationResult list(Optional<Block> marker)
return new InMemoryAliasMap.IterationResult(fileRegions,
Optional.empty());
}

} catch (ServiceException e) {
throw ProtobufHelper.getRemoteException(e);
}
}

@Nonnull
@ -175,8 +169,7 @@ public Optional<ProvidedStorageLocation> read(@Nonnull Block block)
.newBuilder()
.setKey(PBHelperClient.convert(block))
.build();
try {
ReadResponseProto response = rpcProxy.read(null, request);
ReadResponseProto response = ipc(() -> rpcProxy.read(null, request));

ProvidedStorageLocationProto providedStorageLocation =
response.getValue();
@ -185,9 +178,6 @@ public Optional<ProvidedStorageLocation> read(@Nonnull Block block)
}
return Optional.empty();

} catch (ServiceException e) {
throw ProtobufHelper.getRemoteException(e);
}
}

@Override
@ -206,22 +196,14 @@ public void write(@Nonnull Block block,
.build())
.build();

try {
rpcProxy.write(null, request);
} catch (ServiceException e) {
throw ProtobufHelper.getRemoteException(e);
}
ipc(() -> rpcProxy.write(null, request));
}

@Override
public String getBlockPoolId() throws IOException {
try {
BlockPoolResponseProto response = rpcProxy.getBlockPoolId(null,
BlockPoolRequestProto.newBuilder().build());
BlockPoolResponseProto response = ipc(() -> rpcProxy.getBlockPoolId(null,
BlockPoolRequestProto.newBuilder().build()));
return response.getBlockPoolId();
} catch (ServiceException e) {
throw ProtobufHelper.getRemoteException(e);
}
}

@Override

@ -34,7 +34,6 @@
import org.apache.hadoop.hdfs.server.protocol.BlockRecoveryCommand.RecoveringBlock;
import org.apache.hadoop.hdfs.server.protocol.InterDatanodeProtocol;
import org.apache.hadoop.hdfs.server.protocol.ReplicaRecoveryInfo;
import org.apache.hadoop.ipc.ProtobufHelper;
import org.apache.hadoop.ipc.ProtobufRpcEngine2;
import org.apache.hadoop.ipc.ProtocolMetaInterface;
import org.apache.hadoop.ipc.RPC;
@ -42,7 +41,8 @@
import org.apache.hadoop.security.UserGroupInformation;

import org.apache.hadoop.thirdparty.protobuf.RpcController;
import org.apache.hadoop.thirdparty.protobuf.ServiceException;

import static org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc;

/**
* This class is the client side translator to translate the requests made on
@ -79,11 +79,7 @@ public ReplicaRecoveryInfo initReplicaRecovery(RecoveringBlock rBlock)
InitReplicaRecoveryRequestProto req = InitReplicaRecoveryRequestProto
.newBuilder().setBlock(PBHelper.convert(rBlock)).build();
InitReplicaRecoveryResponseProto resp;
try {
resp = rpcProxy.initReplicaRecovery(NULL_CONTROLLER, req);
} catch (ServiceException e) {
throw ProtobufHelper.getRemoteException(e);
}
resp = ipc(() -> rpcProxy.initReplicaRecovery(NULL_CONTROLLER, req));
if (!resp.getReplicaFound()) {
// No replica found on the remote node.
return null;
@ -108,12 +104,9 @@ public String updateReplicaUnderRecovery(ExtendedBlock oldBlock,
.setBlock(PBHelperClient.convert(oldBlock))
.setNewLength(newLength).setNewBlockId(newBlockId)
.setRecoveryId(recoveryId).build();
try {
return rpcProxy.updateReplicaUnderRecovery(NULL_CONTROLLER, req
).getStorageUuid();
} catch (ServiceException e) {
throw ProtobufHelper.getRemoteException(e);
}
return ipc(() -> rpcProxy.updateReplicaUnderRecovery(NULL_CONTROLLER, req)
.getStorageUuid());

}

@Override

@ -29,13 +29,12 @@
import org.apache.hadoop.hdfs.server.protocol.FenceResponse;
import org.apache.hadoop.hdfs.server.protocol.JournalInfo;
import org.apache.hadoop.hdfs.server.protocol.JournalProtocol;
import org.apache.hadoop.ipc.ProtobufHelper;
import org.apache.hadoop.ipc.ProtocolMetaInterface;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.ipc.RpcClientUtil;

import org.apache.hadoop.thirdparty.protobuf.RpcController;
import org.apache.hadoop.thirdparty.protobuf.ServiceException;

import static org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc;

/**
* This class is the client side translator to translate the requests made on
@ -69,11 +68,17 @@ public void journal(JournalInfo journalInfo, long epoch, long firstTxnId,
.setNumTxns(numTxns)
.setRecords(PBHelperClient.getByteString(records))
.build();
try {
rpcProxy.journal(NULL_CONTROLLER, req);
} catch (ServiceException e) {
throw ProtobufHelper.getRemoteException(e);
ipc(() -> rpcProxy.journal(NULL_CONTROLLER, req));
}

@Override
public FenceResponse fence(JournalInfo journalInfo, long epoch,
String fencerInfo) throws IOException {
FenceRequestProto req = FenceRequestProto.newBuilder().setEpoch(epoch)
.setJournalInfo(PBHelper.convert(journalInfo)).build();
FenceResponseProto resp = ipc(() -> rpcProxy.fence(NULL_CONTROLLER, req));
return new FenceResponse(resp.getPreviousEpoch(),
resp.getLastTransactionId(), resp.getInSync());
}

@Override
@ -84,25 +89,7 @@ public void startLogSegment(JournalInfo journalInfo, long epoch, long txid)
.setEpoch(epoch)
.setTxid(txid)
.build();
try {
rpcProxy.startLogSegment(NULL_CONTROLLER, req);
} catch (ServiceException e) {
throw ProtobufHelper.getRemoteException(e);
}
}

@Override
public FenceResponse fence(JournalInfo journalInfo, long epoch,
String fencerInfo) throws IOException {
FenceRequestProto req = FenceRequestProto.newBuilder().setEpoch(epoch)
.setJournalInfo(PBHelper.convert(journalInfo)).build();
try {
FenceResponseProto resp = rpcProxy.fence(NULL_CONTROLLER, req);
return new FenceResponse(resp.getPreviousEpoch(),
resp.getLastTransactionId(), resp.getInSync());
} catch (ServiceException e) {
throw ProtobufHelper.getRemoteException(e);
}
ipc(() -> rpcProxy.startLogSegment(NULL_CONTROLLER, req));
}

@Override

@ -51,14 +51,13 @@
import org.apache.hadoop.hdfs.server.protocol.NamenodeRegistration;
import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo;
import org.apache.hadoop.hdfs.server.protocol.RemoteEditLogManifest;
import org.apache.hadoop.ipc.ProtobufHelper;
import org.apache.hadoop.ipc.ProtocolMetaInterface;
import org.apache.hadoop.ipc.ProtocolTranslator;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.ipc.RpcClientUtil;

import org.apache.hadoop.thirdparty.protobuf.RpcController;
import org.apache.hadoop.thirdparty.protobuf.ServiceException;

import static org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc;

/**
* This class is the client side translator to translate the requests made on
@ -107,63 +106,39 @@ public BlocksWithLocations getBlocks(DatanodeInfo datanode, long size, long
GetBlocksRequestProto req = GetBlocksRequestProto.newBuilder()
.setDatanode(PBHelperClient.convert((DatanodeID)datanode)).setSize(size)
.setMinBlockSize(minBlockSize).setTimeInterval(timeInterval).build();
try {
return PBHelper.convert(rpcProxy.getBlocks(NULL_CONTROLLER, req)
.getBlocks());
} catch (ServiceException e) {
throw ProtobufHelper.getRemoteException(e);
}
return PBHelper.convert(ipc(() -> rpcProxy.getBlocks(NULL_CONTROLLER, req)
.getBlocks()));
}

@Override
public ExportedBlockKeys getBlockKeys() throws IOException {
try {
GetBlockKeysResponseProto rsp = rpcProxy.getBlockKeys(NULL_CONTROLLER,
VOID_GET_BLOCKKEYS_REQUEST);
GetBlockKeysResponseProto rsp = ipc(() -> rpcProxy.getBlockKeys(NULL_CONTROLLER,
VOID_GET_BLOCKKEYS_REQUEST));
return rsp.hasKeys() ? PBHelper.convert(rsp.getKeys()) : null;
} catch (ServiceException e) {
throw ProtobufHelper.getRemoteException(e);
}
}

@Override
public long getTransactionID() throws IOException {
try {
return rpcProxy.getTransactionId(NULL_CONTROLLER,
VOID_GET_TRANSACTIONID_REQUEST).getTxId();
} catch (ServiceException e) {
throw ProtobufHelper.getRemoteException(e);
}
return ipc(() -> rpcProxy.getTransactionId(NULL_CONTROLLER,
VOID_GET_TRANSACTIONID_REQUEST).getTxId());
}

@Override
public long getMostRecentCheckpointTxId() throws IOException {
try {
return rpcProxy.getMostRecentCheckpointTxId(NULL_CONTROLLER,
GetMostRecentCheckpointTxIdRequestProto.getDefaultInstance()).getTxId();
} catch (ServiceException e) {
throw ProtobufHelper.getRemoteException(e);
}
return ipc(() -> rpcProxy.getMostRecentCheckpointTxId(NULL_CONTROLLER,
GetMostRecentCheckpointTxIdRequestProto.getDefaultInstance()).getTxId());
}

@Override
public CheckpointSignature rollEditLog() throws IOException {
try {
return PBHelper.convert(rpcProxy.rollEditLog(NULL_CONTROLLER,
VOID_ROLL_EDITLOG_REQUEST).getSignature());
} catch (ServiceException e) {
throw ProtobufHelper.getRemoteException(e);
}
return PBHelper.convert(ipc(() -> rpcProxy.rollEditLog(NULL_CONTROLLER,
VOID_ROLL_EDITLOG_REQUEST).getSignature()));
}

@Override
public NamespaceInfo versionRequest() throws IOException {
try {
return PBHelper.convert(rpcProxy.versionRequest(NULL_CONTROLLER,
VOID_VERSION_REQUEST).getInfo());
} catch (ServiceException e) {
throw ProtobufHelper.getRemoteException(e);
}
return PBHelper.convert(ipc(() -> rpcProxy.versionRequest(NULL_CONTROLLER,
VOID_VERSION_REQUEST).getInfo()));
}

@Override
@ -172,11 +147,7 @@ public void errorReport(NamenodeRegistration registration, int errorCode,
ErrorReportRequestProto req = ErrorReportRequestProto.newBuilder()
.setErrorCode(errorCode).setMsg(msg)
.setRegistration(PBHelper.convert(registration)).build();
try {
rpcProxy.errorReport(NULL_CONTROLLER, req);
} catch (ServiceException e) {
throw ProtobufHelper.getRemoteException(e);
}
ipc(() -> rpcProxy.errorReport(NULL_CONTROLLER, req));
}

@Override
@ -184,13 +155,9 @@ public NamenodeRegistration registerSubordinateNamenode(
NamenodeRegistration registration) throws IOException {
RegisterRequestProto req = RegisterRequestProto.newBuilder()
.setRegistration(PBHelper.convert(registration)).build();
try {
return PBHelper.convert(
rpcProxy.registerSubordinateNamenode(NULL_CONTROLLER, req)
.getRegistration());
} catch (ServiceException e) {
throw ProtobufHelper.getRemoteException(e);
}
ipc(() -> rpcProxy.registerSubordinateNamenode(NULL_CONTROLLER, req)
.getRegistration()));
}

@Override
@ -199,11 +166,7 @@ public NamenodeCommand startCheckpoint(NamenodeRegistration registration)
StartCheckpointRequestProto req = StartCheckpointRequestProto.newBuilder()
.setRegistration(PBHelper.convert(registration)).build();
NamenodeCommandProto cmd;
try {
cmd = rpcProxy.startCheckpoint(NULL_CONTROLLER, req).getCommand();
} catch (ServiceException e) {
throw ProtobufHelper.getRemoteException(e);
}
cmd = ipc(() -> rpcProxy.startCheckpoint(NULL_CONTROLLER, req).getCommand());
return PBHelper.convert(cmd);
}

@ -213,11 +176,7 @@ public void endCheckpoint(NamenodeRegistration registration,
EndCheckpointRequestProto req = EndCheckpointRequestProto.newBuilder()
.setRegistration(PBHelper.convert(registration))
.setSignature(PBHelper.convert(sig)).build();
try {
rpcProxy.endCheckpoint(NULL_CONTROLLER, req);
} catch (ServiceException e) {
throw ProtobufHelper.getRemoteException(e);
}
ipc(() -> rpcProxy.endCheckpoint(NULL_CONTROLLER, req));
}

@Override
@ -225,12 +184,8 @@ public RemoteEditLogManifest getEditLogManifest(long sinceTxId)
throws IOException {
GetEditLogManifestRequestProto req = GetEditLogManifestRequestProto
.newBuilder().setSinceTxId(sinceTxId).build();
try {
return PBHelper.convert(rpcProxy.getEditLogManifest(NULL_CONTROLLER, req)
.getManifest());
} catch (ServiceException e) {
throw ProtobufHelper.getRemoteException(e);
}
return PBHelper.convert(ipc(() -> rpcProxy.getEditLogManifest(NULL_CONTROLLER, req)
.getManifest()));
}

@Override
@ -244,38 +199,26 @@ public boolean isMethodSupported(String methodName) throws IOException {
public boolean isUpgradeFinalized() throws IOException {
IsUpgradeFinalizedRequestProto req = IsUpgradeFinalizedRequestProto
.newBuilder().build();
try {
IsUpgradeFinalizedResponseProto response = rpcProxy.isUpgradeFinalized(
NULL_CONTROLLER, req);
IsUpgradeFinalizedResponseProto response = ipc(() -> rpcProxy.isUpgradeFinalized(
NULL_CONTROLLER, req));
return response.getIsUpgradeFinalized();
} catch (ServiceException e) {
throw ProtobufHelper.getRemoteException(e);
}
}

@Override
public boolean isRollingUpgrade() throws IOException {
IsRollingUpgradeRequestProto req = IsRollingUpgradeRequestProto
.newBuilder().build();
try {
IsRollingUpgradeResponseProto response = rpcProxy.isRollingUpgrade(
NULL_CONTROLLER, req);
IsRollingUpgradeResponseProto response = ipc(() -> rpcProxy.isRollingUpgrade(
NULL_CONTROLLER, req));
return response.getIsRollingUpgrade();
} catch (ServiceException e) {
throw ProtobufHelper.getRemoteException(e);
}
}

@Override
public Long getNextSPSPath() throws IOException {
GetNextSPSPathRequestProto req =
GetNextSPSPathRequestProto.newBuilder().build();
try {
GetNextSPSPathResponseProto nextSPSPath =
rpcProxy.getNextSPSPath(NULL_CONTROLLER, req);
ipc(() -> rpcProxy.getNextSPSPath(NULL_CONTROLLER, req));
return nextSPSPath.hasSpsPath() ? nextSPSPath.getSpsPath() : null;
} catch (ServiceException e) {
throw ProtobufHelper.getRemoteException(e);
}
}
}

@ -20,14 +20,12 @@
package org.apache.hadoop.hdfs.qjournal.protocolPB;

import org.apache.hadoop.thirdparty.protobuf.RpcController;
import org.apache.hadoop.thirdparty.protobuf.ServiceException;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hdfs.qjournal.protocol.InterQJournalProtocol;
import org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocolProtos.GetEditLogManifestRequestProto;
import org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocolProtos.GetEditLogManifestResponseProto;
import org.apache.hadoop.hdfs.qjournal.protocol.QJournalProtocolProtos;
import org.apache.hadoop.ipc.ProtobufHelper;
import org.apache.hadoop.ipc.ProtocolMetaInterface;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.ipc.RpcClientUtil;
@ -35,6 +33,8 @@
import java.io.Closeable;
import java.io.IOException;

import static org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc;

/**
* This class is the client side translator to translate the requests made on
* {@link InterQJournalProtocol} interfaces to the RPC server implementing
@ -63,7 +63,6 @@ public void close() {
public GetEditLogManifestResponseProto getEditLogManifestFromJournal(
String jid, String nameServiceId, long sinceTxId, boolean inProgressOk)
throws IOException {
try {
GetEditLogManifestRequestProto.Builder req;
req = GetEditLogManifestRequestProto.newBuilder()
.setJid(convertJournalId(jid))
@ -72,12 +71,8 @@ public GetEditLogManifestResponseProto getEditLogManifestFromJournal(
if (nameServiceId !=null) {
req.setNameServiceId(nameServiceId);
}
return rpcProxy.getEditLogManifestFromJournal(NULL_CONTROLLER,
req.build()
);
} catch (ServiceException e) {
throw ProtobufHelper.getRemoteException(e);
}
return ipc(() -> rpcProxy.getEditLogManifestFromJournal(NULL_CONTROLLER,
req.build()));
}

private QJournalProtocolProtos.JournalIdProto convertJournalId(String jid) {

@ -63,13 +63,12 @@
|
||||
import org.apache.hadoop.hdfs.server.common.StorageInfo;
|
||||
import org.apache.hadoop.hdfs.server.protocol.JournalProtocol;
|
||||
import org.apache.hadoop.hdfs.server.protocol.NamespaceInfo;
|
||||
import org.apache.hadoop.ipc.ProtobufHelper;
|
||||
import org.apache.hadoop.ipc.ProtocolMetaInterface;
|
||||
import org.apache.hadoop.ipc.RPC;
|
||||
import org.apache.hadoop.ipc.RpcClientUtil;
|
||||
|
||||
import org.apache.hadoop.thirdparty.protobuf.RpcController;
|
||||
import org.apache.hadoop.thirdparty.protobuf.ServiceException;
|
||||
|
||||
import static org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc;
|
||||
|
||||
/**
|
||||
* This class is the client side translator to translate the requests made on
|
||||
@ -97,36 +96,28 @@ public void close() {
|
||||
@Override
|
||||
public boolean isFormatted(String journalId,
|
||||
String nameServiceId) throws IOException {
|
||||
try {
|
||||
IsFormattedRequestProto.Builder req = IsFormattedRequestProto.newBuilder()
|
||||
.setJid(convertJournalId(journalId));
|
||||
if (nameServiceId != null) {
|
||||
req.setNameServiceId(nameServiceId);
|
||||
}
|
||||
|
||||
IsFormattedResponseProto resp = rpcProxy.isFormatted(
|
||||
NULL_CONTROLLER, req.build());
|
||||
IsFormattedResponseProto resp = ipc(() -> rpcProxy.isFormatted(
|
||||
NULL_CONTROLLER, req.build()));
|
||||
return resp.getIsFormatted();
|
||||
} catch (ServiceException e) {
|
||||
throw ProtobufHelper.getRemoteException(e);
|
||||
}
|
||||
}
|
||||
|
||||
@Override
|
||||
public GetJournalStateResponseProto getJournalState(String jid,
|
||||
String nameServiceId)
|
||||
throws IOException {
|
||||
try {
|
||||
GetJournalStateRequestProto.Builder req = GetJournalStateRequestProto
|
||||
.newBuilder()
|
||||
.setJid(convertJournalId(jid));
|
||||
if (nameServiceId != null) {
|
||||
req.setNameServiceId(nameServiceId);
|
||||
}
|
||||
return rpcProxy.getJournalState(NULL_CONTROLLER, req.build());
|
||||
} catch (ServiceException e) {
|
||||
throw ProtobufHelper.getRemoteException(e);
|
||||
}
|
||||
return ipc(() -> rpcProxy.getJournalState(NULL_CONTROLLER, req.build()));
|
||||
}
|
||||
|
||||
private JournalIdProto convertJournalId(String jid) {
|
||||
@ -140,7 +131,6 @@ public void format(String jid,
|
||||
String nameServiceId,
|
||||
NamespaceInfo nsInfo,
|
||||
boolean force) throws IOException {
|
||||
try {
|
||||
FormatRequestProto.Builder req = FormatRequestProto.newBuilder()
|
||||
.setJid(convertJournalId(jid))
|
||||
.setNsInfo(PBHelper.convert(nsInfo))
|
||||
@ -149,10 +139,7 @@ public void format(String jid,
|
||||
req.setNameServiceId(nameServiceId);
|
||||
}
|
||||
|
||||
rpcProxy.format(NULL_CONTROLLER, req.build());
|
||||
} catch (ServiceException e) {
|
||||
throw ProtobufHelper.getRemoteException(e);
|
||||
}
|
||||
ipc(() -> rpcProxy.format(NULL_CONTROLLER, req.build()));
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -160,7 +147,6 @@ public NewEpochResponseProto newEpoch(String jid,
|
||||
String nameServiceId,
|
||||
NamespaceInfo nsInfo,
|
||||
long epoch) throws IOException {
|
||||
try {
|
||||
NewEpochRequestProto.Builder req = NewEpochRequestProto.newBuilder()
|
||||
.setJid(convertJournalId(jid))
|
||||
.setNsInfo(PBHelper.convert(nsInfo))
|
||||
@ -170,10 +156,7 @@ public NewEpochResponseProto newEpoch(String jid,
|
||||
req.setNameServiceId(nameServiceId);
|
||||
}
|
||||
|
||||
return rpcProxy.newEpoch(NULL_CONTROLLER, req.build());
|
||||
} catch (ServiceException e) {
|
||||
throw ProtobufHelper.getRemoteException(e);
|
||||
}
|
||||
return ipc(() -> rpcProxy.newEpoch(NULL_CONTROLLER, req.build()));
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -187,22 +170,14 @@ public void journal(RequestInfo reqInfo,
|
||||
.setNumTxns(numTxns)
|
||||
.setRecords(PBHelperClient.getByteString(records))
|
||||
.build();
|
||||
try {
|
||||
rpcProxy.journal(NULL_CONTROLLER, req);
|
||||
} catch (ServiceException e) {
|
||||
throw ProtobufHelper.getRemoteException(e);
|
||||
}
|
||||
ipc(() -> rpcProxy.journal(NULL_CONTROLLER, req));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void heartbeat(RequestInfo reqInfo) throws IOException {
|
||||
try {
|
||||
rpcProxy.heartbeat(NULL_CONTROLLER, HeartbeatRequestProto.newBuilder()
|
||||
ipc(() -> rpcProxy.heartbeat(NULL_CONTROLLER, HeartbeatRequestProto.newBuilder()
|
||||
.setReqInfo(convert(reqInfo))
|
||||
.build());
|
||||
} catch (ServiceException e) {
|
||||
throw ProtobufHelper.getRemoteException(e);
|
||||
}
|
||||
.build()));
|
||||
}
|
||||
|
||||
private QJournalProtocolProtos.RequestInfoProto convert(
|
||||
@ -227,11 +202,7 @@ public void startLogSegment(RequestInfo reqInfo, long txid, int layoutVersion)
|
||||
.setReqInfo(convert(reqInfo))
|
||||
.setTxid(txid).setLayoutVersion(layoutVersion)
|
||||
.build();
|
||||
try {
|
||||
rpcProxy.startLogSegment(NULL_CONTROLLER, req);
|
||||
} catch (ServiceException e) {
|
||||
throw ProtobufHelper.getRemoteException(e);
|
||||
}
|
||||
ipc(() -> rpcProxy.startLogSegment(NULL_CONTROLLER, req));
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -243,11 +214,7 @@ public void finalizeLogSegment(RequestInfo reqInfo, long startTxId,
|
||||
.setStartTxId(startTxId)
|
||||
.setEndTxId(endTxId)
|
||||
.build();
|
||||
try {
|
||||
rpcProxy.finalizeLogSegment(NULL_CONTROLLER, req);
|
||||
} catch (ServiceException e) {
|
||||
throw ProtobufHelper.getRemoteException(e);
|
||||
}
|
||||
ipc(() -> rpcProxy.finalizeLogSegment(NULL_CONTROLLER, req));
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -257,18 +224,13 @@ public void purgeLogsOlderThan(RequestInfo reqInfo, long minTxIdToKeep)
|
||||
.setReqInfo(convert(reqInfo))
|
||||
.setMinTxIdToKeep(minTxIdToKeep)
|
||||
.build();
|
||||
try {
|
||||
rpcProxy.purgeLogs(NULL_CONTROLLER, req);
|
||||
} catch (ServiceException e) {
|
||||
throw ProtobufHelper.getRemoteException(e);
|
||||
}
|
||||
ipc(() -> rpcProxy.purgeLogs(NULL_CONTROLLER, req));
|
||||
}
|
||||
|
||||
@Override
|
||||
public GetEditLogManifestResponseProto getEditLogManifest(
|
||||
String jid, String nameServiceId,
|
||||
long sinceTxId, boolean inProgressOk) throws IOException {
|
||||
try {
|
||||
GetEditLogManifestRequestProto.Builder req;
|
||||
req = GetEditLogManifestRequestProto.newBuilder()
|
||||
.setJid(convertJournalId(jid))
|
||||
@ -277,18 +239,13 @@ public GetEditLogManifestResponseProto getEditLogManifest(
|
||||
if (nameServiceId !=null) {
|
||||
req.setNameServiceId(nameServiceId);
|
||||
}
|
||||
return rpcProxy.getEditLogManifest(NULL_CONTROLLER,
|
||||
req.build()
|
||||
);
|
||||
} catch (ServiceException e) {
|
||||
throw ProtobufHelper.getRemoteException(e);
|
||||
}
|
||||
return ipc(() -> rpcProxy.getEditLogManifest(NULL_CONTROLLER,
|
||||
req.build()));
|
||||
}
|
||||
|
||||
@Override
|
||||
public GetJournaledEditsResponseProto getJournaledEdits(String jid,
|
||||
String nameServiceId, long sinceTxId, int maxTxns) throws IOException {
|
||||
try {
|
||||
GetJournaledEditsRequestProto.Builder req =
|
||||
GetJournaledEditsRequestProto.newBuilder()
|
||||
.setJid(convertJournalId(jid))
|
||||
@ -297,39 +254,28 @@ public GetJournaledEditsResponseProto getJournaledEdits(String jid,
|
||||
if (nameServiceId != null) {
|
||||
req.setNameServiceId(nameServiceId);
|
||||
}
|
||||
return rpcProxy.getJournaledEdits(NULL_CONTROLLER, req.build());
|
||||
} catch (ServiceException se) {
|
||||
throw ProtobufHelper.getRemoteException(se);
|
||||
}
|
||||
return ipc(() -> rpcProxy.getJournaledEdits(NULL_CONTROLLER, req.build()));
|
||||
}
|
||||
|
||||
@Override
|
||||
public PrepareRecoveryResponseProto prepareRecovery(RequestInfo reqInfo,
|
||||
long segmentTxId) throws IOException {
|
||||
try {
|
||||
return rpcProxy.prepareRecovery(NULL_CONTROLLER,
|
||||
return ipc(() -> rpcProxy.prepareRecovery(NULL_CONTROLLER,
|
||||
PrepareRecoveryRequestProto.newBuilder()
|
||||
.setReqInfo(convert(reqInfo))
|
||||
.setSegmentTxId(segmentTxId)
|
||||
.build());
|
||||
} catch (ServiceException e) {
|
||||
throw ProtobufHelper.getRemoteException(e);
|
||||
}
|
||||
.build()));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void acceptRecovery(RequestInfo reqInfo,
|
||||
SegmentStateProto stateToAccept, URL fromUrl) throws IOException {
|
||||
try {
|
||||
rpcProxy.acceptRecovery(NULL_CONTROLLER,
|
||||
ipc(() -> rpcProxy.acceptRecovery(NULL_CONTROLLER,
|
||||
AcceptRecoveryRequestProto.newBuilder()
|
||||
.setReqInfo(convert(reqInfo))
|
||||
.setStateToAccept(stateToAccept)
|
||||
.setFromURL(fromUrl.toExternalForm())
|
||||
.build());
|
||||
} catch (ServiceException e) {
|
||||
throw ProtobufHelper.getRemoteException(e);
|
||||
}
|
||||
.build()));
|
||||
}
|
||||
|
||||
public boolean isMethodSupported(String methodName) throws IOException {
|
||||
@ -340,42 +286,30 @@ public boolean isMethodSupported(String methodName) throws IOException {
|
||||
|
||||
@Override
|
||||
public void doPreUpgrade(String jid) throws IOException {
|
||||
try {
|
||||
DoPreUpgradeRequestProto.Builder req;
|
||||
req = DoPreUpgradeRequestProto.newBuilder()
|
||||
.setJid(convertJournalId(jid));
|
||||
rpcProxy.doPreUpgrade(NULL_CONTROLLER, req.build());
|
||||
} catch (ServiceException e) {
|
||||
throw ProtobufHelper.getRemoteException(e);
|
||||
}
|
||||
ipc(() -> rpcProxy.doPreUpgrade(NULL_CONTROLLER, req.build()));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void doUpgrade(String journalId, StorageInfo sInfo) throws IOException {
|
||||
try {
|
||||
rpcProxy.doUpgrade(NULL_CONTROLLER,
|
||||
ipc(() -> rpcProxy.doUpgrade(NULL_CONTROLLER,
|
||||
DoUpgradeRequestProto.newBuilder()
|
||||
.setJid(convertJournalId(journalId))
|
||||
.setSInfo(PBHelper.convert(sInfo))
|
||||
.build());
|
||||
} catch (ServiceException e) {
|
||||
throw ProtobufHelper.getRemoteException(e);
|
||||
}
|
||||
.build()));
|
||||
}
|
||||
|
||||
@Override
|
||||
public void doFinalize(String jid, String nameServiceId) throws IOException {
|
||||
try {
|
||||
DoFinalizeRequestProto.Builder req = DoFinalizeRequestProto
|
||||
.newBuilder()
|
||||
.setJid(convertJournalId(jid));
|
||||
if (nameServiceId != null) {
|
||||
req.setNameServiceId(nameServiceId);
|
||||
}
|
||||
rpcProxy.doFinalize(NULL_CONTROLLER, req.build());
|
||||
} catch (ServiceException e) {
|
||||
throw ProtobufHelper.getRemoteException(e);
|
||||
}
|
||||
ipc(() -> rpcProxy.doFinalize(NULL_CONTROLLER, req.build()));
|
||||
}
|
||||
|
||||
@Override
|
||||
@ -384,7 +318,6 @@ public Boolean canRollBack(String journalId,
|
||||
StorageInfo storage,
|
||||
StorageInfo prevStorage,
|
||||
int targetLayoutVersion) throws IOException {
|
||||
try {
|
||||
CanRollBackRequestProto.Builder req = CanRollBackRequestProto.newBuilder()
|
||||
.setJid(convertJournalId(journalId))
|
||||
.setStorage(PBHelper.convert(storage))
|
||||
@ -393,28 +326,21 @@ public Boolean canRollBack(String journalId,
|
||||
if (nameServiceId != null) {
|
||||
req.setNameServiceId(nameServiceId);
|
||||
}
|
||||
CanRollBackResponseProto response = rpcProxy.canRollBack(
|
||||
NULL_CONTROLLER, req.build());
|
||||
CanRollBackResponseProto response = ipc(() -> rpcProxy.canRollBack(
|
||||
NULL_CONTROLLER, req.build()));
|
||||
return response.getCanRollBack();
|
||||
} catch (ServiceException e) {
|
||||
throw ProtobufHelper.getRemoteException(e);
|
||||
}
|
||||
}
|
||||
|
||||
  @Override
  public void doRollback(String journalId,
      String nameServiceId) throws IOException {
    try {
    DoRollbackRequestProto.Builder req = DoRollbackRequestProto.newBuilder()
        .setJid(convertJournalId(journalId));

    if (nameServiceId != null) {
      req.setNameserviceId(nameServiceId);
    }
      rpcProxy.doRollback(NULL_CONTROLLER, req.build());
    } catch (ServiceException e) {
      throw ProtobufHelper.getRemoteException(e);
    }
    ipc(() -> rpcProxy.doRollback(NULL_CONTROLLER, req.build()));
  }

  @Override
@ -422,7 +348,6 @@ public void discardSegments(String journalId,
      String nameServiceId,
      long startTxId)
      throws IOException {
    try {
    DiscardSegmentsRequestProto.Builder req = DiscardSegmentsRequestProto
        .newBuilder()
        .setJid(convertJournalId(journalId)).setStartTxId(startTxId);
@ -430,29 +355,21 @@ public void discardSegments(String journalId,
    if (nameServiceId != null) {
      req.setNameServiceId(nameServiceId);
    }
      rpcProxy.discardSegments(NULL_CONTROLLER, req.build());
    } catch (ServiceException e) {
      throw ProtobufHelper.getRemoteException(e);
    }
    ipc(() -> rpcProxy.discardSegments(NULL_CONTROLLER, req.build()));
  }

  @Override
  public Long getJournalCTime(String journalId,
      String nameServiceId) throws IOException {
    try {

    GetJournalCTimeRequestProto.Builder req = GetJournalCTimeRequestProto
        .newBuilder()
        .setJid(convertJournalId(journalId));
    if (nameServiceId != null) {
      req.setNameServiceId(nameServiceId);
    }
      GetJournalCTimeResponseProto response = rpcProxy.getJournalCTime(
          NULL_CONTROLLER, req.build());
    GetJournalCTimeResponseProto response = ipc(() -> rpcProxy.getJournalCTime(
        NULL_CONTROLLER, req.build()));
    return response.getResultCTime();
    } catch (ServiceException e) {
      throw ProtobufHelper.getRemoteException(e);
    }
  }

}

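Continuing the earlier sketch (the invented IpcCallSketch class), a caller of a migrated translator method only ever sees IOException; the shaded ServiceException never escapes. A small demonstration under that same assumption:

  import java.io.IOException;

  import org.apache.hadoop.thirdparty.protobuf.ServiceException;

  public class IpcSketchDemo {
    public static void main(String[] args) {
      try {
        // simulate an RPC proxy call failing with a wrapped IOException
        IpcCallSketch.ipc(() -> {
          throw new ServiceException(new IOException("remote failure"));
        });
      } catch (IOException expected) {
        // the wrapped IOException was unwrapped and rethrown as-is
        System.out.println("caught: " + expected.getMessage());
      }
    }
  }
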
@ -125,7 +125,7 @@ public class JournalNodeRpcServer implements QJournalProtocol,
    BlockingService interQJournalProtocolService = InterQJournalProtocolService
        .newReflectiveBlockingService(qJournalProtocolServerSideTranslatorPB);

    DFSUtil.addPBProtocol(confCopy, InterQJournalProtocolPB.class,
    DFSUtil.addInternalPBProtocol(confCopy, InterQJournalProtocolPB.class,
        interQJournalProtocolService, server);

@ -1528,14 +1528,14 @@ private void initIpcServer() throws IOException {
        = new ReconfigurationProtocolServerSideTranslatorPB(this);
    service = ReconfigurationProtocolService
        .newReflectiveBlockingService(reconfigurationProtocolXlator);
    DFSUtil.addPBProtocol(getConf(), ReconfigurationProtocolPB.class, service,
    DFSUtil.addInternalPBProtocol(getConf(), ReconfigurationProtocolPB.class, service,
        ipcServer);

    InterDatanodeProtocolServerSideTranslatorPB interDatanodeProtocolXlator =
        new InterDatanodeProtocolServerSideTranslatorPB(this);
    service = InterDatanodeProtocolService
        .newReflectiveBlockingService(interDatanodeProtocolXlator);
    DFSUtil.addPBProtocol(getConf(), InterDatanodeProtocolPB.class, service,
    DFSUtil.addInternalPBProtocol(getConf(), InterDatanodeProtocolPB.class, service,
        ipcServer);

    LOG.info("Opened IPC server at {}", ipcServer.getListenerAddress());

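These hunks rename DFSUtil.addPBProtocol to addInternalPBProtocol, signalling that the registration is internal and bound to the shaded protobuf 3.x engine. A hedged sketch of what such a helper can look like, with the signature inferred from the call sites in this diff (not the verbatim DFSUtil source):

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.ipc.ProtobufRpcEngine2;
  import org.apache.hadoop.ipc.RPC;
  import org.apache.hadoop.thirdparty.protobuf.BlockingService;

  final class DfsUtilSketch {
    /**
     * Register a protobuf-based protocol on an RPC server using only
     * shaded protobuf 3.x types, so protobuf-2.5 never appears in the
     * signature.
     */
    static void addInternalPBProtocol(Configuration conf,
        Class<?> protocol, BlockingService service, RPC.Server server) {
      // bind the protocol class to the shaded protobuf RPC engine
      RPC.setProtocolEngine(conf, protocol, ProtobufRpcEngine2.class);
      // expose the reflective blocking service on the running server
      server.addProtocol(RPC.RpcKind.RPC_PROTOCOL_BUFFER, protocol, service);
    }
  }
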
@ -246,7 +246,7 @@ private BackupNodeRpcServer(Configuration conf, BackupNode nn)
        new JournalProtocolServerSideTranslatorPB(this);
    BlockingService service = JournalProtocolService
        .newReflectiveBlockingService(journalProtocolTranslator);
    DFSUtil.addPBProtocol(conf, JournalProtocolPB.class, service,
    DFSUtil.addInternalPBProtocol(conf, JournalProtocolPB.class, service,
        this.clientRpcServer);
  }

@ -371,24 +371,24 @@ public NameNodeRpcServer(Configuration conf, NameNode nn)
        .build();

    // Add all the RPC protocols that the namenode implements
    DFSUtil.addPBProtocol(conf, HAServiceProtocolPB.class, haPbService,
    DFSUtil.addInternalPBProtocol(conf, HAServiceProtocolPB.class, haPbService,
        serviceRpcServer);
    DFSUtil.addPBProtocol(conf, ReconfigurationProtocolPB.class,
    DFSUtil.addInternalPBProtocol(conf, ReconfigurationProtocolPB.class,
        reconfigurationPbService, serviceRpcServer);
    DFSUtil.addPBProtocol(conf, NamenodeProtocolPB.class, NNPbService,
    DFSUtil.addInternalPBProtocol(conf, NamenodeProtocolPB.class, NNPbService,
        serviceRpcServer);
    DFSUtil.addPBProtocol(conf, DatanodeProtocolPB.class, dnProtoPbService,
    DFSUtil.addInternalPBProtocol(conf, DatanodeProtocolPB.class, dnProtoPbService,
        serviceRpcServer);
    DFSUtil.addPBProtocol(conf, RefreshAuthorizationPolicyProtocolPB.class,
    DFSUtil.addInternalPBProtocol(conf, RefreshAuthorizationPolicyProtocolPB.class,
        refreshAuthService, serviceRpcServer);
    DFSUtil.addPBProtocol(conf, RefreshUserMappingsProtocolPB.class,
    DFSUtil.addInternalPBProtocol(conf, RefreshUserMappingsProtocolPB.class,
        refreshUserMappingService, serviceRpcServer);
    // We support Refreshing call queue here in case the client RPC queue is full
    DFSUtil.addPBProtocol(conf, RefreshCallQueueProtocolPB.class,
    DFSUtil.addInternalPBProtocol(conf, RefreshCallQueueProtocolPB.class,
        refreshCallQueueService, serviceRpcServer);
    DFSUtil.addPBProtocol(conf, GenericRefreshProtocolPB.class,
    DFSUtil.addInternalPBProtocol(conf, GenericRefreshProtocolPB.class,
        genericRefreshService, serviceRpcServer);
    DFSUtil.addPBProtocol(conf, GetUserMappingsProtocolPB.class,
    DFSUtil.addInternalPBProtocol(conf, GetUserMappingsProtocolPB.class,
        getUserMappingService, serviceRpcServer);

    // Update the address with the correct port
@ -431,7 +431,7 @@ public NameNodeRpcServer(Configuration conf, NameNode nn)
        .setSecretManager(namesystem.getDelegationTokenSecretManager())
        .build();

    DFSUtil.addPBProtocol(conf, DatanodeLifelineProtocolPB.class,
    DFSUtil.addInternalPBProtocol(conf, DatanodeLifelineProtocolPB.class,
        lifelineProtoPbService, lifelineRpcServer);

    // Update the address with the correct port
@ -474,23 +474,23 @@ public NameNodeRpcServer(Configuration conf, NameNode nn)
        .build();

    // Add all the RPC protocols that the namenode implements
    DFSUtil.addPBProtocol(conf, HAServiceProtocolPB.class, haPbService,
    DFSUtil.addInternalPBProtocol(conf, HAServiceProtocolPB.class, haPbService,
        clientRpcServer);
    DFSUtil.addPBProtocol(conf, ReconfigurationProtocolPB.class,
    DFSUtil.addInternalPBProtocol(conf, ReconfigurationProtocolPB.class,
        reconfigurationPbService, clientRpcServer);
    DFSUtil.addPBProtocol(conf, NamenodeProtocolPB.class, NNPbService,
    DFSUtil.addInternalPBProtocol(conf, NamenodeProtocolPB.class, NNPbService,
        clientRpcServer);
    DFSUtil.addPBProtocol(conf, DatanodeProtocolPB.class, dnProtoPbService,
    DFSUtil.addInternalPBProtocol(conf, DatanodeProtocolPB.class, dnProtoPbService,
        clientRpcServer);
    DFSUtil.addPBProtocol(conf, RefreshAuthorizationPolicyProtocolPB.class,
    DFSUtil.addInternalPBProtocol(conf, RefreshAuthorizationPolicyProtocolPB.class,
        refreshAuthService, clientRpcServer);
    DFSUtil.addPBProtocol(conf, RefreshUserMappingsProtocolPB.class,
    DFSUtil.addInternalPBProtocol(conf, RefreshUserMappingsProtocolPB.class,
        refreshUserMappingService, clientRpcServer);
    DFSUtil.addPBProtocol(conf, RefreshCallQueueProtocolPB.class,
    DFSUtil.addInternalPBProtocol(conf, RefreshCallQueueProtocolPB.class,
        refreshCallQueueService, clientRpcServer);
    DFSUtil.addPBProtocol(conf, GenericRefreshProtocolPB.class,
    DFSUtil.addInternalPBProtocol(conf, GenericRefreshProtocolPB.class,
        genericRefreshService, clientRpcServer);
    DFSUtil.addPBProtocol(conf, GetUserMappingsProtocolPB.class,
    DFSUtil.addInternalPBProtocol(conf, GetUserMappingsProtocolPB.class,
        getUserMappingService, clientRpcServer);

    // set service-level authorization security policy

@ -36,6 +36,10 @@
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-hdfs-client</artifactId>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-common</artifactId>
    </dependency>
    <dependency>
      <groupId>org.apache.hadoop</groupId>
      <artifactId>hadoop-hdfs</artifactId>

@ -22,7 +22,6 @@
import java.io.IOException;

import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.ipc.ProtobufHelper;
import org.apache.hadoop.ipc.ProtocolMetaInterface;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.ipc.RpcClientUtil;
@ -34,7 +33,8 @@
import org.apache.hadoop.mapreduce.v2.hs.proto.HSAdminRefreshProtocolProtos.RefreshLogRetentionSettingsRequestProto;

import org.apache.hadoop.thirdparty.protobuf.RpcController;
import org.apache.hadoop.thirdparty.protobuf.ServiceException;

import static org.apache.hadoop.ipc.internal.ShadedProtobufHelper.ipc;

@Private
public class HSAdminRefreshProtocolClientSideTranslatorPB implements
@ -73,43 +73,27 @@ public void close() throws IOException {

  @Override
  public void refreshAdminAcls() throws IOException {
    try {
      rpcProxy.refreshAdminAcls(NULL_CONTROLLER,
          VOID_REFRESH_ADMIN_ACLS_REQUEST);
    } catch (ServiceException se) {
      throw ProtobufHelper.getRemoteException(se);
    }
    ipc(() -> rpcProxy.refreshAdminAcls(NULL_CONTROLLER,
        VOID_REFRESH_ADMIN_ACLS_REQUEST));
  }


  @Override
  public void refreshLoadedJobCache() throws IOException {
    try {
      rpcProxy.refreshLoadedJobCache(NULL_CONTROLLER,
          VOID_REFRESH_LOADED_JOB_CACHE_REQUEST);
    } catch (ServiceException se) {
      throw ProtobufHelper.getRemoteException(se);
    }
    ipc(() -> rpcProxy.refreshLoadedJobCache(NULL_CONTROLLER,
        VOID_REFRESH_LOADED_JOB_CACHE_REQUEST));
  }

  @Override
  public void refreshJobRetentionSettings() throws IOException {
    try {
      rpcProxy.refreshJobRetentionSettings(NULL_CONTROLLER,
          VOID_REFRESH_JOB_RETENTION_SETTINGS_REQUEST);
    } catch (ServiceException se) {
      throw ProtobufHelper.getRemoteException(se);
    }
    ipc(() -> rpcProxy.refreshJobRetentionSettings(NULL_CONTROLLER,
        VOID_REFRESH_JOB_RETENTION_SETTINGS_REQUEST));
  }

  @Override
  public void refreshLogRetentionSettings() throws IOException {
    try {
      rpcProxy.refreshLogRetentionSettings(NULL_CONTROLLER,
          VOID_REFRESH_LOG_RETENTION_SETTINGS_REQUEST);
    } catch (ServiceException se) {
      throw ProtobufHelper.getRemoteException(se);
    }
    ipc(() -> rpcProxy.refreshLogRetentionSettings(NULL_CONTROLLER,
        VOID_REFRESH_LOG_RETENTION_SETTINGS_REQUEST));
  }

  @Override

@ -39,6 +39,7 @@
    <dependency>
      <groupId>com.google.protobuf</groupId>
      <artifactId>protobuf-java</artifactId>
      <scope>${transient.protobuf2.scope}</scope>
    </dependency>
    <dependency>
      <groupId>org.apache.avro</groupId>

@ -84,8 +84,14 @@
    <!-- com.google.re2j version -->
    <re2j.version>1.1</re2j.version>

    <!--Protobuf version for backward compatibility-->
    <!-- Protobuf version for backward compatibility -->
    <!-- This is used in hadoop-common for compilation only -->
    <protobuf.version>2.5.0</protobuf.version>
    <!-- Protobuf scope in hadoop common -->
    <!-- set to "provided" and protobuf2 will no longer be exported as a dependency -->
    <common.protobuf2.scope>compile</common.protobuf2.scope>
    <!-- Protobuf scope in other modules which explicitly import the library -->
    <transient.protobuf2.scope>${common.protobuf2.scope}</transient.protobuf2.scope>
    <!-- ProtocolBuffer version, actually used in Hadoop -->
    <hadoop.protobuf.version>3.7.1</hadoop.protobuf.version>
    <protoc.path>${env.HADOOP_PROTOC_PATH}</protoc.path>

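Because transient.protobuf2.scope defaults to common.protobuf2.scope, a single property on the command line switches the protobuf-2.5 scope everywhere it is declared. An illustrative invocation (the skipTests flag is only an example):

  mvn install -DskipTests -Dcommon.protobuf2.scope=provided

By the same property wiring, overriding only transient.protobuf2.scope should change the modules that reference the transient property while leaving hadoop-common's own declaration at its default.
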
@ -191,6 +191,7 @@
    <dependency>
      <groupId>com.google.protobuf</groupId>
      <artifactId>protobuf-java</artifactId>
      <scope>${transient.protobuf2.scope}</scope>
    </dependency>

    <dependency>

@ -132,6 +132,7 @@
    <dependency>
      <groupId>com.google.protobuf</groupId>
      <artifactId>protobuf-java</artifactId>
      <scope>${transient.protobuf2.scope}</scope>
    </dependency>
    <dependency>
      <groupId>org.bouncycastle</groupId>

@ -24,7 +24,6 @@

import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.ipc.ProtobufHelper;
import org.apache.hadoop.ipc.ProtobufRpcEngine2;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.yarn.exceptions.YarnException;
@ -126,6 +125,8 @@

import org.apache.hadoop.thirdparty.protobuf.ServiceException;

import static org.apache.hadoop.ipc.internal.ShadedProtobufHelper.getRemoteException;

@Private
public class ResourceManagerAdministrationProtocolPBClientImpl implements ResourceManagerAdministrationProtocol, Closeable {

@ -243,7 +244,7 @@ public String[] getGroupsForUser(String user) throws IOException {
      return (String[]) responseProto.getGroupsList().toArray(
          new String[responseProto.getGroupsCount()]);
    } catch (ServiceException e) {
      throw ProtobufHelper.getRemoteException(e);
      throw getRemoteException(e);
    }
  }

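In this file the static import swaps ProtobufHelper.getRemoteException for the ShadedProtobufHelper equivalent, which accepts the shaded ServiceException. A sketch of the behaviour such a method plausibly has (an assumption, not the verbatim Hadoop source):

  import java.io.IOException;

  import org.apache.hadoop.thirdparty.protobuf.ServiceException;

  final class RemoteExceptionSketch {
    /**
     * Return the IOException wrapped by a shaded ServiceException;
     * if the cause is not an IOException, wrap the ServiceException
     * itself so callers only ever see IOException.
     */
    static IOException getRemoteException(ServiceException se) {
      Throwable cause = se.getCause();
      return (cause instanceof IOException)
          ? (IOException) cause
          : new IOException(se);
    }
  }
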
@ -38,6 +38,7 @@
      <groupId>com.google.protobuf</groupId>
      <artifactId>protobuf-java</artifactId>
      <version>${hadoop.protobuf.version}</version>
      <scope>${transient.protobuf2.scope}</scope>
    </dependency>
    <dependency>
      <groupId>io.netty</groupId>

@ -89,6 +89,7 @@
    <dependency>
      <groupId>com.google.protobuf</groupId>
      <artifactId>protobuf-java</artifactId>
      <scope>${transient.protobuf2.scope}</scope>
    </dependency>
    <dependency>
      <groupId>junit</groupId>

@ -81,6 +81,7 @@
    <dependency>
      <groupId>com.google.protobuf</groupId>
      <artifactId>protobuf-java</artifactId>
      <scope>${transient.protobuf2.scope}</scope>
    </dependency>
    <dependency>
      <groupId>commons-io</groupId>