HDFS-12447. Rename AddECPolicyResponse to AddErasureCodingPolicyResponse. Contributed by SammiChen.

commit a12f09ba3c (parent ce943eb17a)
Author: Andrew Wang
Date:   2017-09-20 11:51:17 -07:00

18 changed files with 85 additions and 73 deletions


@@ -102,7 +102,7 @@
 import org.apache.hadoop.hdfs.client.impl.LeaseRenewer;
 import org.apache.hadoop.hdfs.net.Peer;
 import org.apache.hadoop.hdfs.protocol.AclException;
-import org.apache.hadoop.hdfs.protocol.AddECPolicyResponse;
+import org.apache.hadoop.hdfs.protocol.AddErasureCodingPolicyResponse;
 import org.apache.hadoop.hdfs.protocol.BlockStoragePolicy;
 import org.apache.hadoop.hdfs.protocol.CacheDirectiveEntry;
 import org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo;
@@ -2807,13 +2807,14 @@ public Map<String, String> getErasureCodingCodecs() throws IOException {
     }
   }

-  public AddECPolicyResponse[] addErasureCodingPolicies(
+  public AddErasureCodingPolicyResponse[] addErasureCodingPolicies(
       ErasureCodingPolicy[] policies) throws IOException {
     checkOpen();
     try (TraceScope ignored = tracer.newScope("addErasureCodingPolicies")) {
       return namenode.addErasureCodingPolicies(policies);
     } catch (RemoteException re) {
-      throw re.unwrapRemoteException(AccessControlException.class);
+      throw re.unwrapRemoteException(AccessControlException.class,
+          SafeModeException.class);
     }
   }
@@ -2823,7 +2824,8 @@ public void removeErasureCodingPolicy(String ecPolicyName)
     try (TraceScope ignored = tracer.newScope("removeErasureCodingPolicy")) {
       namenode.removeErasureCodingPolicy(ecPolicyName);
     } catch (RemoteException re) {
-      throw re.unwrapRemoteException(AccessControlException.class);
+      throw re.unwrapRemoteException(AccessControlException.class,
+          SafeModeException.class);
     }
   }

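A note on the secondary change here: besides the rename, both catch blocks above now unwrap SafeModeException in addition to AccessControlException. A minimal, hypothetical sketch of what that enables for a caller; the import path for SafeModeException is assumed to be the server-side org.apache.hadoop.hdfs.server.namenode.SafeModeException, and the retry message is illustrative:

    import java.io.IOException;

    import org.apache.hadoop.hdfs.DistributedFileSystem;
    import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
    import org.apache.hadoop.hdfs.server.namenode.SafeModeException;

    class SafeModeAwareCaller {
      // Catches the now-unwrapped SafeModeException directly rather than
      // inspecting a generic RemoteException.
      static void addPolicies(DistributedFileSystem dfs,
          ErasureCodingPolicy[] policies) throws IOException {
        try {
          dfs.addErasureCodingPolicies(policies);
        } catch (SafeModeException e) {
          System.err.println("NameNode in safe mode, retry later: "
              + e.getMessage());
        }
      }
    }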

@@ -64,7 +64,7 @@
 import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
 import org.apache.hadoop.hdfs.client.HdfsDataOutputStream;
 import org.apache.hadoop.hdfs.client.impl.CorruptFileBlockIterator;
-import org.apache.hadoop.hdfs.protocol.AddECPolicyResponse;
+import org.apache.hadoop.hdfs.protocol.AddErasureCodingPolicyResponse;
 import org.apache.hadoop.hdfs.protocol.BlockStoragePolicy;
 import org.apache.hadoop.hdfs.protocol.CacheDirectiveEntry;
 import org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo;
@@ -2650,7 +2650,7 @@ public Map<String, String> getAllErasureCodingCodecs()
    * @return Return the response list of adding operations.
    * @throws IOException
    */
-  public AddECPolicyResponse[] addErasureCodingPolicies(
+  public AddErasureCodingPolicyResponse[] addErasureCodingPolicies(
       ErasureCodingPolicy[] policies) throws IOException {
     return dfs.addErasureCodingPolicies(policies);
   }


@@ -35,7 +35,7 @@
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.DFSInotifyEventInputStream;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
-import org.apache.hadoop.hdfs.protocol.AddECPolicyResponse;
+import org.apache.hadoop.hdfs.protocol.AddErasureCodingPolicyResponse;
 import org.apache.hadoop.hdfs.protocol.CacheDirectiveEntry;
 import org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo;
 import org.apache.hadoop.hdfs.protocol.CachePoolEntry;
@@ -562,7 +562,7 @@ public void unsetErasureCodingPolicy(final Path path) throws IOException {
    * @return Return the response list of adding operations.
    * @throws IOException
    */
-  public AddECPolicyResponse[] addErasureCodingPolicies(
+  public AddErasureCodingPolicyResponse[] addErasureCodingPolicies(
       ErasureCodingPolicy[] policies) throws IOException {
     return dfs.addErasureCodingPolicies(policies);
   }


@@ -22,25 +22,25 @@
 /**
  * A response of add an ErasureCoding policy.
  */
-public class AddECPolicyResponse {
+public class AddErasureCodingPolicyResponse {
   private boolean succeed;
   private ErasureCodingPolicy policy;
   private String errorMsg;

-  public AddECPolicyResponse(ErasureCodingPolicy policy) {
+  public AddErasureCodingPolicyResponse(ErasureCodingPolicy policy) {
     this.policy = policy;
     this.succeed = true;
   }

-  public AddECPolicyResponse(ErasureCodingPolicy policy,
+  public AddErasureCodingPolicyResponse(ErasureCodingPolicy policy,
       String errorMsg) {
     this.policy = policy;
     this.errorMsg = errorMsg;
     this.succeed = false;
   }

-  public AddECPolicyResponse(ErasureCodingPolicy policy,
+  public AddErasureCodingPolicyResponse(ErasureCodingPolicy policy,
       HadoopIllegalArgumentException e) {
     this(policy, e.getMessage());
   }

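For orientation, a minimal sketch of constructing and inspecting the renamed response type. The constructors are those shown above; the accessors (getPolicy, isSucceed, getErrorMsg) appear in the converter and test hunks below, and the sample policy and error string are invented for illustration:

    import org.apache.hadoop.hdfs.protocol.AddErasureCodingPolicyResponse;
    import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
    import org.apache.hadoop.hdfs.protocol.SystemErasureCodingPolicies;

    class AddResponseSketch {
      public static void main(String[] args) {
        // Any built-in policy works as a stand-in for a newly added one.
        ErasureCodingPolicy policy =
            SystemErasureCodingPolicies.getPolicies().get(0);

        // One-arg constructor: succeed == true.
        AddErasureCodingPolicyResponse ok =
            new AddErasureCodingPolicyResponse(policy);
        // Two-arg constructor: records a failure and its message.
        AddErasureCodingPolicyResponse failed =
            new AddErasureCodingPolicyResponse(policy,
                "cell size not supported");

        for (AddErasureCodingPolicyResponse r :
            new AddErasureCodingPolicyResponse[] {ok, failed}) {
          System.out.println(r.isSucceed()
              ? "added: " + r.getPolicy().getName()
              : "failed: " + r.getErrorMsg());
        }
      }
    }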

@@ -1584,7 +1584,7 @@ void setErasureCodingPolicy(String src, String ecPolicyName)
    * @throws IOException
    */
   @AtMostOnce
-  AddECPolicyResponse[] addErasureCodingPolicies(
+  AddErasureCodingPolicyResponse[] addErasureCodingPolicies(
       ErasureCodingPolicy[] policies) throws IOException;

   /**


@@ -50,7 +50,7 @@
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.AddBlockFlag;
 import org.apache.hadoop.hdfs.inotify.EventBatchList;
-import org.apache.hadoop.hdfs.protocol.AddECPolicyResponse;
+import org.apache.hadoop.hdfs.protocol.AddErasureCodingPolicyResponse;
 import org.apache.hadoop.hdfs.protocol.BlockStoragePolicy;
 import org.apache.hadoop.hdfs.protocol.CacheDirectiveEntry;
 import org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo;
@@ -1718,7 +1718,7 @@ public EventBatchList getEditsFromTxid(long txid) throws IOException {
   }

   @Override
-  public AddECPolicyResponse[] addErasureCodingPolicies(
+  public AddErasureCodingPolicyResponse[] addErasureCodingPolicies(
       ErasureCodingPolicy[] policies) throws IOException {
     List<ErasureCodingPolicyProto> protos = Arrays.stream(policies)
         .map(PBHelperClient::convertErasureCodingPolicy)
@@ -1729,9 +1729,10 @@ public AddECPolicyResponse[] addErasureCodingPolicies(
     try {
       AddErasureCodingPoliciesResponseProto rep = rpcProxy
           .addErasureCodingPolicies(null, req);
-      AddECPolicyResponse[] responses = rep.getResponsesList().stream()
-          .map(PBHelperClient::convertAddECPolicyResponse)
-          .toArray(AddECPolicyResponse[]::new);
+      AddErasureCodingPolicyResponse[] responses =
+          rep.getResponsesList().stream()
+          .map(PBHelperClient::convertAddErasureCodingPolicyResponse)
+          .toArray(AddErasureCodingPolicyResponse[]::new);
       return responses;
     } catch (ServiceException e) {
       throw ProtobufHelper.getRemoteException(e);


@@ -58,7 +58,7 @@
 import org.apache.hadoop.hdfs.inotify.Event;
 import org.apache.hadoop.hdfs.inotify.EventBatch;
 import org.apache.hadoop.hdfs.inotify.EventBatchList;
-import org.apache.hadoop.hdfs.protocol.AddECPolicyResponse;
+import org.apache.hadoop.hdfs.protocol.AddErasureCodingPolicyResponse;
 import org.apache.hadoop.hdfs.protocol.Block;
 import org.apache.hadoop.hdfs.protocol.BlockStoragePolicy;
 import org.apache.hadoop.hdfs.protocol.BlockType;
@@ -137,7 +137,7 @@
 import org.apache.hadoop.hdfs.protocol.proto.EncryptionZonesProtos.ZoneReencryptionStatusProto;
 import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos;
 import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.AccessModeProto;
-import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.AddECPolicyResponseProto;
+import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.AddErasureCodingPolicyResponseProto;
 import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockProto;
 import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockStoragePolicyProto;
 import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.BlockTypeProto;
@@ -2981,10 +2981,11 @@ public static CodecProto convertErasureCodingCodec(String codec,
     return builder.build();
   }

-  public static AddECPolicyResponseProto convertAddECPolicyResponse(
-      AddECPolicyResponse response) {
-    AddECPolicyResponseProto.Builder builder =
-        AddECPolicyResponseProto.newBuilder()
+  public static AddErasureCodingPolicyResponseProto
+      convertAddErasureCodingPolicyResponse(
+      AddErasureCodingPolicyResponse response) {
+    AddErasureCodingPolicyResponseProto.Builder builder =
+        AddErasureCodingPolicyResponseProto.newBuilder()
         .setPolicy(convertErasureCodingPolicy(response.getPolicy()))
         .setSucceed(response.isSucceed());
     if (!response.isSucceed()) {
@@ -2993,13 +2994,14 @@ public static AddECPolicyResponseProto convertAddECPolicyResponse(
     return builder.build();
   }

-  public static AddECPolicyResponse convertAddECPolicyResponse(
-      AddECPolicyResponseProto proto) {
+  public static AddErasureCodingPolicyResponse
+      convertAddErasureCodingPolicyResponse(
+      AddErasureCodingPolicyResponseProto proto) {
     ErasureCodingPolicy policy = convertErasureCodingPolicy(proto.getPolicy());
     if (proto.getSucceed()) {
-      return new AddECPolicyResponse(policy);
+      return new AddErasureCodingPolicyResponse(policy);
     } else {
-      return new AddECPolicyResponse(policy, proto.getErrorMsg());
+      return new AddErasureCodingPolicyResponse(policy, proto.getErrorMsg());
     }
   }

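As a quick aid, a hedged round-trip sketch through the renamed converters, mirroring what TestPBHelper verifies further down; the error string is invented:

    import org.apache.hadoop.hdfs.protocol.AddErasureCodingPolicyResponse;
    import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
    import org.apache.hadoop.hdfs.protocol.SystemErasureCodingPolicies;
    import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos;
    import org.apache.hadoop.hdfs.protocolPB.PBHelperClient;

    class ConvertRoundTrip {
      public static void main(String[] args) {
        ErasureCodingPolicy policy =
            SystemErasureCodingPolicies.getPolicies().get(0);
        AddErasureCodingPolicyResponse response =
            new AddErasureCodingPolicyResponse(policy, "example failure");

        // Object -> proto: policy, succeed, and errorMsg are carried over.
        HdfsProtos.AddErasureCodingPolicyResponseProto proto =
            PBHelperClient.convertAddErasureCodingPolicyResponse(response);
        // Proto -> object: the failure branch restores the error message.
        AddErasureCodingPolicyResponse back =
            PBHelperClient.convertAddErasureCodingPolicyResponse(proto);

        System.out.println(back.isSucceed() + " / " + back.getErrorMsg());
      }
    }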

@@ -58,7 +58,7 @@ message AddErasureCodingPoliciesRequestProto {
 }

 message AddErasureCodingPoliciesResponseProto {
-  repeated AddECPolicyResponseProto responses = 1;
+  repeated AddErasureCodingPolicyResponseProto responses = 1;
 }

 message RemoveErasureCodingPolicyRequestProto {


@@ -390,7 +390,7 @@ message ErasureCodingPolicyProto {
   optional ErasureCodingPolicyState state = 5 [default = ENABLED];
 }

-message AddECPolicyResponseProto {
+message AddErasureCodingPolicyResponseProto {
   required ErasureCodingPolicyProto policy = 1;
   required bool succeed = 2;
   optional string errorMsg = 3;


@@ -36,7 +36,7 @@
 import org.apache.hadoop.fs.FsServerDefaults;
 import org.apache.hadoop.fs.Options.Rename;
 import org.apache.hadoop.fs.QuotaUsage;
-import org.apache.hadoop.hdfs.protocol.AddECPolicyResponse;
+import org.apache.hadoop.hdfs.protocol.AddErasureCodingPolicyResponse;
 import org.apache.hadoop.hdfs.protocol.BlockStoragePolicy;
 import org.apache.hadoop.hdfs.protocol.CacheDirectiveEntry;
 import org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo;
@@ -1721,15 +1721,16 @@ public AddErasureCodingPoliciesResponseProto addErasureCodingPolicies(
       ErasureCodingPolicy[] policies = request.getEcPoliciesList().stream()
           .map(PBHelperClient::convertErasureCodingPolicy)
           .toArray(ErasureCodingPolicy[]::new);
-      AddECPolicyResponse[] result = server
+      AddErasureCodingPolicyResponse[] result = server
           .addErasureCodingPolicies(policies);
-      List<HdfsProtos.AddECPolicyResponseProto> responseProtos = Arrays
-          .stream(result).map(PBHelperClient::convertAddECPolicyResponse)
-          .collect(Collectors.toList());
+      List<HdfsProtos.AddErasureCodingPolicyResponseProto> responseProtos =
+          Arrays.stream(result)
+          .map(PBHelperClient::convertAddErasureCodingPolicyResponse)
+          .collect(Collectors.toList());
       AddErasureCodingPoliciesResponseProto response =
           AddErasureCodingPoliciesResponseProto.newBuilder()
               .addAllResponses(responseProtos).build();
       return response;
     } catch (IOException e) {
       throw new ServiceException(e);


@@ -180,7 +180,7 @@
 import org.apache.hadoop.hdfs.HAUtil;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.UnknownCryptoProtocolVersionException;
-import org.apache.hadoop.hdfs.protocol.AddECPolicyResponse;
+import org.apache.hadoop.hdfs.protocol.AddErasureCodingPolicyResponse;
 import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;
 import org.apache.hadoop.hdfs.protocol.Block;
 import org.apache.hadoop.hdfs.protocol.BlockType;
@@ -7193,12 +7193,13 @@ void setErasureCodingPolicy(final String srcArg, final String ecPolicyName,
    * rebuilding
    * @return The according result of add operation.
    */
-  AddECPolicyResponse[] addErasureCodingPolicies(ErasureCodingPolicy[] policies,
-      final boolean logRetryCache) throws IOException {
+  AddErasureCodingPolicyResponse[] addErasureCodingPolicies(
+      ErasureCodingPolicy[] policies, final boolean logRetryCache)
+      throws IOException {
     final String operationName = "addErasureCodingPolicies";
     String addECPolicyName = "";
     checkOperation(OperationCategory.WRITE);
-    List<AddECPolicyResponse> responses = new ArrayList<>();
+    List<AddErasureCodingPolicyResponse> responses = new ArrayList<>();
     boolean success = false;
     writeLock();
     try {
@@ -7210,13 +7211,13 @@ AddECPolicyResponse[] addErasureCodingPolicies(ErasureCodingPolicy[] policies,
           FSDirErasureCodingOp.addErasureCodingPolicy(this, policy,
               logRetryCache);
           addECPolicyName = newPolicy.getName();
-          responses.add(new AddECPolicyResponse(newPolicy));
+          responses.add(new AddErasureCodingPolicyResponse(newPolicy));
         } catch (HadoopIllegalArgumentException e) {
-          responses.add(new AddECPolicyResponse(policy, e));
+          responses.add(new AddErasureCodingPolicyResponse(policy, e));
         }
       }
       success = true;
-      return responses.toArray(new AddECPolicyResponse[0]);
+      return responses.toArray(new AddErasureCodingPolicyResponse[0]);
     } finally {
       writeUnlock(operationName);
       if (success) {


@@ -85,7 +85,7 @@
 import org.apache.hadoop.hdfs.inotify.EventBatch;
 import org.apache.hadoop.hdfs.inotify.EventBatchList;
 import org.apache.hadoop.hdfs.protocol.AclException;
-import org.apache.hadoop.hdfs.protocol.AddECPolicyResponse;
+import org.apache.hadoop.hdfs.protocol.AddErasureCodingPolicyResponse;
 import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;
 import org.apache.hadoop.hdfs.protocol.BlockListAsLongs;
 import org.apache.hadoop.hdfs.protocol.BlockStoragePolicy;
@@ -2333,17 +2333,18 @@ public void unsetErasureCodingPolicy(String src) throws IOException {
   }

   @Override
-  public AddECPolicyResponse[] addErasureCodingPolicies(
+  public AddErasureCodingPolicyResponse[] addErasureCodingPolicies(
       ErasureCodingPolicy[] policies) throws IOException {
     checkNNStartup();
     namesystem.checkSuperuserPrivilege();
     final CacheEntryWithPayload cacheEntry =
         RetryCache.waitForCompletion(retryCache, null);
     if (cacheEntry != null && cacheEntry.isSuccess()) {
-      return (AddECPolicyResponse[]) cacheEntry.getPayload();
+      return (AddErasureCodingPolicyResponse[]) cacheEntry.getPayload();
     }
     boolean success = false;
-    AddECPolicyResponse[] responses = new AddECPolicyResponse[0];
+    AddErasureCodingPolicyResponse[] responses =
+        new AddErasureCodingPolicyResponse[0];
     try {
       responses =
           namesystem.addErasureCodingPolicies(policies, cacheEntry != null);


@@ -21,7 +21,7 @@
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
-import org.apache.hadoop.hdfs.protocol.AddECPolicyResponse;
+import org.apache.hadoop.hdfs.protocol.AddErasureCodingPolicyResponse;
 import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
 import org.apache.hadoop.hdfs.util.ECPolicyLoader;
 import org.apache.hadoop.io.erasurecode.ErasureCodeConstants;
@@ -175,9 +175,10 @@ public int run(Configuration conf, List<String> args) throws IOException {
       List<ErasureCodingPolicy> policies =
           new ECPolicyLoader().loadPolicy(filePath);
       if (policies.size() > 0) {
-        AddECPolicyResponse[] responses = dfs.addErasureCodingPolicies(
+        AddErasureCodingPolicyResponse[] responses =
+            dfs.addErasureCodingPolicies(
             policies.toArray(new ErasureCodingPolicy[policies.size()]));
-        for (AddECPolicyResponse response : responses) {
+        for (AddErasureCodingPolicyResponse response : responses) {
           System.out.println(response);
         }
       } else {

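For context, the hunk above is the admin-tool end of this API. A hedged caller-side sketch of the same flow, assuming default configuration; the policy-file path is illustrative and the XML format is the one ECPolicyLoader already parses:

    import java.util.List;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.hdfs.DistributedFileSystem;
    import org.apache.hadoop.hdfs.protocol.AddErasureCodingPolicyResponse;
    import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
    import org.apache.hadoop.hdfs.util.ECPolicyLoader;

    class AddPoliciesFromFile {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        DistributedFileSystem dfs =
            (DistributedFileSystem) FileSystem.get(conf);

        // Load user-defined policies from XML, as ECAdmin does above.
        List<ErasureCodingPolicy> policies =
            new ECPolicyLoader().loadPolicy("/tmp/user_ec_policies.xml");

        // One response per submitted policy: success or an error message.
        AddErasureCodingPolicyResponse[] responses =
            dfs.addErasureCodingPolicies(
                policies.toArray(new ErasureCodingPolicy[policies.size()]));
        for (AddErasureCodingPolicyResponse response : responses) {
          System.out.println(response);
        }
      }
    }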

@@ -108,7 +108,7 @@
 import org.apache.hadoop.hdfs.MiniDFSCluster.NameNodeInfo;
 import org.apache.hadoop.hdfs.client.HdfsClientConfigKeys;
 import org.apache.hadoop.hdfs.client.HdfsDataInputStream;
-import org.apache.hadoop.hdfs.protocol.AddECPolicyResponse;
+import org.apache.hadoop.hdfs.protocol.AddErasureCodingPolicyResponse;
 import org.apache.hadoop.hdfs.protocol.Block;
 import org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo;
 import org.apache.hadoop.hdfs.protocol.CachePoolInfo;
@@ -1472,7 +1472,7 @@ public static void runOperations(MiniDFSCluster cluster,
     ErasureCodingPolicy newPolicy1 =
         new ErasureCodingPolicy(ErasureCodeConstants.RS_3_2_SCHEMA, 8 * 1024);
     ErasureCodingPolicy[] policyArray = new ErasureCodingPolicy[] {newPolicy1};
-    AddECPolicyResponse[] responses =
+    AddErasureCodingPolicyResponse[] responses =
         filesystem.addErasureCodingPolicies(policyArray);
     newPolicy1 = responses[0].getPolicy();


@@ -23,7 +23,7 @@
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
-import org.apache.hadoop.hdfs.protocol.AddECPolicyResponse;
+import org.apache.hadoop.hdfs.protocol.AddErasureCodingPolicyResponse;
 import org.apache.hadoop.hdfs.protocol.DirectoryListing;
 import org.apache.hadoop.hdfs.protocol.SystemErasureCodingPolicies;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants;
@@ -686,7 +686,7 @@ public void testAddErasureCodingPolicies() throws Exception {
     ErasureCodingPolicy newPolicy =
         new ErasureCodingPolicy(toAddSchema, 128 * 1024);
     ErasureCodingPolicy[] policyArray = new ErasureCodingPolicy[]{newPolicy};
-    AddECPolicyResponse[] responses =
+    AddErasureCodingPolicyResponse[] responses =
         fs.addErasureCodingPolicies(policyArray);
     assertEquals(1, responses.length);
     assertFalse(responses[0].isSucceed());
@@ -839,7 +839,8 @@ public void testDifferentErasureCodingPolicyCellSize() throws Exception {
         new ErasureCodingPolicy(ErasureCodeConstants.RS_3_2_SCHEMA, 8 * 1024);
     ErasureCodingPolicy[] policyArray =
         new ErasureCodingPolicy[] {newPolicy1};
-    AddECPolicyResponse[] responses = fs.addErasureCodingPolicies(policyArray);
+    AddErasureCodingPolicyResponse[] responses =
+        fs.addErasureCodingPolicies(policyArray);
     assertEquals(1, responses.length);
     assertTrue(responses[0].isSucceed());
     newPolicy1 = responses[0].getPolicy();


@@ -19,7 +19,7 @@
 import com.google.protobuf.UninitializedMessageException;

-import org.apache.hadoop.hdfs.protocol.AddECPolicyResponse;
+import org.apache.hadoop.hdfs.protocol.AddErasureCodingPolicyResponse;
 import org.apache.hadoop.hdfs.protocol.SystemErasureCodingPolicies;
 import org.apache.hadoop.hdfs.server.protocol.SlowDiskReports;
@@ -913,14 +913,15 @@ public void testConvertAddingECPolicyResponse() throws Exception {
     // Check conversion of the built-in policies.
     for (ErasureCodingPolicy policy :
         SystemErasureCodingPolicies.getPolicies()) {
-      AddECPolicyResponse response = new AddECPolicyResponse(policy);
-      HdfsProtos.AddECPolicyResponseProto proto = PBHelperClient
-          .convertAddECPolicyResponse(response);
+      AddErasureCodingPolicyResponse response =
+          new AddErasureCodingPolicyResponse(policy);
+      HdfsProtos.AddErasureCodingPolicyResponseProto proto = PBHelperClient
+          .convertAddErasureCodingPolicyResponse(response);
       // Optional fields should not be set.
       assertFalse("Unnecessary field is set.", proto.hasErrorMsg());
       // Convert proto back to an object and check for equality.
-      AddECPolicyResponse convertedResponse = PBHelperClient
-          .convertAddECPolicyResponse(proto);
+      AddErasureCodingPolicyResponse convertedResponse = PBHelperClient
+          .convertAddErasureCodingPolicyResponse(proto);
       assertEquals("Converted policy not equal", response.getPolicy(),
           convertedResponse.getPolicy());
       assertEquals("Converted policy not equal", response.isSucceed(),
@@ -929,13 +930,13 @@ public void testConvertAddingECPolicyResponse() throws Exception {
     ErasureCodingPolicy policy = SystemErasureCodingPolicies
         .getPolicies().get(0);
-    AddECPolicyResponse response =
-        new AddECPolicyResponse(policy, "failed");
-    HdfsProtos.AddECPolicyResponseProto proto = PBHelperClient
-        .convertAddECPolicyResponse(response);
+    AddErasureCodingPolicyResponse response =
+        new AddErasureCodingPolicyResponse(policy, "failed");
+    HdfsProtos.AddErasureCodingPolicyResponseProto proto = PBHelperClient
+        .convertAddErasureCodingPolicyResponse(response);
     // Convert proto back to an object and check for equality.
-    AddECPolicyResponse convertedResponse = PBHelperClient
-        .convertAddECPolicyResponse(proto);
+    AddErasureCodingPolicyResponse convertedResponse = PBHelperClient
+        .convertAddErasureCodingPolicyResponse(proto);
     assertEquals("Converted policy not equal", response.getPolicy(),
         convertedResponse.getPolicy());
     assertEquals("Converted policy not equal", response.getErrorMsg(),


@@ -47,7 +47,7 @@
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.StripedFileTestUtil;
-import org.apache.hadoop.hdfs.protocol.AddECPolicyResponse;
+import org.apache.hadoop.hdfs.protocol.AddErasureCodingPolicyResponse;
 import org.apache.hadoop.hdfs.protocol.Block;
 import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
 import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyState;
@@ -735,7 +735,7 @@ public void testErasureCodingPolicyOperations() throws IOException {
     ErasureCodingPolicy newPolicy =
         new ErasureCodingPolicy(schema, cellSize, (byte) 0);
     ErasureCodingPolicy[] policyArray = new ErasureCodingPolicy[]{newPolicy};
-    AddECPolicyResponse[] responses =
+    AddErasureCodingPolicyResponse[] responses =
         fs.addErasureCodingPolicies(policyArray);
     assertEquals(1, responses.length);
     assertTrue(responses[0].isSucceed());


@@ -35,7 +35,7 @@
 import java.util.EnumSet;

 import org.apache.hadoop.hdfs.StripedFileTestUtil;
-import org.apache.hadoop.hdfs.protocol.AddECPolicyResponse;
+import org.apache.hadoop.hdfs.protocol.AddErasureCodingPolicyResponse;
 import org.apache.hadoop.hdfs.protocol.Block;
 import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyState;
 import org.apache.hadoop.hdfs.protocol.SystemErasureCodingPolicies;
@@ -847,7 +847,8 @@ public void testSaveAndLoadErasureCodingPolicies() throws IOException{
     ErasureCodingPolicy newPolicy =
         new ErasureCodingPolicy(newSchema, 2 * 1024, (byte) 254);
     ErasureCodingPolicy[] policies = new ErasureCodingPolicy[]{newPolicy};
-    AddECPolicyResponse[] ret = fs.addErasureCodingPolicies(policies);
+    AddErasureCodingPolicyResponse[] ret =
+        fs.addErasureCodingPolicies(policies);
     assertEquals(1, ret.length);
     assertEquals(true, ret[0].isSucceed());
     newPolicy = ret[0].getPolicy();