HDFS-12682. ECAdmin -listPolicies will always show SystemErasureCodingPolicies state as DISABLED.
commit e565b5277d (parent 1700adc6f7)
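In short: the ENABLED/DISABLED/REMOVED state moves off the (now immutable) ErasureCodingPolicy and into a new ErasureCodingPolicyInfo wrapper that the NameNode returns from getErasureCodingPolicies(), so -listPolicies reports the live state instead of the DISABLED default. A minimal client-side sketch of the new shape of the API follows; the class name and the plain Configuration setup are illustrative (not part of this commit) and it assumes the default filesystem is HDFS.

import java.util.Collection;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyInfo;

public class ListEcPolicyStates {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    try (FileSystem fs = FileSystem.get(conf)) {
      DistributedFileSystem dfs = (DistributedFileSystem) fs;
      // After this commit the collection carries ErasureCodingPolicyInfo,
      // which pairs the immutable policy with its current state.
      Collection<ErasureCodingPolicyInfo> infos = dfs.getAllErasureCodingPolicies();
      for (ErasureCodingPolicyInfo info : infos) {
        System.out.println(info.getPolicy().getName() + " -> " + info.getState());
      }
    }
  }
}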
@@ -119,6 +119,7 @@
 import org.apache.hadoop.hdfs.protocol.EncryptionZone;
 import org.apache.hadoop.hdfs.protocol.EncryptionZoneIterator;
 import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
+import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyInfo;
 import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants.DatanodeReportType;
@@ -2793,7 +2794,8 @@ public void checkAccess(String src, FsAction mode) throws IOException {
     }
   }

-  public ErasureCodingPolicy[] getErasureCodingPolicies() throws IOException {
+  public ErasureCodingPolicyInfo[] getErasureCodingPolicies()
+      throws IOException {
    checkOpen();
    try (TraceScope ignored = tracer.newScope("getErasureCodingPolicies")) {
      return namenode.getErasureCodingPolicies();
@@ -77,6 +77,7 @@
 import org.apache.hadoop.hdfs.protocol.DirectoryListing;
 import org.apache.hadoop.hdfs.protocol.EncryptionZone;
 import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
+import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyInfo;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants.DatanodeReportType;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants.ReencryptAction;
@@ -2676,7 +2677,7 @@ public ErasureCodingPolicy next(final FileSystem fs, final Path p)
    * @return all erasure coding policies supported by this file system.
    * @throws IOException
    */
-  public Collection<ErasureCodingPolicy> getAllErasureCodingPolicies()
+  public Collection<ErasureCodingPolicyInfo> getAllErasureCodingPolicies()
      throws IOException {
    return Arrays.asList(dfs.getErasureCodingPolicies());
  }
@@ -42,6 +42,7 @@
 import org.apache.hadoop.hdfs.protocol.CachePoolInfo;
 import org.apache.hadoop.hdfs.protocol.EncryptionZone;
 import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
+import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyInfo;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants.ReencryptAction;
 import org.apache.hadoop.hdfs.protocol.OpenFileEntry;
@@ -537,7 +538,8 @@ public ErasureCodingPolicy getErasureCodingPolicy(final Path path)
   *
   * @throws IOException
   */
-  public ErasureCodingPolicy[] getErasureCodingPolicies() throws IOException {
+  public ErasureCodingPolicyInfo[] getErasureCodingPolicies()
+      throws IOException {
    return dfs.getClient().getErasureCodingPolicies();
  }

@@ -1619,7 +1619,7 @@ AddErasureCodingPolicyResponse[] addErasureCodingPolicies(
   * @throws IOException
   */
  @Idempotent
-  ErasureCodingPolicy[] getErasureCodingPolicies() throws IOException;
+  ErasureCodingPolicyInfo[] getErasureCodingPolicies() throws IOException;

  /**
   * Get the erasure coding codecs loaded in Namenode.
@@ -21,7 +21,6 @@
 import org.apache.commons.lang.builder.EqualsBuilder;
 import org.apache.commons.lang.builder.HashCodeBuilder;
 import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.io.erasurecode.ECSchema;
 import org.apache.hadoop.io.erasurecode.ErasureCodeConstants;

@@ -29,22 +28,23 @@

 /**
  * A policy about how to write/read/code an erasure coding file.
+ * <p>
+ * Note this class should be lightweight and immutable, because it's cached
+ * by {@link SystemErasureCodingPolicies}, to be returned as a part of
+ * {@link HdfsFileStatus}.
  */
-@InterfaceAudience.Public
-@InterfaceStability.Evolving
+@InterfaceAudience.Private
 public final class ErasureCodingPolicy implements Serializable {

   private static final long serialVersionUID = 0x0079fe4e;

-  private String name;
+  private final String name;
   private final ECSchema schema;
   private final int cellSize;
-  private byte id;
-  private ErasureCodingPolicyState state;
+  private final byte id;

   public ErasureCodingPolicy(String name, ECSchema schema,
-      int cellSize, byte id, ErasureCodingPolicyState state) {
+      int cellSize, byte id) {
     Preconditions.checkNotNull(name);
     Preconditions.checkNotNull(schema);
     Preconditions.checkArgument(cellSize > 0, "cellSize must be positive");
@@ -54,22 +54,14 @@ public ErasureCodingPolicy(String name, ECSchema schema,
     this.schema = schema;
     this.cellSize = cellSize;
     this.id = id;
-    this.state = state;
-  }
-
-  public ErasureCodingPolicy(String name, ECSchema schema, int cellSize,
-      byte id) {
-    this(name, schema, cellSize, id, ErasureCodingPolicyState.DISABLED);
   }

   public ErasureCodingPolicy(ECSchema schema, int cellSize, byte id) {
-    this(composePolicyName(schema, cellSize), schema, cellSize, id,
-        ErasureCodingPolicyState.DISABLED);
+    this(composePolicyName(schema, cellSize), schema, cellSize, id);
   }

   public ErasureCodingPolicy(ECSchema schema, int cellSize) {
-    this(composePolicyName(schema, cellSize), schema, cellSize, (byte) -1,
-        ErasureCodingPolicyState.DISABLED);
+    this(composePolicyName(schema, cellSize), schema, cellSize, (byte) -1);
   }

   public static String composePolicyName(ECSchema schema, int cellSize) {
@@ -86,10 +78,6 @@ public String getName() {
     return name;
   }

-  public void setName(String name) {
-    this.name = name;
-  }
-
   public ECSchema getSchema() {
     return schema;
   }
@@ -114,39 +102,14 @@ public byte getId() {
     return id;
   }

-  public void setId(byte id) {
-    this.id = id;
-  }
-
-
   public boolean isReplicationPolicy() {
     return (id == ErasureCodeConstants.REPLICATION_POLICY_ID);
   }

-  public ErasureCodingPolicyState getState() {
-    return state;
-  }
-
-  public void setState(ErasureCodingPolicyState state) {
-    this.state = state;
-  }
-
   public boolean isSystemPolicy() {
     return (this.id < ErasureCodeConstants.USER_DEFINED_POLICY_START_ID);
   }

-  public boolean isEnabled() {
-    return (this.state == ErasureCodingPolicyState.ENABLED);
-  }
-
-  public boolean isDisabled() {
-    return (this.state == ErasureCodingPolicyState.DISABLED);
-  }
-
-  public boolean isRemoved() {
-    return (this.state == ErasureCodingPolicyState.REMOVED);
-  }
-
   @Override
   public boolean equals(Object o) {
     if (o == null) {
@@ -164,7 +127,6 @@ public boolean equals(Object o) {
         .append(schema, rhs.schema)
         .append(cellSize, rhs.cellSize)
         .append(id, rhs.id)
-        .append(state, rhs.state)
         .isEquals();
   }

@@ -175,7 +137,6 @@ public int hashCode() {
         .append(schema)
         .append(cellSize)
         .append(id)
-        .append(state)
         .toHashCode();
   }

@@ -184,8 +145,7 @@ public String toString() {
     return "ErasureCodingPolicy=[" + "Name=" + name + ", "
         + "Schema=[" + schema.toString() + "], "
         + "CellSize=" + cellSize + ", "
-        + "Id=" + id + ", "
-        + "State=" + state.toString()
+        + "Id=" + id
         + "]";
   }
 }
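With state gone, ErasureCodingPolicy carries only name, schema, cell size and id. A small illustrative fragment (not from the patch; the schema values are arbitrary examples):

import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
import org.apache.hadoop.io.erasurecode.ECSchema;

public class ImmutablePolicyDemo {
  public static void main(String[] args) {
    ECSchema schema = new ECSchema("rs", 6, 3);
    // Only name, schema, cell size and id remain on the policy itself.
    ErasureCodingPolicy policy = new ErasureCodingPolicy(schema, 1024 * 1024);
    System.out.println(policy);  // toString() no longer prints a "State=" field
  }
}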
@@ -0,0 +1,106 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.protocol;
+
+import com.google.common.base.Preconditions;
+import org.apache.commons.lang.builder.EqualsBuilder;
+import org.apache.commons.lang.builder.HashCodeBuilder;
+import org.apache.hadoop.classification.InterfaceAudience;
+
+import java.io.Serializable;
+
+/**
+ * HDFS internal presentation of a {@link ErasureCodingPolicy}. Also contains
+ * additional information such as {@link ErasureCodingPolicyState}.
+ */
+@InterfaceAudience.Private
+public class ErasureCodingPolicyInfo implements Serializable {
+
+  private static final long serialVersionUID = 0x31;
+
+  private final ErasureCodingPolicy policy;
+  private ErasureCodingPolicyState state;
+
+  public ErasureCodingPolicyInfo(final ErasureCodingPolicy thePolicy,
+      final ErasureCodingPolicyState theState) {
+    Preconditions.checkNotNull(thePolicy);
+    Preconditions.checkNotNull(theState);
+    policy = thePolicy;
+    state = theState;
+  }
+
+  public ErasureCodingPolicyInfo(final ErasureCodingPolicy thePolicy) {
+    this(thePolicy, ErasureCodingPolicyState.DISABLED);
+  }
+
+  public ErasureCodingPolicy getPolicy() {
+    return policy;
+  }
+
+  public ErasureCodingPolicyState getState() {
+    return state;
+  }
+
+  public void setState(final ErasureCodingPolicyState newState) {
+    Preconditions.checkNotNull(newState, "New state should not be null.");
+    state = newState;
+  }
+
+  public boolean isEnabled() {
+    return (this.state == ErasureCodingPolicyState.ENABLED);
+  }
+
+  public boolean isDisabled() {
+    return (this.state == ErasureCodingPolicyState.DISABLED);
+  }
+
+  public boolean isRemoved() {
+    return (this.state == ErasureCodingPolicyState.REMOVED);
+  }
+
+  @Override
+  public boolean equals(Object o) {
+    if (o == null) {
+      return false;
+    }
+    if (o == this) {
+      return true;
+    }
+    if (o.getClass() != getClass()) {
+      return false;
+    }
+    ErasureCodingPolicyInfo rhs = (ErasureCodingPolicyInfo) o;
+    return new EqualsBuilder()
+        .append(policy, rhs.policy)
+        .append(state, rhs.state)
+        .isEquals();
+  }
+
+  @Override
+  public int hashCode() {
+    return new HashCodeBuilder(303855623, 582626729)
+        .append(policy)
+        .append(state)
+        .toHashCode();
+  }
+
+  @Override
+  public String toString() {
+    return policy.toString() + ", State=" + state.toString();
+  }
+}
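A minimal usage sketch of the new wrapper (illustrative class name; assumes the hadoop-hdfs-client artifact on the classpath): the one-argument constructor defaults the state to DISABLED, and only setState() changes it — in the real flow that happens on the NameNode side.

import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyInfo;
import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyState;
import org.apache.hadoop.hdfs.protocol.SystemErasureCodingPolicies;

public class EcPolicyInfoDemo {
  public static void main(String[] args) {
    ErasureCodingPolicy rs63 = SystemErasureCodingPolicies.getByID(
        SystemErasureCodingPolicies.RS_6_3_POLICY_ID);
    // The single-argument constructor defaults the state to DISABLED.
    ErasureCodingPolicyInfo info = new ErasureCodingPolicyInfo(rs63);
    System.out.println(info.isDisabled());   // true
    // Only the NameNode-side manager flips this in the real flow.
    info.setState(ErasureCodingPolicyState.ENABLED);
    System.out.println(info);                // policy toString() + ", State=ENABLED"
  }
}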
@@ -18,7 +18,6 @@
 package org.apache.hadoop.hdfs.protocol;

 import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;

 import java.io.DataInput;
 import java.io.DataOutput;
@@ -27,8 +26,7 @@
 /**
  * Value denotes the possible states of an ErasureCodingPolicy.
  */
-@InterfaceAudience.Public
-@InterfaceStability.Evolving
+@InterfaceAudience.Private
 public enum ErasureCodingPolicyState {

   /** Policy is disabled. It's policy default state. */
@@ -64,6 +64,7 @@
 import org.apache.hadoop.hdfs.protocol.ECBlockGroupStats;
 import org.apache.hadoop.hdfs.protocol.EncryptionZone;
 import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
+import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyInfo;
 import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants.DatanodeReportType;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants.ReencryptAction;
@@ -1782,17 +1783,17 @@ public void disableErasureCodingPolicy(String ecPolicyName)
   }

   @Override
-  public ErasureCodingPolicy[] getErasureCodingPolicies() throws IOException {
+  public ErasureCodingPolicyInfo[] getErasureCodingPolicies()
+      throws IOException {
     try {
       GetErasureCodingPoliciesResponseProto response = rpcProxy
           .getErasureCodingPolicies(null, VOID_GET_EC_POLICIES_REQUEST);
-      ErasureCodingPolicy[] ecPolicies =
-          new ErasureCodingPolicy[response.getEcPoliciesCount()];
+      ErasureCodingPolicyInfo[] ecPolicies =
+          new ErasureCodingPolicyInfo[response.getEcPoliciesCount()];
       int i = 0;
-      for (ErasureCodingPolicyProto ecPolicyProto :
-          response.getEcPoliciesList()) {
+      for (ErasureCodingPolicyProto proto : response.getEcPoliciesList()) {
         ecPolicies[i++] =
-            PBHelperClient.convertErasureCodingPolicy(ecPolicyProto);
+            PBHelperClient.convertErasureCodingPolicyInfo(proto);
       }
       return ecPolicies;
     } catch (ServiceException e) {
@@ -55,6 +55,7 @@
 import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.DFSUtilClient;
+import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.inotify.Event;
 import org.apache.hadoop.hdfs.inotify.EventBatch;
 import org.apache.hadoop.hdfs.inotify.EventBatchList;
@@ -79,6 +80,7 @@
 import org.apache.hadoop.hdfs.protocol.ECBlockGroupStats;
 import org.apache.hadoop.hdfs.protocol.EncryptionZone;
 import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
+import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyInfo;
 import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyState;
 import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
 import org.apache.hadoop.hdfs.protocol.FsPermissionExtension;
@@ -2956,6 +2958,9 @@ public static HdfsProtos.ErasureCodingPolicyState convertECState(
     return HdfsProtos.ErasureCodingPolicyState.valueOf(state.getValue());
   }

+  /**
+   * Convert the protobuf to a {@link ErasureCodingPolicy}.
+   */
   public static ErasureCodingPolicy convertErasureCodingPolicy(
       ErasureCodingPolicyProto proto) {
     final byte id = (byte) (proto.getId() & 0xFF);
@@ -2969,28 +2974,62 @@ public static ErasureCodingPolicy convertErasureCodingPolicy(
           "Missing schema field in ErasureCodingPolicy proto");
       Preconditions.checkArgument(proto.hasCellSize(),
           "Missing cellsize field in ErasureCodingPolicy proto");
-      Preconditions.checkArgument(proto.hasState(),
-          "Missing state field in ErasureCodingPolicy proto");

       return new ErasureCodingPolicy(proto.getName(),
           convertECSchema(proto.getSchema()),
-          proto.getCellSize(), id, convertECState(proto.getState()));
+          proto.getCellSize(), id);
     }
     return policy;
   }

-  public static ErasureCodingPolicyProto convertErasureCodingPolicy(
+  /**
+   * Convert the protobuf to a {@link ErasureCodingPolicyInfo}. This should only
+   * be needed when the caller is interested in the state of the policy.
+   */
+  public static ErasureCodingPolicyInfo convertErasureCodingPolicyInfo(
+      ErasureCodingPolicyProto proto) {
+    ErasureCodingPolicy policy = convertErasureCodingPolicy(proto);
+    ErasureCodingPolicyInfo info = new ErasureCodingPolicyInfo(policy);
+    Preconditions.checkArgument(proto.hasState(),
+        "Missing state field in ErasureCodingPolicy proto");
+    info.setState(convertECState(proto.getState()));
+    return info;
+  }
+
+  private static ErasureCodingPolicyProto.Builder createECPolicyProtoBuilder(
       ErasureCodingPolicy policy) {
-    ErasureCodingPolicyProto.Builder builder = ErasureCodingPolicyProto
-        .newBuilder()
-        .setId(policy.getId());
+    final ErasureCodingPolicyProto.Builder builder =
+        ErasureCodingPolicyProto.newBuilder().setId(policy.getId());
     // If it's not a built-in policy, need to set the optional fields.
     if (SystemErasureCodingPolicies.getByID(policy.getId()) == null) {
       builder.setName(policy.getName())
           .setSchema(convertECSchema(policy.getSchema()))
-          .setCellSize(policy.getCellSize())
-          .setState(convertECState(policy.getState()));
+          .setCellSize(policy.getCellSize());
     }
+    return builder;
+  }
+
+  /**
+   * Convert a {@link ErasureCodingPolicy} to protobuf.
+   * This means no state of the policy will be set on the protobuf.
+   */
+  public static ErasureCodingPolicyProto convertErasureCodingPolicy(
+      ErasureCodingPolicy policy) {
+    return createECPolicyProtoBuilder(policy).build();
+  }
+
+  /**
+   * Convert a {@link ErasureCodingPolicyInfo} to protobuf.
+   * The protobuf will have the policy, and state. State is relevant when:
+   * 1. Persisting a policy to fsimage
+   * 2. Returning the policy to the RPC call
+   *    {@link DistributedFileSystem#getAllErasureCodingPolicies()}
+   */
+  public static ErasureCodingPolicyProto convertErasureCodingPolicy(
+      ErasureCodingPolicyInfo info) {
+    final ErasureCodingPolicyProto.Builder builder =
+        createECPolicyProtoBuilder(info.getPolicy());
+    builder.setState(convertECState(info.getState()));
     return builder.build();
   }
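The split above means the state travels in ErasureCodingPolicyProto only when the conversion starts from an ErasureCodingPolicyInfo. A sketch of the round trip using just the helpers added here (illustrative class name; assumes the hadoop-hdfs-client protobuf classes are on the classpath):

import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyInfo;
import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyState;
import org.apache.hadoop.hdfs.protocol.SystemErasureCodingPolicies;
import org.apache.hadoop.hdfs.protocol.proto.HdfsProtos.ErasureCodingPolicyProto;
import org.apache.hadoop.hdfs.protocolPB.PBHelperClient;

public class EcPolicyProtoRoundTrip {
  public static void main(String[] args) {
    ErasureCodingPolicyInfo in = new ErasureCodingPolicyInfo(
        SystemErasureCodingPolicies.getByID(
            SystemErasureCodingPolicies.RS_6_3_POLICY_ID),
        ErasureCodingPolicyState.ENABLED);

    // Converting the info (not the bare policy) is what carries the state.
    ErasureCodingPolicyProto proto = PBHelperClient.convertErasureCodingPolicy(in);

    // Proto -> info restores both the policy and its state.
    ErasureCodingPolicyInfo out = PBHelperClient.convertErasureCodingPolicyInfo(proto);
    System.out.println(out.getState());  // ENABLED
  }
}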
@@ -0,0 +1,72 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hdfs.protocol;
+
+import org.junit.Test;
+
+import static org.apache.hadoop.hdfs.protocol.SystemErasureCodingPolicies.RS_6_3_POLICY_ID;
+import static org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyState.DISABLED;
+import static org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyState.ENABLED;
+import static org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyState.REMOVED;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
+
+/**
+ * Test {@link ErasureCodingPolicyInfo}.
+ */
+public class TestErasureCodingPolicyInfo {
+
+  @Test
+  public void testPolicyAndStateCantBeNull() {
+    try {
+      new ErasureCodingPolicyInfo(null);
+      fail("Null policy should fail");
+    } catch (NullPointerException expected) {
+    }
+
+    try {
+      new ErasureCodingPolicyInfo(SystemErasureCodingPolicies
+          .getByID(RS_6_3_POLICY_ID), null);
+      fail("Null policy should fail");
+    } catch (NullPointerException expected) {
+    }
+  }
+
+  @Test
+  public void testStates() {
+    ErasureCodingPolicyInfo info =
+        new ErasureCodingPolicyInfo(SystemErasureCodingPolicies
+            .getByID(RS_6_3_POLICY_ID));
+
+    info.setState(ENABLED);
+    assertFalse(info.isDisabled());
+    assertTrue(info.isEnabled());
+    assertFalse(info.isRemoved());
+
+    info.setState(REMOVED);
+    assertFalse(info.isDisabled());
+    assertFalse(info.isEnabled());
+    assertTrue(info.isRemoved());
+
+    info.setState(DISABLED);
+    assertTrue(info.isDisabled());
+    assertFalse(info.isEnabled());
+    assertFalse(info.isRemoved());
+  }
+}
@@ -46,6 +46,7 @@
 import org.apache.hadoop.hdfs.protocol.DirectoryListing;
 import org.apache.hadoop.hdfs.protocol.EncryptionZone;
 import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
+import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyInfo;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants;
 import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
 import org.apache.hadoop.hdfs.protocol.LastBlockWithStatus;
@@ -1683,11 +1684,12 @@ public GetEditsFromTxidResponseProto getEditsFromTxid(RpcController controller,
   public GetErasureCodingPoliciesResponseProto getErasureCodingPolicies(RpcController controller,
       GetErasureCodingPoliciesRequestProto request) throws ServiceException {
     try {
-      ErasureCodingPolicy[] ecPolicies = server.getErasureCodingPolicies();
+      ErasureCodingPolicyInfo[] ecpInfos = server.getErasureCodingPolicies();
       GetErasureCodingPoliciesResponseProto.Builder resBuilder = GetErasureCodingPoliciesResponseProto
           .newBuilder();
-      for (ErasureCodingPolicy ecPolicy : ecPolicies) {
-        resBuilder.addEcPolicies(PBHelperClient.convertErasureCodingPolicy(ecPolicy));
+      for (ErasureCodingPolicyInfo info : ecpInfos) {
+        resBuilder.addEcPolicies(
+            PBHelperClient.convertErasureCodingPolicy(info));
       }
       return resBuilder.build();
     } catch (IOException e) {
@@ -78,6 +78,7 @@
 import org.apache.hadoop.hdfs.protocol.ECBlockGroupStats;
 import org.apache.hadoop.hdfs.protocol.EncryptionZone;
 import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
+import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyInfo;
 import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants.DatanodeReportType;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants.ReencryptAction;
@@ -1785,7 +1786,8 @@ public void deleteSnapshot(String snapshotRoot, String snapshotName)
   }

   @Override
-  public ErasureCodingPolicy[] getErasureCodingPolicies() throws IOException {
+  public ErasureCodingPolicyInfo[] getErasureCodingPolicies()
+      throws IOException {
     checkOperation(OperationCategory.READ, false);
     return null;
   }
@@ -23,6 +23,7 @@
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hdfs.DFSConfigKeys;
+import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyInfo;
 import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyState;
 import org.apache.hadoop.hdfs.protocol.SystemErasureCodingPolicies;
 import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
@@ -65,18 +66,18 @@ public final class ErasureCodingPolicyManager {
    * All policies sorted by name for fast querying, include built-in policy,
    * user defined policy, removed policy.
    */
-  private Map<String, ErasureCodingPolicy> policiesByName;
+  private Map<String, ErasureCodingPolicyInfo> policiesByName;

   /**
    * All policies sorted by ID for fast querying, including built-in policy,
    * user defined policy, removed policy.
    */
-  private Map<Byte, ErasureCodingPolicy> policiesByID;
+  private Map<Byte, ErasureCodingPolicyInfo> policiesByID;

   /**
    * For better performance when query all Policies.
    */
-  private ErasureCodingPolicy[] allPolicies;
+  private ErasureCodingPolicyInfo[] allPolicies;

   /**
    * All enabled policies sorted by name for fast querying, including built-in
@@ -120,15 +121,17 @@ public void init(Configuration conf) {
      */
     for (ErasureCodingPolicy policy :
         SystemErasureCodingPolicies.getPolicies()) {
-      policiesByName.put(policy.getName(), policy);
-      policiesByID.put(policy.getId(), policy);
+      final ErasureCodingPolicyInfo info = new ErasureCodingPolicyInfo(policy);
+      policiesByName.put(policy.getName(), info);
+      policiesByID.put(policy.getId(), info);
     }

-    if (!defaultPolicyName.trim().isEmpty()) {
-      ErasureCodingPolicy ecPolicy = policiesByName.get(defaultPolicyName);
-      if (ecPolicy == null) {
+    if (!defaultPolicyName.isEmpty()) {
+      final ErasureCodingPolicyInfo info =
+          policiesByName.get(defaultPolicyName);
+      if (info == null) {
         String names = policiesByName.values()
-            .stream().map(ErasureCodingPolicy::getName)
+            .stream().map((pi) -> pi.getPolicy().getName())
            .collect(Collectors.joining(", "));
        String msg = String.format("EC policy '%s' specified at %s is not a "
            + "valid policy. Please choose from list of available "
@@ -138,11 +141,13 @@ public void init(Configuration conf) {
             names);
         throw new HadoopIllegalArgumentException(msg);
       }
-      enabledPoliciesByName.put(ecPolicy.getName(), ecPolicy);
+      info.setState(ErasureCodingPolicyState.ENABLED);
+      enabledPoliciesByName.put(info.getPolicy().getName(), info.getPolicy());
     }
     enabledPolicies =
         enabledPoliciesByName.values().toArray(new ErasureCodingPolicy[0]);
-    allPolicies = policiesByName.values().toArray(new ErasureCodingPolicy[0]);
+    allPolicies =
+        policiesByName.values().toArray(new ErasureCodingPolicyInfo[0]);

     maxCellSize = conf.getInt(
         DFSConfigKeys.DFS_NAMENODE_EC_POLICIES_MAX_CELLSIZE_KEY,
@@ -190,23 +195,50 @@ public static boolean checkStoragePolicySuitableForECStripedMode(
    * Get all system defined policies and user defined policies.
    * @return all policies
    */
-  public ErasureCodingPolicy[] getPolicies() {
+  public ErasureCodingPolicyInfo[] getPolicies() {
     return allPolicies;
   }

   /**
-   * Get a policy by policy ID, including system policy and user defined policy.
+   * Get a {@link ErasureCodingPolicy} by policy ID, including system policy
+   * and user defined policy.
    * @return ecPolicy, or null if not found
    */
   public ErasureCodingPolicy getByID(byte id) {
+    final ErasureCodingPolicyInfo ecpi = getPolicyInfoByID(id);
+    if (ecpi == null) {
+      return null;
+    }
+    return ecpi.getPolicy();
+  }
+
+  /**
+   * Get a {@link ErasureCodingPolicyInfo} by policy ID, including system policy
+   * and user defined policy.
+   */
+  private ErasureCodingPolicyInfo getPolicyInfoByID(final byte id) {
     return this.policiesByID.get(id);
   }

   /**
-   * Get a policy by policy ID, including system policy and user defined policy.
+   * Get a {@link ErasureCodingPolicy} by policy name, including system
+   * policy and user defined policy.
    * @return ecPolicy, or null if not found
    */
   public ErasureCodingPolicy getByName(String name) {
+    final ErasureCodingPolicyInfo ecpi = getPolicyInfoByName(name);
+    if (ecpi == null) {
+      return null;
+    }
+    return ecpi.getPolicy();
+  }
+
+  /**
+   * Get a {@link ErasureCodingPolicyInfo} by policy name, including system
+   * policy and user defined policy.
+   * @return ecPolicy, or null if not found
+   */
+  private ErasureCodingPolicyInfo getPolicyInfoByName(final String name) {
     return this.policiesByName.get(name);
   }

@@ -224,9 +256,6 @@ public void clear() {
    */
   public synchronized ErasureCodingPolicy addPolicy(
       ErasureCodingPolicy policy) {
-    // Set policy state into DISABLED when adding into Hadoop.
-    policy.setState(ErasureCodingPolicyState.DISABLED);
-
     if (!CodecUtil.hasCodec(policy.getCodecName())) {
       throw new HadoopIllegalArgumentException("Codec name "
           + policy.getCodecName() + " is not supported");
@@ -240,7 +269,8 @@ public synchronized ErasureCodingPolicy addPolicy(

     String assignedNewName = ErasureCodingPolicy.composePolicyName(
         policy.getSchema(), policy.getCellSize());
-    for (ErasureCodingPolicy p : getPolicies()) {
+    for (ErasureCodingPolicyInfo info : getPolicies()) {
+      final ErasureCodingPolicy p = info.getPolicy();
       if (p.getName().equals(assignedNewName)) {
         LOG.info("The policy name " + assignedNewName + " already exists");
         return p;
@@ -261,11 +291,13 @@ public synchronized ErasureCodingPolicy addPolicy(
           ErasureCodeConstants.MAX_POLICY_ID);
     }

-    policy.setName(assignedNewName);
-    policy.setId(getNextAvailablePolicyID());
-    this.policiesByName.put(policy.getName(), policy);
-    this.policiesByID.put(policy.getId(), policy);
-    allPolicies = policiesByName.values().toArray(new ErasureCodingPolicy[0]);
+    policy = new ErasureCodingPolicy(assignedNewName, policy.getSchema(),
+        policy.getCellSize(), getNextAvailablePolicyID());
+    final ErasureCodingPolicyInfo pi = new ErasureCodingPolicyInfo(policy);
+    this.policiesByName.put(policy.getName(), pi);
+    this.policiesByID.put(policy.getId(), pi);
+    allPolicies =
+        policiesByName.values().toArray(new ErasureCodingPolicyInfo[0]);
     return policy;
   }

@@ -283,12 +315,13 @@ private byte getNextAvailablePolicyID() {
    * Remove an User erasure coding policy by policyName.
    */
   public synchronized void removePolicy(String name) {
-    ErasureCodingPolicy ecPolicy = policiesByName.get(name);
-    if (ecPolicy == null) {
+    final ErasureCodingPolicyInfo info = policiesByName.get(name);
+    if (info == null) {
       throw new HadoopIllegalArgumentException("The policy name " +
           name + " does not exist");
     }

+    final ErasureCodingPolicy ecPolicy = info.getPolicy();
     if (ecPolicy.isSystemPolicy()) {
       throw new HadoopIllegalArgumentException("System erasure coding policy " +
           name + " cannot be removed");
@@ -299,7 +332,7 @@ public synchronized void removePolicy(String name) {
       enabledPolicies =
           enabledPoliciesByName.values().toArray(new ErasureCodingPolicy[0]);
     }
-    ecPolicy.setState(ErasureCodingPolicyState.REMOVED);
+    info.setState(ErasureCodingPolicyState.REMOVED);
     LOG.info("Remove erasure coding policy " + name);

     /*
@@ -310,10 +343,10 @@ public synchronized void removePolicy(String name) {

   @VisibleForTesting
   public List<ErasureCodingPolicy> getRemovedPolicies() {
-    ArrayList<ErasureCodingPolicy> removedPolicies =
-        new ArrayList<ErasureCodingPolicy>();
-    for (ErasureCodingPolicy ecPolicy : policiesByName.values()) {
-      if (ecPolicy.isRemoved()) {
+    ArrayList<ErasureCodingPolicy> removedPolicies = new ArrayList<>();
+    for (ErasureCodingPolicyInfo info : policiesByName.values()) {
+      final ErasureCodingPolicy ecPolicy = info.getPolicy();
+      if (info.isRemoved()) {
         removedPolicies.add(ecPolicy);
       }
     }
@@ -324,8 +357,8 @@ public List<ErasureCodingPolicy> getRemovedPolicies() {
    * Disable an erasure coding policy by policyName.
    */
   public synchronized void disablePolicy(String name) {
-    ErasureCodingPolicy ecPolicy = policiesByName.get(name);
-    if (ecPolicy == null) {
+    ErasureCodingPolicyInfo info = policiesByName.get(name);
+    if (info == null) {
       throw new HadoopIllegalArgumentException("The policy name " +
           name + " does not exist");
     }
@@ -335,7 +368,7 @@ public synchronized void disablePolicy(String name) {
       enabledPolicies =
          enabledPoliciesByName.values().toArray(new ErasureCodingPolicy[0]);
     }
-    ecPolicy.setState(ErasureCodingPolicyState.DISABLED);
+    info.setState(ErasureCodingPolicyState.DISABLED);
     LOG.info("Disable the erasure coding policy " + name);
   }

@@ -343,14 +376,15 @@ public synchronized void disablePolicy(String name) {
    * Enable an erasure coding policy by policyName.
    */
   public synchronized void enablePolicy(String name) {
-    ErasureCodingPolicy ecPolicy = policiesByName.get(name);
-    if (ecPolicy == null) {
+    final ErasureCodingPolicyInfo info = policiesByName.get(name);
+    if (info == null) {
       throw new HadoopIllegalArgumentException("The policy name " +
           name + " does not exist");
     }

+    final ErasureCodingPolicy ecPolicy = info.getPolicy();
     enabledPoliciesByName.put(name, ecPolicy);
-    ecPolicy.setState(ErasureCodingPolicyState.ENABLED);
+    info.setState(ErasureCodingPolicyState.ENABLED);
     enabledPolicies =
         enabledPoliciesByName.values().toArray(new ErasureCodingPolicy[0]);
     LOG.info("Enable the erasure coding policy " + name);
@@ -359,17 +393,19 @@ public synchronized void enablePolicy(String name) {
   /**
    * Load an erasure coding policy into erasure coding manager.
    */
-  private void loadPolicy(ErasureCodingPolicy policy) {
+  private void loadPolicy(ErasureCodingPolicyInfo info) {
+    Preconditions.checkNotNull(info);
+    final ErasureCodingPolicy policy = info.getPolicy();
     if (!CodecUtil.hasCodec(policy.getCodecName()) ||
         policy.getCellSize() > maxCellSize) {
       // If policy is not supported in current system, set the policy state to
       // DISABLED;
-      policy.setState(ErasureCodingPolicyState.DISABLED);
+      info.setState(ErasureCodingPolicyState.DISABLED);
     }

-    this.policiesByName.put(policy.getName(), policy);
-    this.policiesByID.put(policy.getId(), policy);
-    if (policy.isEnabled()) {
+    this.policiesByName.put(policy.getName(), info);
+    this.policiesByID.put(policy.getId(), info);
+    if (info.isEnabled()) {
       enablePolicy(policy.getName());
     }
   }
@@ -380,11 +416,13 @@ private void loadPolicy(ErasureCodingPolicy policy) {
    * @param ecPolicies contains ErasureCodingPolicy list
    *
    */
-  public synchronized void loadPolicies(List<ErasureCodingPolicy> ecPolicies) {
+  public synchronized void loadPolicies(
+      List<ErasureCodingPolicyInfo> ecPolicies) {
     Preconditions.checkNotNull(ecPolicies);
-    for (ErasureCodingPolicy p : ecPolicies) {
+    for (ErasureCodingPolicyInfo p : ecPolicies) {
       loadPolicy(p);
     }
-    allPolicies = policiesByName.values().toArray(new ErasureCodingPolicy[0]);
+    allPolicies =
+        policiesByName.values().toArray(new ErasureCodingPolicyInfo[0]);
   }
 }
@@ -26,6 +26,7 @@
 import org.apache.hadoop.fs.permission.FsAction;
 import org.apache.hadoop.hdfs.XAttrHelper;
 import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
+import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyInfo;
 import org.apache.hadoop.hdfs.server.namenode.FSDirectory.DirOp;
 import org.apache.hadoop.io.IOUtils;
 import org.apache.hadoop.io.WritableUtils;
@@ -366,10 +367,10 @@ static ErasureCodingPolicy unprotectedGetErasureCodingPolicy(
    * Get available erasure coding polices.
    *
    * @param fsn namespace
-   * @return {@link ErasureCodingPolicy} array
+   * @return {@link ErasureCodingPolicyInfo} array
    */
-  static ErasureCodingPolicy[] getErasureCodingPolicies(final FSNamesystem fsn)
-      throws IOException {
+  static ErasureCodingPolicyInfo[] getErasureCodingPolicies(
+      final FSNamesystem fsn) throws IOException {
     assert fsn.hasReadLock();
     return fsn.getErasureCodingPolicyManager().getPolicies();
   }
@@ -41,7 +41,7 @@
 import java.util.Map.Entry;
 import java.util.Set;

-import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
+import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyInfo;
 import org.apache.hadoop.hdfs.protocolPB.PBHelperClient;
 import org.apache.hadoop.io.compress.CompressionOutputStream;
 import org.slf4j.Logger;
@@ -380,10 +380,10 @@ private void loadCacheManagerSection(InputStream in, StartupProgress prog,
   private void loadErasureCodingSection(InputStream in)
       throws IOException {
     ErasureCodingSection s = ErasureCodingSection.parseDelimitedFrom(in);
-    List<ErasureCodingPolicy> ecPolicies = Lists
+    List<ErasureCodingPolicyInfo> ecPolicies = Lists
         .newArrayListWithCapacity(s.getPoliciesCount());
     for (int i = 0; i < s.getPoliciesCount(); ++i) {
-      ecPolicies.add(PBHelperClient.convertErasureCodingPolicy(
+      ecPolicies.add(PBHelperClient.convertErasureCodingPolicyInfo(
           s.getPolicies(i)));
     }
     fsn.getErasureCodingPolicyManager().loadPolicies(ecPolicies);
@@ -586,11 +586,11 @@ private void saveCacheManagerSection(FileSummary.Builder summary)
   private void saveErasureCodingSection(
       FileSummary.Builder summary) throws IOException {
     final FSNamesystem fsn = context.getSourceNamesystem();
-    ErasureCodingPolicy[] ecPolicies =
+    ErasureCodingPolicyInfo[] ecPolicies =
         fsn.getErasureCodingPolicyManager().getPolicies();
     ArrayList<ErasureCodingPolicyProto> ecPolicyProtoes =
         new ArrayList<ErasureCodingPolicyProto>();
-    for (ErasureCodingPolicy p : ecPolicies) {
+    for (ErasureCodingPolicyInfo p : ecPolicies) {
       ecPolicyProtoes.add(PBHelperClient.convertErasureCodingPolicy(p));
     }

@@ -90,6 +90,7 @@
 import org.apache.hadoop.hdfs.protocol.HdfsConstants;
 import static org.apache.hadoop.hdfs.server.namenode.FSDirStatAndListingOp.*;

+import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyInfo;
 import org.apache.hadoop.hdfs.protocol.ReplicatedBlockStats;
 import org.apache.hadoop.hdfs.protocol.ECBlockGroupStats;
 import org.apache.hadoop.hdfs.protocol.OpenFileEntry;
@@ -7418,16 +7419,16 @@ ErasureCodingPolicy getErasureCodingPolicy(String src)
   }

   /**
-   * Get available erasure coding polices
+   * Get all erasure coding polices.
    */
-  ErasureCodingPolicy[] getErasureCodingPolicies() throws IOException {
+  ErasureCodingPolicyInfo[] getErasureCodingPolicies() throws IOException {
     final String operationName = "getErasureCodingPolicies";
     boolean success = false;
     checkOperation(OperationCategory.READ);
     readLock();
     try {
       checkOperation(OperationCategory.READ);
-      final ErasureCodingPolicy[] ret =
+      final ErasureCodingPolicyInfo[] ret =
           FSDirErasureCodingOp.getErasureCodingPolicies(this);
       success = true;
       return ret;
@@ -101,6 +101,7 @@
 import org.apache.hadoop.hdfs.protocol.ECBlockGroupStats;
 import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
 import org.apache.hadoop.hdfs.protocol.EncryptionZone;
+import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyInfo;
 import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
 import org.apache.hadoop.hdfs.protocol.FSLimitException;
 import org.apache.hadoop.hdfs.protocol.LastBlockWithStatus;
@@ -2303,7 +2304,8 @@ public void removeSpanReceiver(long id) throws IOException {
   }

   @Override // ClientProtocol
-  public ErasureCodingPolicy[] getErasureCodingPolicies() throws IOException {
+  public ErasureCodingPolicyInfo[] getErasureCodingPolicies()
+      throws IOException {
     checkNNStartup();
     return namesystem.getErasureCodingPolicies();
   }
@@ -23,6 +23,7 @@
 import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.protocol.AddErasureCodingPolicyResponse;
 import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
+import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyInfo;
 import org.apache.hadoop.hdfs.util.ECPolicyLoader;
 import org.apache.hadoop.io.erasurecode.ErasureCodeConstants;
 import org.apache.hadoop.tools.TableListing;
@@ -111,16 +112,16 @@ public int run(Configuration conf, List<String> args) throws IOException {

       final DistributedFileSystem dfs = AdminHelper.getDFS(conf);
       try {
-        Collection<ErasureCodingPolicy> policies =
+        final Collection<ErasureCodingPolicyInfo> policies =
             dfs.getAllErasureCodingPolicies();
         if (policies.isEmpty()) {
           System.out.println("There is no erasure coding policies in the " +
               "cluster.");
         } else {
           System.out.println("Erasure Coding Policies:");
-          for (ErasureCodingPolicy policy : policies) {
+          for (ErasureCodingPolicyInfo policy : policies) {
             if (policy != null) {
-              System.out.println(policy.toString());
+              System.out.println(policy);
             }
           }
         }
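Taken together with the toString() methods earlier in this patch, each line printed by `hdfs ec -listPolicies` now ends with the policy's actual state, roughly of the shape `ErasureCodingPolicy=[Name=<name>, Schema=[<schema>], CellSize=<cellSize>, Id=<id>], State=ENABLED` — the shape is inferred from the code above, not from captured output, and field values depend on the policy.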
@@ -118,6 +118,8 @@
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo.AdminStates;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo.DatanodeInfoBuilder;
 import org.apache.hadoop.hdfs.protocol.ECBlockGroupStats;
+import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyInfo;
+import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyState;
 import org.apache.hadoop.hdfs.protocol.ReplicatedBlockStats;
 import org.apache.hadoop.hdfs.protocol.SystemErasureCodingPolicies;
 import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
@@ -147,6 +149,7 @@
 import org.apache.hadoop.hdfs.server.datanode.SimulatedFSDataset;
 import org.apache.hadoop.hdfs.server.datanode.TestTransferRbw;
 import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsDatasetSpi;
+import org.apache.hadoop.hdfs.server.namenode.ErasureCodingPolicyManager;
 import org.apache.hadoop.hdfs.server.namenode.FSDirectory;
 import org.apache.hadoop.hdfs.server.namenode.FSEditLog;
 import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
@@ -300,6 +303,19 @@ public static void enableAllECPolicies(DistributedFileSystem fs)
     }
   }

+  public static ErasureCodingPolicyState getECPolicyState(
+      final ErasureCodingPolicy policy) {
+    final ErasureCodingPolicyInfo[] policyInfos =
+        ErasureCodingPolicyManager.getInstance().getPolicies();
+    for (ErasureCodingPolicyInfo pi : policyInfos) {
+      if (pi.getPolicy().equals(policy)) {
+        return pi.getState();
+      }
+    }
+    throw new IllegalArgumentException("ErasureCodingPolicy <" + policy
+        + "> doesn't exist in the policies:" + Arrays.toString(policyInfos));
+  }
+
   /** class MyFile contains enough information to recreate the contents of
    * a single file.
    */
@@ -25,6 +25,7 @@
 import org.apache.hadoop.fs.permission.FsPermission;
 import org.apache.hadoop.hdfs.protocol.AddErasureCodingPolicyResponse;
 import org.apache.hadoop.hdfs.protocol.DirectoryListing;
+import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyInfo;
 import org.apache.hadoop.hdfs.protocol.SystemErasureCodingPolicies;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants;
 import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
@@ -49,6 +50,7 @@
 import java.io.FileNotFoundException;
 import java.io.IOException;
 import java.security.PrivilegedExceptionAction;
+import java.util.ArrayList;
 import java.util.Collection;
 import java.util.EnumSet;
 import java.util.List;
@@ -375,10 +377,16 @@ public void testSetDefaultPolicy()

   @Test
   public void testGetAllErasureCodingPolicies() throws Exception {
-    Collection<ErasureCodingPolicy> allECPolicies = fs
+    Collection<ErasureCodingPolicyInfo> allECPolicies = fs
         .getAllErasureCodingPolicies();
-    assertTrue("All system policies should be enabled",
-        allECPolicies.containsAll(SystemErasureCodingPolicies.getPolicies()));
+    final List<ErasureCodingPolicy> sysPolicies =
+        new ArrayList<>(SystemErasureCodingPolicies.getPolicies());
+    for (ErasureCodingPolicyInfo ecpi : allECPolicies) {
+      if (ecpi.isEnabled()) {
+        sysPolicies.remove(ecpi.getPolicy());
+      }
+    }
+    assertTrue("All system policies should be enabled", sysPolicies.isEmpty());

     // Query after add a new policy
     ECSchema toAddSchema = new ECSchema("rs", 5, 2);
@@ -609,11 +617,11 @@ public void testEnforceAsReplicatedFile() throws Exception {
     fs.setErasureCodingPolicy(dirPath, ecPolicy.getName());

     String ecPolicyName = null;
-    Collection<ErasureCodingPolicy> allPolicies =
+    final Collection<ErasureCodingPolicyInfo> allPoliciesInfo =
         fs.getAllErasureCodingPolicies();
-    for (ErasureCodingPolicy policy : allPolicies) {
-      if (!ecPolicy.equals(policy)) {
-        ecPolicyName = policy.getName();
+    for (ErasureCodingPolicyInfo info : allPoliciesInfo) {
+      if (!ecPolicy.equals(info.getPolicy())) {
+        ecPolicyName = info.getPolicy().getName();
         break;
       }
     }
@@ -748,7 +748,8 @@ public void testErasureCodingPolicyOperations() throws IOException {
     // check if new policy is reapplied through edit log
     ErasureCodingPolicy ecPolicy =
         ErasureCodingPolicyManager.getInstance().getByID(newPolicy.getId());
-    assertEquals(ErasureCodingPolicyState.DISABLED, ecPolicy.getState());
+    assertEquals(ErasureCodingPolicyState.DISABLED,
+        DFSTestUtil.getECPolicyState(ecPolicy));

     // 2. enable policy
     fs.enableErasureCodingPolicy(newPolicy.getName());
@@ -756,7 +757,8 @@ public void testErasureCodingPolicyOperations() throws IOException {
     cluster.waitActive();
     ecPolicy =
         ErasureCodingPolicyManager.getInstance().getByID(newPolicy.getId());
-    assertEquals(ErasureCodingPolicyState.ENABLED, ecPolicy.getState());
+    assertEquals(ErasureCodingPolicyState.ENABLED,
+        DFSTestUtil.getECPolicyState(ecPolicy));

     // create a new file, use the policy
     final Path dirPath = new Path("/striped");
@@ -773,7 +775,8 @@ public void testErasureCodingPolicyOperations() throws IOException {
     cluster.waitActive();
     ecPolicy =
         ErasureCodingPolicyManager.getInstance().getByID(newPolicy.getId());
-    assertEquals(ErasureCodingPolicyState.DISABLED, ecPolicy.getState());
+    assertEquals(ErasureCodingPolicyState.DISABLED,
+        DFSTestUtil.getECPolicyState(ecPolicy));
     // read file
     DFSTestUtil.readFileAsBytes(fs, filePath);

@@ -783,7 +786,8 @@ public void testErasureCodingPolicyOperations() throws IOException {
     cluster.waitActive();
     ecPolicy =
         ErasureCodingPolicyManager.getInstance().getByID(newPolicy.getId());
-    assertEquals(ErasureCodingPolicyState.REMOVED, ecPolicy.getState());
+    assertEquals(ErasureCodingPolicyState.REMOVED,
+        DFSTestUtil.getECPolicyState(ecPolicy));
     // read file
     DFSTestUtil.readFileAsBytes(fs, filePath);

@@ -870,7 +870,8 @@ public void testSaveAndLoadErasureCodingPolicies() throws IOException{
         newPolicy, ecPolicy);
     assertEquals(
         "Newly added erasure coding policy should be of disabled state",
-        ErasureCodingPolicyState.DISABLED, ecPolicy.getState());
+        ErasureCodingPolicyState.DISABLED,
+        DFSTestUtil.getECPolicyState(ecPolicy));

     // Test enable/disable/remove user customized erasure coding policy
     testChangeErasureCodingPolicyState(cluster, blockSize, newPolicy);
@@ -880,14 +881,12 @@ public void testSaveAndLoadErasureCodingPolicies() throws IOException{
     }
   }

-
   private void testChangeErasureCodingPolicyState(MiniDFSCluster cluster,
       int blockSize, ErasureCodingPolicy targetPolicy) throws IOException {
     DistributedFileSystem fs = cluster.getFileSystem();

     // 1. Enable an erasure coding policy
     fs.enableErasureCodingPolicy(targetPolicy.getName());
-    targetPolicy.setState(ErasureCodingPolicyState.ENABLED);
     // Create file, using the new policy
     final Path dirPath = new Path("/striped");
     final Path filePath = new Path(dirPath, "file");
@@ -910,13 +909,13 @@ private void testChangeErasureCodingPolicyState(MiniDFSCluster cluster,
     assertEquals("The erasure coding policy is not found",
         targetPolicy, ecPolicy);
     assertEquals("The erasure coding policy should be of enabled state",
-        ErasureCodingPolicyState.ENABLED, ecPolicy.getState());
+        ErasureCodingPolicyState.ENABLED,
+        DFSTestUtil.getECPolicyState(ecPolicy));
     // Read file regardless of the erasure coding policy state
     DFSTestUtil.readFileAsBytes(fs, filePath);

     // 2. Disable an erasure coding policy
     fs.disableErasureCodingPolicy(ecPolicy.getName());
-    targetPolicy.setState(ErasureCodingPolicyState.DISABLED);
     // Save namespace and restart NameNode
     fs.setSafeMode(SafeModeAction.SAFEMODE_ENTER);
     fs.saveNamespace();
@@ -929,7 +928,8 @@ private void testChangeErasureCodingPolicyState(MiniDFSCluster cluster,
     assertEquals("The erasure coding policy is not found",
         targetPolicy, ecPolicy);
     assertEquals("The erasure coding policy should be of disabled state",
-        ErasureCodingPolicyState.DISABLED, ecPolicy.getState());
+        ErasureCodingPolicyState.DISABLED,
+        DFSTestUtil.getECPolicyState(ecPolicy));
     // Read file regardless of the erasure coding policy state
     DFSTestUtil.readFileAsBytes(fs, filePath);

@@ -944,7 +944,7 @@ private void testChangeErasureCodingPolicyState(MiniDFSCluster cluster,
       return;
     }

-    targetPolicy.setState(ErasureCodingPolicyState.REMOVED);
     fs.removeErasureCodingPolicy(ecPolicy.getName());
     // Save namespace and restart NameNode
     fs.setSafeMode(SafeModeAction.SAFEMODE_ENTER);
     fs.saveNamespace();
@@ -957,7 +957,8 @@ private void testChangeErasureCodingPolicyState(MiniDFSCluster cluster,
     assertEquals("The erasure coding policy saved into and loaded from " +
         "fsImage is bad", targetPolicy, ecPolicy);
     assertEquals("The erasure coding policy should be of removed state",
-        ErasureCodingPolicyState.REMOVED, ecPolicy.getState());
+        ErasureCodingPolicyState.REMOVED,
+        DFSTestUtil.getECPolicyState(ecPolicy));
     // Read file regardless of the erasure coding policy state
     DFSTestUtil.readFileAsBytes(fs, filePath);
     fs.delete(dirPath, true);
@@ -41,6 +41,7 @@
 import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.hdfs.protocol.BlockStoragePolicy;
 import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
+import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicyInfo;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.io.SequenceFile.CompressionType;
@@ -925,11 +926,11 @@ static float msToSecs(long timeMillis) {

   private boolean checkErasureCodePolicy(String erasureCodePolicyName,
       FileSystem fs, TestType testType) throws IOException {
-    Collection<ErasureCodingPolicy> list =
+    Collection<ErasureCodingPolicyInfo> list =
         ((DistributedFileSystem) fs).getAllErasureCodingPolicies();
     boolean isValid = false;
-    for (ErasureCodingPolicy ec : list) {
-      if (erasureCodePolicyName.equals(ec.getName())) {
+    for (ErasureCodingPolicyInfo ec : list) {
+      if (erasureCodePolicyName.equals(ec.getPolicy().getName())) {
         isValid = true;
         break;
       }
@@ -939,8 +940,8 @@ private boolean checkErasureCodePolicy(String erasureCodePolicyName,
       System.out.println("Invalid erasure code policy: " +
           erasureCodePolicyName);
       System.out.println("Current supported erasure code policy list: ");
-      for (ErasureCodingPolicy ec : list) {
-        System.out.println(ec.getName());
+      for (ErasureCodingPolicyInfo ec : list) {
+        System.out.println(ec.getPolicy().getName());
       }
       return false;
     }
@@ -999,9 +1000,10 @@ void createAndEnableECOnPath(FileSystem fs, Path path)
         getConf().get(ERASURE_CODE_POLICY_NAME_KEY, null);

     fs.mkdirs(path);
-    Collection<ErasureCodingPolicy> list =
+    Collection<ErasureCodingPolicyInfo> list =
         ((DistributedFileSystem) fs).getAllErasureCodingPolicies();
-    for (ErasureCodingPolicy ec : list) {
+    for (ErasureCodingPolicyInfo info : list) {
+      final ErasureCodingPolicy ec = info.getPolicy();
       if (erasureCodePolicyName.equals(ec.getName())) {
         ((DistributedFileSystem) fs).setErasureCodingPolicy(path, ec.getName());
         LOG.info("enable erasureCodePolicy = " + erasureCodePolicyName +