HDFS-12223. Rebasing HDFS-10467. Contributed by Inigo Goiri.
(cherry picked from commit 47db6e9d8e2c264671c89fdd6cb11a7c762d2cce)
parent ca4f209b49
commit 0ec82b8cdf
@@ -64,8 +64,9 @@
 import org.apache.hadoop.hdfs.DFSConfigKeys;
 import org.apache.hadoop.hdfs.DFSUtil;
 import org.apache.hadoop.hdfs.inotify.EventBatchList;
-import org.apache.hadoop.hdfs.protocol.AddingECPolicyResponse;
+import org.apache.hadoop.hdfs.protocol.AddECPolicyResponse;
 import org.apache.hadoop.hdfs.protocol.BlockStoragePolicy;
+import org.apache.hadoop.hdfs.protocol.BlocksStats;
 import org.apache.hadoop.hdfs.protocol.CacheDirectiveEntry;
 import org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo;
 import org.apache.hadoop.hdfs.protocol.CachePoolEntry;
@@ -75,6 +76,7 @@
 import org.apache.hadoop.hdfs.protocol.DatanodeID;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
 import org.apache.hadoop.hdfs.protocol.DirectoryListing;
+import org.apache.hadoop.hdfs.protocol.ECBlockGroupsStats;
 import org.apache.hadoop.hdfs.protocol.EncryptionZone;
 import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
 import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
@@ -85,6 +87,7 @@
 import org.apache.hadoop.hdfs.protocol.LastBlockWithStatus;
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
 import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
+import org.apache.hadoop.hdfs.protocol.OpenFileEntry;
 import org.apache.hadoop.hdfs.protocol.RollingUpgradeInfo;
 import org.apache.hadoop.hdfs.protocol.SnapshotDiffReport;
 import org.apache.hadoop.hdfs.protocol.SnapshottableDirectoryStatus;
@@ -1735,13 +1738,6 @@ public void setErasureCodingPolicy(String src, String ecPolicyName)
     checkOperation(OperationCategory.WRITE, false);
   }
 
-  @Override // ClientProtocol
-  public AddingECPolicyResponse[] addErasureCodingPolicies(
-      ErasureCodingPolicy[] policies) throws IOException {
-    checkOperation(OperationCategory.WRITE, false);
-    return null;
-  }
-
   @Override // ClientProtocol
   public void unsetErasureCodingPolicy(String src) throws IOException {
     checkOperation(OperationCategory.WRITE, false);
@@ -1808,6 +1804,53 @@ public BlockStoragePolicy getStoragePolicy(String path) throws IOException {
     return null;
   }
 
+  @Override
+  public AddECPolicyResponse[] addErasureCodingPolicies(
+      ErasureCodingPolicy[] arg0) throws IOException {
+    checkOperation(OperationCategory.WRITE, false);
+    return null;
+  }
+
+  @Override
+  public void removeErasureCodingPolicy(String arg0) throws IOException {
+    checkOperation(OperationCategory.WRITE, false);
+  }
+
+  @Override
+  public void disableErasureCodingPolicy(String arg0) throws IOException {
+    checkOperation(OperationCategory.WRITE, false);
+  }
+
+  @Override
+  public void enableErasureCodingPolicy(String arg0) throws IOException {
+    checkOperation(OperationCategory.WRITE, false);
+  }
+
+  @Override
+  public ECBlockGroupsStats getECBlockGroupsStats() throws IOException {
+    checkOperation(OperationCategory.READ, false);
+    return null;
+  }
+
+  @Override
+  public HashMap<String, String> getErasureCodingCodecs() throws IOException {
+    checkOperation(OperationCategory.READ, false);
+    return null;
+  }
+
+  @Override
+  public BlocksStats getBlocksStats() throws IOException {
+    checkOperation(OperationCategory.READ, false);
+    return null;
+  }
+
+  @Override
+  public BatchedEntries<OpenFileEntry> listOpenFiles(long arg0)
+      throws IOException {
+    checkOperation(OperationCategory.READ, false);
+    return null;
+  }
+
   /**
    * Locate the location with the matching block pool id.
    *
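
Note: every method added in the last hunk follows the same placeholder pattern: check the requested operation category, then return a null or empty result because the real router-side implementation is deferred. The sketch below is a hypothetical, self-contained illustration of that pattern; the class name, the OperationCategory enum, and the checkOperation helper shown here are stand-ins inferred from the diff's context lines, not the actual HDFS classes.

import java.io.IOException;

// Illustrative stand-in for the server class being patched; it only mimics
// the "check the operation category, then return a placeholder" stub shape.
public class StubClientProtocolServer {

  enum OperationCategory { READ, WRITE }

  // Stand-in for the checkOperation(...) helper visible in the diff's context
  // lines; here it only validates its argument. The meaning of the boolean
  // flag is not visible in this excerpt.
  private void checkOperation(OperationCategory category, boolean supported)
      throws IOException {
    if (category == null) {
      throw new IOException("No operation category given");
    }
  }

  // Mirrors the shape of the added methods (e.g. getECBlockGroupsStats()):
  // validate the category, then return null until an implementation lands.
  public Object getECBlockGroupsStats() throws IOException {
    checkOperation(OperationCategory.READ, false);
    return null;
  }

  public static void main(String[] args) throws IOException {
    StubClientProtocolServer server = new StubClientProtocolServer();
    System.out.println("Stubbed result: " + server.getECBlockGroupsStats());
  }
}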