HDFS-6924. Add new RAM_DISK storage type. (Arpit Agarwal)
commit 7f49537ba1
parent a7bcc95358
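The hunks below add RAM_DISK as a first-class value of the StorageType enum, teach the protobuf conversion layer and hdfs.proto about it, and extend the existing tests. As a minimal sketch (not part of this commit; the package path org.apache.hadoop.hdfs.StorageType is assumed for this branch), the new value behaves like any other enum constant:

    import org.apache.hadoop.hdfs.StorageType;

    public class StorageTypeSketch {
      public static void main(String[] args) {
        // A "[ram_disk]" tag in dfs.datanode.data.dir is assumed to reduce to a
        // case-insensitive enum lookup, as the parsing test further below exercises.
        StorageType t = StorageType.valueOf("ram_disk".toUpperCase());
        System.out.println(t + " / default=" + StorageType.DEFAULT);  // RAM_DISK / default=DISK
      }
    }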
@@ -2,3 +2,7 @@
 
     HDFS-6921. Add LazyPersist flag to FileStatus. (Arpit Agarwal)
 
+    HDFS-6924. Add new RAM_DISK storage type. (Arpit Agarwal)
+
+
+
@@ -32,7 +32,8 @@
 @InterfaceStability.Unstable
 public enum StorageType {
   DISK,
-  SSD;
+  SSD,
+  RAM_DISK;
 
   public static final StorageType DEFAULT = DISK;
   public static final StorageType[] EMPTY_ARRAY = {};
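With RAM_DISK in the enum, existing switch statements over StorageType gain a new case. A hedged illustration (isMemoryBacked is a hypothetical helper, not from this commit) of how a caller might single out the memory-backed type:

    // Hypothetical helper, for illustration only.
    static boolean isMemoryBacked(StorageType type) {
      switch (type) {
        case RAM_DISK:
          return true;   // backed by RAM rather than persistent media
        case DISK:
        case SSD:
        default:
          return false;
      }
    }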
@@ -1703,6 +1703,8 @@ public static StorageTypeProto convertStorageType(StorageType type) {
       return StorageTypeProto.DISK;
     case SSD:
       return StorageTypeProto.SSD;
+    case RAM_DISK:
+      return StorageTypeProto.RAM_DISK;
     default:
       throw new IllegalStateException(
           "BUG: StorageType not found, type=" + type);
@@ -1731,6 +1733,8 @@ public static StorageType convertStorageType(StorageTypeProto type) {
       return StorageType.DISK;
     case SSD:
       return StorageType.SSD;
+    case RAM_DISK:
+      return StorageType.RAM_DISK;
     default:
       throw new IllegalStateException(
           "BUG: StorageTypeProto not found, type=" + type);
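The two converters above are assumed to be the PBHelper overloads (the enclosing class name is not visible in the hunk headers); a round trip through them should now preserve the new value:

    // Sketch only: assumes both convertStorageType overloads live in PBHelper.
    StorageTypeProto p = PBHelper.convertStorageType(StorageType.RAM_DISK);
    StorageType t = PBHelper.convertStorageType(p);
    assert t == StorageType.RAM_DISK;   // RAM_DISK <-> StorageTypeProto.RAM_DISK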
@@ -158,6 +158,7 @@ message FsPermissionProto {
 enum StorageTypeProto {
   DISK = 1;
   SSD = 2;
+  RAM_DISK = 3;
 }
 
 /**
@@ -260,7 +261,6 @@ message HdfsFileStatusProto {
   // Optional field for fileId
   optional uint64 fileId = 13 [default = 0]; // default as an invalid id
   optional int32 childrenNum = 14 [default = -1];
 
   // Optional field for file encryption
   optional FileEncryptionInfoProto fileEncryptionInfo = 15;
-  optional bool isLazyPersist = 16 [default = false];
@@ -448,13 +448,16 @@ private LocatedBlock createLocatedBlock() {
         DFSTestUtil.getLocalDatanodeInfo("127.0.0.1", "h2",
             AdminStates.DECOMMISSIONED),
         DFSTestUtil.getLocalDatanodeInfo("127.0.0.1", "h3",
-            AdminStates.NORMAL)
+            AdminStates.NORMAL),
+        DFSTestUtil.getLocalDatanodeInfo("127.0.0.1", "h4",
+            AdminStates.NORMAL),
     };
-    String[] storageIDs = {"s1", "s2", "s3"};
+    String[] storageIDs = {"s1", "s2", "s3", "s4"};
     StorageType[] media = {
         StorageType.DISK,
         StorageType.SSD,
-        StorageType.DISK
+        StorageType.DISK,
+        StorageType.RAM_DISK
     };
     LocatedBlock lb = new LocatedBlock(
         new ExtendedBlock("bp12", 12345, 10, 53),
@@ -44,10 +44,11 @@ public void testDataDirParsing() throws Throwable {
     File dir1 = new File("/dir1");
     File dir2 = new File("/dir2");
     File dir3 = new File("/dir3");
+    File dir4 = new File("/dir4");
 
     // Verify that a valid string is correctly parsed, and that storage
     // type is not case-sensitive
-    String locations1 = "[disk]/dir0,[DISK]/dir1,[sSd]/dir2,[disK]/dir3";
+    String locations1 = "[disk]/dir0,[DISK]/dir1,[sSd]/dir2,[disK]/dir3,[ram_disk]/dir4";
     conf.set(DFS_DATANODE_DATA_DIR_KEY, locations1);
     locations = DataNode.getStorageLocations(conf);
     assertThat(locations.size(), is(4));
@@ -59,6 +60,8 @@ public void testDataDirParsing() throws Throwable {
     assertThat(locations.get(2).getUri(), is(dir2.toURI()));
     assertThat(locations.get(3).getStorageType(), is(StorageType.DISK));
     assertThat(locations.get(3).getUri(), is(dir3.toURI()));
+    assertThat(locations.get(4).getStorageType(), is(StorageType.RAM_DISK));
+    assertThat(locations.get(4).getUri(), is(dir4.toURI()));
 
     // Verify that an unrecognized storage type result in an exception.
     String locations2 = "[BadMediaType]/dir0,[ssd]/dir1,[disk]/dir2";
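Mirroring the parsing test above, a DataNode data directory could be tagged as RAM_DISK through dfs.datanode.data.dir; DataNode.getStorageLocations and StorageLocation are used exactly as in the test, while the directory paths are placeholders:

    // Sketch under stated assumptions; paths are illustrative only.
    Configuration conf = new HdfsConfiguration();
    conf.set(DFSConfigKeys.DFS_DATANODE_DATA_DIR_KEY,
        "[DISK]/data/dfs/dn,[RAM_DISK]/mnt/dn-ramdisk");
    List<StorageLocation> locations = DataNode.getStorageLocations(conf);
    // locations.get(1).getStorageType() == StorageType.RAM_DISK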