From 7f49537ba18f830dff172f5f9c4a387fe7ab410f Mon Sep 17 00:00:00 2001 From: arp Date: Wed, 27 Aug 2014 09:03:45 -0700 Subject: [PATCH] HDFS-6924. Add new RAM_DISK storage type. (Arpit Agarwal) --- hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-6581.txt | 4 ++++ .../main/java/org/apache/hadoop/hdfs/StorageType.java | 3 ++- .../java/org/apache/hadoop/hdfs/protocolPB/PBHelper.java | 4 ++++ .../hadoop-hdfs/src/main/proto/hdfs.proto | 2 +- .../org/apache/hadoop/hdfs/protocolPB/TestPBHelper.java | 9 ++++++--- .../apache/hadoop/hdfs/server/datanode/TestDataDirs.java | 5 ++++- 6 files changed, 21 insertions(+), 6 deletions(-) diff --git a/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-6581.txt b/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-6581.txt index 706c03a606..fc6e0e0af9 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-6581.txt +++ b/hadoop-hdfs-project/hadoop-hdfs/CHANGES-HDFS-6581.txt @@ -2,3 +2,7 @@ HDFS-6921. Add LazyPersist flag to FileStatus. (Arpit Agarwal) + HDFS-6924. Add new RAM_DISK storage type. 
(Arpit Agarwal) + + + diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/StorageType.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/StorageType.java index 3d8133c7ce..51724f7401 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/StorageType.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/StorageType.java @@ -32,7 +32,8 @@ @InterfaceStability.Unstable public enum StorageType { DISK, - SSD; + SSD, + RAM_DISK; public static final StorageType DEFAULT = DISK; public static final StorageType[] EMPTY_ARRAY = {}; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelper.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelper.java index 5efede7211..51675974d0 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelper.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/protocolPB/PBHelper.java @@ -1703,6 +1703,8 @@ public static StorageTypeProto convertStorageType(StorageType type) { return StorageTypeProto.DISK; case SSD: return StorageTypeProto.SSD; + case RAM_DISK: + return StorageTypeProto.RAM_DISK; default: throw new IllegalStateException( "BUG: StorageType not found, type=" + type); @@ -1731,6 +1733,8 @@ public static StorageType convertStorageType(StorageTypeProto type) { return StorageType.DISK; case SSD: return StorageType.SSD; + case RAM_DISK: + return StorageType.RAM_DISK; default: throw new IllegalStateException( "BUG: StorageTypeProto not found, type=" + type); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/hdfs.proto b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/hdfs.proto index cbb51f9020..b54638effe 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/hdfs.proto +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/proto/hdfs.proto @@ -158,6 +158,7 @@ message 
FsPermissionProto { enum StorageTypeProto { DISK = 1; SSD = 2; + RAM_DISK = 3; } /** @@ -260,7 +261,6 @@ message HdfsFileStatusProto { // Optional field for fileId optional uint64 fileId = 13 [default = 0]; // default as an invalid id optional int32 childrenNum = 14 [default = -1]; - // Optional field for file encryption optional FileEncryptionInfoProto fileEncryptionInfo = 15; optional bool isLazyPersist = 16 [default = false]; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocolPB/TestPBHelper.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocolPB/TestPBHelper.java index cb85c7deb6..98fd59a8bd 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocolPB/TestPBHelper.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/protocolPB/TestPBHelper.java @@ -448,13 +448,16 @@ private LocatedBlock createLocatedBlock() { DFSTestUtil.getLocalDatanodeInfo("127.0.0.1", "h2", AdminStates.DECOMMISSIONED), DFSTestUtil.getLocalDatanodeInfo("127.0.0.1", "h3", - AdminStates.NORMAL) + AdminStates.NORMAL), + DFSTestUtil.getLocalDatanodeInfo("127.0.0.1", "h4", + AdminStates.NORMAL), }; - String[] storageIDs = {"s1", "s2", "s3"}; + String[] storageIDs = {"s1", "s2", "s3", "s4"}; StorageType[] media = { StorageType.DISK, StorageType.SSD, - StorageType.DISK + StorageType.DISK, + StorageType.RAM_DISK }; LocatedBlock lb = new LocatedBlock( new ExtendedBlock("bp12", 12345, 10, 53), diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataDirs.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataDirs.java index 53babb471d..c0b4f9a869 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataDirs.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/datanode/TestDataDirs.java @@ -44,10 +44,11 
@@ public void testDataDirParsing() throws Throwable { File dir1 = new File("/dir1"); File dir2 = new File("/dir2"); File dir3 = new File("/dir3"); + File dir4 = new File("/dir4"); // Verify that a valid string is correctly parsed, and that storage // type is not case-sensitive - String locations1 = "[disk]/dir0,[DISK]/dir1,[sSd]/dir2,[disK]/dir3"; + String locations1 = "[disk]/dir0,[DISK]/dir1,[sSd]/dir2,[disK]/dir3,[ram_disk]/dir4"; conf.set(DFS_DATANODE_DATA_DIR_KEY, locations1); locations = DataNode.getStorageLocations(conf); - assertThat(locations.size(), is(4)); + assertThat(locations.size(), is(5)); @@ -59,6 +60,8 @@ public void testDataDirParsing() throws Throwable { assertThat(locations.get(2).getUri(), is(dir2.toURI())); assertThat(locations.get(3).getStorageType(), is(StorageType.DISK)); assertThat(locations.get(3).getUri(), is(dir3.toURI())); + assertThat(locations.get(4).getStorageType(), is(StorageType.RAM_DISK)); + assertThat(locations.get(4).getUri(), is(dir4.toURI())); // Verify that an unrecognized storage type result in an exception. String locations2 = "[BadMediaType]/dir0,[ssd]/dir1,[disk]/dir2";