From b74e47e31abd31617c0cdc648a3b1d48bb311d65 Mon Sep 17 00:00:00 2001
From: Takanobu Asanuma
Date: Sun, 22 Mar 2020 01:44:55 +0900
Subject: [PATCH] HDFS-15214. WebHDFS: Add snapshot counts to Content Summary.
 Contributed by hemanthboyina.

---
 .../hadoop/hdfs/web/JsonUtilClient.java       | 19 ++++++++++++++
 .../org/apache/hadoop/hdfs/web/JsonUtil.java  |  5 ++++
 .../apache/hadoop/hdfs/web/TestJsonUtil.java  | 25 ++++++++++++++-----
 .../apache/hadoop/hdfs/web/TestWebHDFS.java   | 24 ++++++++++++++++++
 4 files changed, 67 insertions(+), 6 deletions(-)

diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/JsonUtilClient.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/JsonUtilClient.java
index d45669f308..e846b56200 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/JsonUtilClient.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/JsonUtilClient.java
@@ -440,6 +440,25 @@ static ContentSummary toContentSummary(final Map<?, ?> json) {
         .directoryCount(directoryCount)
         .erasureCodingPolicy(ecPolicy);
     builder = buildQuotaUsage(builder, m, ContentSummary.Builder.class);
+    if (m.get("snapshotLength") != null) {
+      long snapshotLength = ((Number) m.get("snapshotLength")).longValue();
+      builder.snapshotLength(snapshotLength);
+    }
+    if (m.get("snapshotFileCount") != null) {
+      long snapshotFileCount =
+          ((Number) m.get("snapshotFileCount")).longValue();
+      builder.snapshotFileCount(snapshotFileCount);
+    }
+    if (m.get("snapshotDirectoryCount") != null) {
+      long snapshotDirectoryCount =
+          ((Number) m.get("snapshotDirectoryCount")).longValue();
+      builder.snapshotDirectoryCount(snapshotDirectoryCount);
+    }
+    if (m.get("snapshotSpaceConsumed") != null) {
+      long snapshotSpaceConsumed =
+          ((Number) m.get("snapshotSpaceConsumed")).longValue();
+      builder.snapshotSpaceConsumed(snapshotSpaceConsumed);
+    }
     return builder.build();
   }
 
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java
index 4a5b757982..bf5568bc3e 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java
@@ -358,6 +358,11 @@ public static String toJsonString(final ContentSummary contentsummary) {
     // For ContentSummary we don't need this since we already have
     // separate count for file and directory.
     m.putAll(toJsonMap(contentsummary, false));
+    m.put("snapshotLength", contentsummary.getSnapshotLength());
+    m.put("snapshotFileCount", contentsummary.getSnapshotFileCount());
+    m.put("snapshotDirectoryCount",
+        contentsummary.getSnapshotDirectoryCount());
+    m.put("snapshotSpaceConsumed", contentsummary.getSnapshotSpaceConsumed());
     return toJsonString(ContentSummary.class, m);
   }
 
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestJsonUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestJsonUtil.java
index 2a3680cdb1..ca256ab3da 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestJsonUtil.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestJsonUtil.java
@@ -306,7 +306,13 @@ public void testToJsonFromAclStatus() {
   @Test
   public void testToJsonFromContentSummary() {
     String jsonString =
-        "{\"ContentSummary\":{\"directoryCount\":33333,\"ecPolicy\":\"RS-6-3-1024k\",\"fileCount\":22222,\"length\":11111,\"quota\":44444,\"spaceConsumed\":55555,\"spaceQuota\":66666,\"typeQuota\":{}}}";
+        "{\"ContentSummary\":{\"directoryCount\":33333,\"ecPolicy\":"
+            + "\"RS-6-3-1024k\",\"fileCount\":22222,\"length\":11111,"
+            + "\"quota\":44444,\"snapshotDirectoryCount\":1,"
+            + "\"snapshotFileCount\":2,\"snapshotLength\":10,"
+            + "\"snapshotSpaceConsumed\":30,\"spaceConsumed\":55555,"
+            + "\"spaceQuota\":66666,\"typeQuota\":{}}}";
+
     long length = 11111;
     long fileCount = 22222;
     long directoryCount = 33333;
@@ -314,15 +320,22 @@ public void testToJsonFromContentSummary() {
     long spaceConsumed = 55555;
     long spaceQuota = 66666;
     String ecPolicy = "RS-6-3-1024k";
+    long snapshotLength = 10;
+    long snapshotFileCount = 2;
+    long snapshotDirectoryCount = 1;
+    long snapshotSpaceConsumed = 30;
 
-    ContentSummary contentSummary = new ContentSummary.Builder().length(length).
-        fileCount(fileCount).directoryCount(directoryCount).quota(quota).
-        spaceConsumed(spaceConsumed).spaceQuota(spaceQuota).
-        erasureCodingPolicy(ecPolicy).build();
+    ContentSummary contentSummary = new ContentSummary.Builder().length(length)
+        .fileCount(fileCount).directoryCount(directoryCount).quota(quota)
+        .spaceConsumed(spaceConsumed).spaceQuota(spaceQuota)
+        .erasureCodingPolicy(ecPolicy).snapshotLength(snapshotLength)
+        .snapshotFileCount(snapshotFileCount)
+        .snapshotDirectoryCount(snapshotDirectoryCount)
+        .snapshotSpaceConsumed(snapshotSpaceConsumed).build();
 
     Assert.assertEquals(jsonString, JsonUtil.toJsonString(contentSummary));
   }
-  
+
   @Test
   public void testToJsonFromXAttrs() throws IOException {
     String jsonString =
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHDFS.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHDFS.java
index 2b9b51fb5f..3ee7fcbd61 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHDFS.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHDFS.java
@@ -994,6 +994,30 @@ public void testContentSummary() throws Exception {
         .assertTrue((contentSummary.getTypeQuota(StorageType.DISK) == 100000));
   }
 
+  /**
+   * Test Snapshot related information in ContentSummary.
+   */
+  @Test
+  public void testSnapshotInContentSummary() throws Exception {
+    final Configuration conf = WebHdfsTestUtil.createConf();
+    Path dirPath = new Path("/dir");
+    final Path filePath = new Path("/dir/file");
+    cluster = new MiniDFSCluster.Builder(conf).numDataNodes(3).build();
+    final WebHdfsFileSystem webHdfs = WebHdfsTestUtil.getWebHdfsFileSystem(conf,
+        WebHdfsConstants.WEBHDFS_SCHEME);
+    final DistributedFileSystem dfs = cluster.getFileSystem();
+    DFSTestUtil.createFile(dfs, filePath, 10, (short) 3, 0L);
+    dfs.allowSnapshot(dirPath);
+    dfs.createSnapshot(dirPath);
+    dfs.delete(filePath, true);
+    ContentSummary contentSummary = webHdfs.getContentSummary(dirPath);
+    assertEquals(1, contentSummary.getSnapshotFileCount());
+    assertEquals(10, contentSummary.getSnapshotLength());
+    assertEquals(30, contentSummary.getSnapshotSpaceConsumed());
+    assertEquals(dfs.getContentSummary(dirPath),
+        webHdfs.getContentSummary(dirPath));
+  }
+
   @Test
   public void testQuotaUsage() throws Exception {
     final Configuration conf = WebHdfsTestUtil.createConf();