From 0fc988e6a3dc6b435cbeea680549c06ef6147e3f Mon Sep 17 00:00:00 2001 From: Arpit Agarwal Date: Wed, 16 May 2018 11:28:39 -0700 Subject: [PATCH] HDFS-13512. WebHdfs getFileStatus doesn't return ecPolicy. Contributed by Ajay Kumar. --- .../hadoop/hdfs/web/JsonUtilClient.java | 16 +++++++ .../org/apache/hadoop/hdfs/web/JsonUtil.java | 19 ++++++++ .../apache/hadoop/hdfs/web/TestJsonUtil.java | 48 ++++++++++++++++++- 3 files changed, 82 insertions(+), 1 deletion(-) diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/JsonUtilClient.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/JsonUtilClient.java index 13c5226cfe..9bb1846fc6 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/JsonUtilClient.java +++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/JsonUtilClient.java @@ -37,6 +37,7 @@ import org.apache.hadoop.hdfs.DFSUtilClient; import org.apache.hadoop.hdfs.protocol.BlockStoragePolicy; import org.apache.hadoop.hdfs.protocol.DatanodeInfo; +import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy; import org.apache.hadoop.hdfs.protocol.SnapshotDiffReport; import org.apache.hadoop.hdfs.protocol.SnapshottableDirectoryStatus; import org.apache.hadoop.hdfs.protocol.DatanodeInfo.DatanodeInfoBuilder; @@ -48,6 +49,7 @@ import org.apache.hadoop.hdfs.protocol.LocatedBlocks; import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier; import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier; +import org.apache.hadoop.io.erasurecode.ECSchema; import org.apache.hadoop.ipc.RemoteException; import org.apache.hadoop.security.token.Token; import org.apache.hadoop.security.token.TokenIdentifier; @@ -143,6 +145,19 @@ static HdfsFileStatus toFileStatus(final Map json, f.add(HdfsFileStatus.Flags.HAS_EC); } + Map ecPolicyObj = (Map) m.get("ecPolicyObj"); + ErasureCodingPolicy ecPolicy = null; + if 
(ecPolicyObj != null) { + Map extraOptions = (Map) ecPolicyObj.get("extraOptions"); + ECSchema ecSchema = new ECSchema((String) ecPolicyObj.get("codecName"), + (int) ecPolicyObj.get("numDataUnits"), + (int) ecPolicyObj.get("numParityUnits"), extraOptions); + ecPolicy = new ErasureCodingPolicy((String) ecPolicyObj.get("name"), + ecSchema, (int) ecPolicyObj.get("cellSize"), + (byte) (int) ecPolicyObj.get("id")); + + } + final long aTime = ((Number) m.get("accessTime")).longValue(); final long mTime = ((Number) m.get("modificationTime")).longValue(); final long blockSize = ((Number) m.get("blockSize")).longValue(); @@ -170,6 +185,7 @@ static HdfsFileStatus toFileStatus(final Map json, .fileId(fileId) .children(childrenNum) .storagePolicy(storagePolicy) + .ecPolicy(ecPolicy) .build(); } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java index 43a252b1c3..5c810bf0c2 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/web/JsonUtil.java @@ -17,6 +17,7 @@ */ package org.apache.hadoop.hdfs.web; +import com.google.common.collect.ImmutableMap; import org.apache.hadoop.fs.BlockLocation; import org.apache.hadoop.fs.ContentSummary; import org.apache.hadoop.fs.FileChecksum; @@ -135,7 +136,10 @@ private static Map toJsonMap(HdfsFileStatus status) { if (status.isErasureCoded()) { m.put("ecBit", true); if (status.getErasureCodingPolicy() != null) { + // to maintain backward compatibility m.put("ecPolicy", status.getErasureCodingPolicy().getName()); + // to re-construct HdfsFileStatus object via WebHdfs + m.put("ecPolicyObj", getEcPolicyAsMap(status.getErasureCodingPolicy())); } } if (status.isSnapshotEnabled()) { @@ -152,6 +156,21 @@ private static Map toJsonMap(HdfsFileStatus status) { return m; } + private static Map 
getEcPolicyAsMap( + final ErasureCodingPolicy ecPolicy) { + /** Convert an ErasureCodingPolicy to a map. */ + ImmutableMap.Builder builder = ImmutableMap.builder(); + builder.put("name", ecPolicy.getName()) + .put("cellSize", ecPolicy.getCellSize()) + .put("numDataUnits", ecPolicy.getNumDataUnits()) + .put("numParityUnits", ecPolicy.getNumParityUnits()) + .put("codecName", ecPolicy.getCodecName()) + .put("id", ecPolicy.getId()) + .put("extraOptions", ecPolicy.getSchema().getExtraOptions()); + return builder.build(); + + } + /** Convert an ExtendedBlock to a Json map. */ private static Map toJsonMap(final ExtendedBlock extendedblock) { if (extendedblock == null) { diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestJsonUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestJsonUtil.java index 2d9c8b1af9..e1dc2716ab 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestJsonUtil.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestJsonUtil.java @@ -23,6 +23,7 @@ import static org.apache.hadoop.hdfs.server.namenode.AclTestHelpers.*; import java.io.IOException; +import java.util.EnumSet; import java.util.HashMap; import java.util.Iterator; import java.util.List; @@ -40,8 +41,11 @@ import org.apache.hadoop.hdfs.DFSUtilClient; import org.apache.hadoop.hdfs.XAttrHelper; import org.apache.hadoop.hdfs.protocol.DatanodeInfo; +import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy; import org.apache.hadoop.hdfs.protocol.HdfsConstants; import org.apache.hadoop.hdfs.protocol.HdfsFileStatus; +import org.apache.hadoop.hdfs.protocol.HdfsFileStatus.Flags; +import org.apache.hadoop.io.erasurecode.ECSchema; import org.apache.hadoop.util.Time; import org.junit.Assert; import org.junit.Test; @@ -66,9 +70,48 @@ static FileStatus toFileStatus(HdfsFileStatus f, String parent) { } @Test - public void testHdfsFileStatus() throws IOException { + 
public void testHdfsFileStatusWithEcPolicy() throws IOException { final long now = Time.now(); final String parent = "/dir"; + ErasureCodingPolicy dummyEcPolicy = new ErasureCodingPolicy("ecPolicy1", + new ECSchema("EcSchema", 1, 1), 1024 * 2, (byte) 1); + final HdfsFileStatus status = new HdfsFileStatus.Builder() + .length(1001L) + .replication(3) + .blocksize(1L << 26) + .mtime(now) + .atime(now + 10) + .perm(new FsPermission((short) 0644)) + .owner("user") + .group("group") + .symlink(DFSUtil.string2Bytes("bar")) + .path(DFSUtil.string2Bytes("foo")) + .fileId(HdfsConstants.GRANDFATHER_INODE_ID) + .ecPolicy(dummyEcPolicy) + .flags(EnumSet.allOf(Flags.class)) + .build(); + + final FileStatus fstatus = toFileStatus(status, parent); + System.out.println("status = " + status); + System.out.println("fstatus = " + fstatus); + final String json = JsonUtil.toJsonString(status, true); + System.out.println("json = " + json.replace(",", ",\n ")); + final HdfsFileStatus s2 = + JsonUtilClient.toFileStatus((Map) READER.readValue(json), true); + final FileStatus fs2 = toFileStatus(s2, parent); + System.out.println("s2 = " + s2); + System.out.println("fs2 = " + fs2); + Assert.assertEquals(status.getErasureCodingPolicy(), + s2.getErasureCodingPolicy()); + Assert.assertEquals(fstatus, fs2); + } + + @Test + public void testHdfsFileStatusWithoutEcPolicy() throws IOException { + final long now = Time.now(); + final String parent = "/dir"; + ErasureCodingPolicy dummyEcPolicy = new ErasureCodingPolicy("ecPolicy1", + new ECSchema("EcSchema", 1, 1), 1024 * 2, (byte) 1); final HdfsFileStatus status = new HdfsFileStatus.Builder() .length(1001L) .replication(3) @@ -82,6 +125,8 @@ public void testHdfsFileStatus() throws IOException { .path(DFSUtil.string2Bytes("foo")) .fileId(HdfsConstants.GRANDFATHER_INODE_ID) .build(); + Assert.assertTrue(status.getErasureCodingPolicy() == null); + final FileStatus fstatus = toFileStatus(status, parent); System.out.println("status = " + status); 
System.out.println("fstatus = " + fstatus); @@ -92,6 +137,7 @@ public void testHdfsFileStatus() throws IOException { final FileStatus fs2 = toFileStatus(s2, parent); System.out.println("s2 = " + s2); System.out.println("fs2 = " + fs2); + Assert.assertEquals(fstatus, fs2); }