From dfb351c3a8c510b712e853ecd338e6447315d062 Mon Sep 17 00:00:00 2001
From: zhtttylz
Date: Fri, 14 Jul 2023 14:23:36 +0800
Subject: [PATCH] HDFS-17083. Support getErasureCodeCodecs API in WebHDFS
 (#5836). Contributed by Hualong Zhang.

Reviewed-by: Shilun Fan
Signed-off-by: Ayush Saxena
---
 .../hadoop/hdfs/web/JsonUtilClient.java       | 10 ++++++
 .../hadoop/hdfs/web/WebHdfsFileSystem.java    | 13 ++++++++
 .../hadoop/hdfs/web/resources/GetOpParam.java |  1 +
 .../web/resources/NamenodeWebHdfsMethods.java |  6 ++++
 .../hadoop-hdfs/src/site/markdown/WebHDFS.md  | 33 +++++++++++++++++++
 .../apache/hadoop/hdfs/web/TestWebHDFS.java   | 24 ++++++++++++++
 6 files changed, 87 insertions(+)

diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/JsonUtilClient.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/JsonUtilClient.java
index 81c40fe634..90dcd83ddb 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/JsonUtilClient.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/JsonUtilClient.java
@@ -75,6 +75,7 @@
 import java.util.Collection;
 import java.util.Collections;
 import java.util.EnumSet;
+import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
@@ -867,6 +868,15 @@ public static ErasureCodingPolicyInfo toECPolicyInfo(Map<?, ?> m) {
     return new ErasureCodingPolicyInfo(ecPolicy, ecPolicyState);
   }
 
+  public static Map<String, String> getErasureCodeCodecs(Map<?, ?> json) {
+    Map<String, String> map = new HashMap<>();
+    Map<?, ?> m = (Map<?, ?>) json.get("ErasureCodingCodecs");
+    m.forEach((key, value) -> {
+      map.put((String) key, (String) value);
+    });
+    return map;
+  }
+
   private static List<SnapshotDiffReport.DiffReportEntry> toDiffList(
       List<?> objs) {
     if (objs == null) {
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
index f65ec98a9d..5210692ab3 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/WebHdfsFileSystem.java
@@ -2206,6 +2206,19 @@ Collection<ErasureCodingPolicyInfo> decodeResponse(Map<?, ?> json) {
     }.run();
   }
 
+  public Map<String, String> getAllErasureCodingCodecs()
+      throws IOException {
+    statistics.incrementReadOps(1);
+    storageStatistics.incrementOpCounter(OpType.GET_EC_CODECS);
+    final HttpOpParam.Op op = GetOpParam.Op.GETECCODECS;
+    return new FsPathResponseRunner<Map<String, String>>(op, null) {
+      @Override
+      Map<String, String> decodeResponse(Map<?, ?> json) {
+        return JsonUtilClient.getErasureCodeCodecs(json);
+      }
+    }.run();
+  }
+
   @VisibleForTesting
   InetSocketAddress[] getResolvedNNAddr() {
     return nnAddrs;
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/resources/GetOpParam.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/resources/GetOpParam.java
index 093609843a..3efe37b61f 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/resources/GetOpParam.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/web/resources/GetOpParam.java
@@ -68,6 +68,7 @@ public enum Op implements HttpOpParam.Op {
     GETFILELINKSTATUS(false, HttpURLConnection.HTTP_OK),
     GETSTATUS(false, HttpURLConnection.HTTP_OK),
     GETECPOLICIES(false, HttpURLConnection.HTTP_OK),
+    GETECCODECS(false, HttpURLConnection.HTTP_OK),
     GETSNAPSHOTLIST(false, HttpURLConnection.HTTP_OK);
 
     final boolean redirect;
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
index 8495256d4b..c00ff5d8ec 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/web/resources/NamenodeWebHdfsMethods.java
@@ -33,6 +33,7 @@
 import java.util.EnumSet;
 import java.util.HashSet;
 import java.util.List;
+import java.util.Map;
 import java.util.concurrent.ExecutionException;
 
 import javax.servlet.ServletContext;
@@ -1413,6 +1414,11 @@ protected Response get(
       final String js = JsonUtil.toJsonString(ecPolicyInfos);
       return Response.ok(js).type(MediaType.APPLICATION_JSON).build();
     }
+    case GETECCODECS: {
+      Map<String, String> ecCodecs = cp.getErasureCodingCodecs();
+      final String js = JsonUtil.toJsonString("ErasureCodingCodecs", ecCodecs);
+      return Response.ok(js).type(MediaType.APPLICATION_JSON).build();
+    }
     default:
       throw new UnsupportedOperationException(op + " is not supported");
     }
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/site/markdown/WebHDFS.md b/hadoop-hdfs-project/hadoop-hdfs/src/site/markdown/WebHDFS.md
index baee66211a..969d288b4c 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/site/markdown/WebHDFS.md
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/site/markdown/WebHDFS.md
@@ -62,6 +62,7 @@ The HTTP REST API supports the complete [FileSystem](../../api/org/apache/hadoop
     * [`GETFILELINKSTATUS`](#Get_File_Link_Status) (see [FileSystem](../../api/org/apache/hadoop/fs/FileSystem.html).getFileLinkStatus)
     * [`GETSTATUS`](#Get_Status) (see [FileSystem](../../api/org/apache/hadoop/fs/FileSystem.html).getStatus)
     * [`GETECPOLICIES`](#Get_EC_Policies)
+    * [`GETECCODECS`](#Get_EC_Codecs)
 * HTTP PUT
     * [`CREATE`](#Create_and_Write_to_a_File) (see [FileSystem](../../api/org/apache/hadoop/fs/FileSystem.html).create)
     * [`MKDIRS`](#Make_a_Directory) (see [FileSystem](../../api/org/apache/hadoop/fs/FileSystem.html).mkdirs)
@@ -1252,6 +1253,26 @@ See also: [FileSystem](../../api/org/apache/hadoop/fs/FileSystem.html).getFileLi
 
 See also: [FileSystem](../../api/org/apache/hadoop/fs/FileSystem.html).getStatus
 
+### Get EC Codecs
+
+* Submit a HTTP GET request.
+
+        curl -i "http://<HOST>:<PORT>/webhdfs/v1/?op=GETECCODECS"
+
+    The client receives a response with a [`ECCodecs` JSON object](#EC_Codecs_JSON_Schema):
+
+        HTTP/1.1 200 OK
+        Content-Type: application/json
+        Transfer-Encoding: chunked
+
+        {
+            "ErasureCodingCodecs": {
+                "rs": "rs_native, rs_java",
+                "rs-legacy": "rs-legacy_java",
+                "xor": "xor_native, xor_java"
+            }
+        }
+
 Storage Policy Operations
 -------------------------
 
@@ -3244,6 +3265,18 @@ var blockLocationProperties =
 }
 ```
 
+### EC Codecs JSON Schema
+
+```json
+{
+  "ErasureCodingCodecs": {
+    "rs": "rs_native, rs_java",
+    "rs-legacy": "rs-legacy_java",
+    "xor": "xor_native, xor_java"
+  }
+}
+```
+
 HTTP Query Parameter Dictionary
 -------------------------------
 
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHDFS.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHDFS.java
index 6bd7373248..18382d201e 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHDFS.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/web/TestWebHDFS.java
@@ -2318,6 +2318,30 @@ public void testGetErasureCodingPolicies() throws Exception {
     }
   }
 
+  @Test
+  public void getAllErasureCodingCodecs() throws Exception {
+    final Configuration conf = WebHdfsTestUtil.createConf();
+    cluster = new MiniDFSCluster.Builder(conf).build();
+    try {
+      cluster.waitActive();
+
+      final WebHdfsFileSystem webHdfs =
+          WebHdfsTestUtil.getWebHdfsFileSystem(conf,
+              WebHdfsConstants.WEBHDFS_SCHEME);
+
+      final DistributedFileSystem dfs = cluster.getFileSystem();
+
+      Map<String, String> webHdfsEcCodecs = webHdfs.getAllErasureCodingCodecs();
+
+      Map<String, String> dfsEcCodecs = dfs.getAllErasureCodingCodecs();
+
+      // Validate erasureCodingCodecs are the same as DistributedFileSystem
+      assertEquals(webHdfsEcCodecs, dfsEcCodecs);
+    } finally {
+      cluster.shutdown();
+    }
+  }
+
   /**
    * Get FileStatus JSONObject from ListStatus response.
    */
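
---

For reviewers, a minimal usage sketch (not part of the patch) showing how a client could call the new `getAllErasureCodingCodecs()` method over WebHDFS. The `webhdfs://localhost:9870` URI and the `ListEcCodecs` class name are illustrative assumptions; point the URI at your own NameNode HTTP address.

```java
import java.net.URI;
import java.util.Map;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hdfs.web.WebHdfsFileSystem;

public class ListEcCodecs {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // A webhdfs:// URI resolves to WebHdfsFileSystem, which now exposes
    // getAllErasureCodingCodecs() backed by the GETECCODECS REST op.
    try (FileSystem fs =
        FileSystem.get(URI.create("webhdfs://localhost:9870"), conf)) {
      WebHdfsFileSystem webHdfs = (WebHdfsFileSystem) fs;
      Map<String, String> codecs = webHdfs.getAllErasureCodingCodecs();
      // Each entry maps a codec name (e.g. "rs") to its available coders.
      codecs.forEach((codec, coders) ->
          System.out.println(codec + " -> " + coders));
    }
  }
}
```

The same information is available without Java code via the REST call documented above, e.g. `curl -i "http://<HOST>:<PORT>/webhdfs/v1/?op=GETECCODECS"`.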