diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java index d1babe3280..84a59cb79b 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java +++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java @@ -3549,7 +3549,8 @@ public RemoteIterator<OpenFileEntry> listOpenFiles( public RemoteIterator<OpenFileEntry> listOpenFiles( EnumSet<OpenFilesType> openFilesTypes, String path) throws IOException { - return dfs.listOpenFiles(openFilesTypes, path); + Path absF = fixRelativePart(new Path(path)); + return dfs.listOpenFiles(openFilesTypes, getPathName(absF)); } diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java index 99ad6f2eb0..d2725f4301 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSNamesystem.java @@ -1904,6 +1904,7 @@ private void metaSave(PrintWriter out) { */ BatchedListEntries<OpenFileEntry> listOpenFiles(long prevId, EnumSet<OpenFilesType> openFilesTypes, String path) throws IOException { + INode.checkAbsolutePath(path); final String operationName = "listOpenFiles"; checkSuperuserPrivilege(); checkOperation(OperationCategory.READ); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INode.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INode.java index 6b29b33f3f..6a52440cf2 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INode.java +++ 
b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/INode.java @@ -806,7 +806,7 @@ static boolean isValidAbsolutePath(final String path){ return path != null && path.startsWith(Path.SEPARATOR); } - private static void checkAbsolutePath(final String path) { + static void checkAbsolutePath(final String path) { if (!isValidAbsolutePath(path)) { throw new AssertionError("Absolute path required, but got '" + path + "'"); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestListOpenFiles.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestListOpenFiles.java index 2158bc7a44..c6603cfee2 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestListOpenFiles.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestListOpenFiles.java @@ -17,6 +17,7 @@ */ package org.apache.hadoop.hdfs.server.namenode; +import static org.apache.hadoop.test.LambdaTestUtils.intercept; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertTrue; @@ -295,4 +296,29 @@ public void testListOpenFilesWithFilterPath() throws IOException { verifyOpenFiles(openFiles, OpenFilesIterator.FILTER_PATH_DEFAULT); } } + + @Test + public void testListOpenFilesWithInvalidPathServerSide() throws Exception { + HashMap<Path, FSDataOutputStream> openFiles = new HashMap<>(); + openFiles.putAll( + DFSTestUtil.createOpenFiles(fs, new Path("/base"), "open-1", 1)); + verifyOpenFiles(openFiles, EnumSet.of(OpenFilesType.ALL_OPEN_FILES), + "/base"); + intercept(AssertionError.class, "Absolute path required", + "Expect InvalidPathException", () -> verifyOpenFiles(new HashMap<>(), + EnumSet.of(OpenFilesType.ALL_OPEN_FILES), "hdfs://cluster/base")); + while(openFiles.size() > 0) { + DFSTestUtil.closeOpenFiles(openFiles, 1); + verifyOpenFiles(openFiles); + } + } + + @Test + 
public void testListOpenFilesWithInvalidPathClientSide() throws Exception { + intercept(IllegalArgumentException.class, "Wrong FS", + "Expect IllegalArgumentException", () -> fs + .listOpenFiles(EnumSet.of(OpenFilesType.ALL_OPEN_FILES), + "hdfs://non-cluster/")); + fs.listOpenFiles(EnumSet.of(OpenFilesType.ALL_OPEN_FILES), "/path"); + } }