diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java
index 111b23b0ef..4eb9b32fe9 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DistributedFileSystem.java
@@ -3380,6 +3380,12 @@ public RemoteIterator<OpenFileEntry> listOpenFiles() throws IOException {
     return dfs.listOpenFiles();
   }
 
+  @Deprecated
+  public RemoteIterator<OpenFileEntry> listOpenFiles(
+      EnumSet<OpenFilesType> openFilesTypes) throws IOException {
+    return dfs.listOpenFiles(openFilesTypes);
+  }
+
   public RemoteIterator<OpenFileEntry> listOpenFiles(
       EnumSet<OpenFilesType> openFilesTypes, String path) throws IOException {
     return dfs.listOpenFiles(openFilesTypes, path);
diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/client/HdfsAdmin.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/client/HdfsAdmin.java
index 575cd24789..3b3d563cd4 100644
--- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/client/HdfsAdmin.java
+++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/client/HdfsAdmin.java
@@ -626,6 +626,12 @@ public RemoteIterator<OpenFileEntry> listOpenFiles() throws IOException {
     return dfs.listOpenFiles();
   }
 
+  @Deprecated
+  public RemoteIterator<OpenFileEntry> listOpenFiles(
+      EnumSet<OpenFilesType> openFilesTypes) throws IOException {
+    return dfs.listOpenFiles(openFilesTypes);
+  }
+
   public RemoteIterator<OpenFileEntry> listOpenFiles(
       EnumSet<OpenFilesType> openFilesTypes, String path) throws IOException {
     return dfs.listOpenFiles(openFilesTypes, path);
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDistributedFileSystem.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDistributedFileSystem.java
index a27ed78e0d..3cb8c802a7 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDistributedFileSystem.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestDistributedFileSystem.java
@@ -94,6 +94,8 @@
 import org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants.StoragePolicySatisfierMode;
 import org.apache.hadoop.hdfs.protocol.LocatedBlock;
+import org.apache.hadoop.hdfs.protocol.OpenFileEntry;
+import org.apache.hadoop.hdfs.protocol.OpenFilesIterator;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsDatasetSpi;
 import org.apache.hadoop.hdfs.server.datanode.fsdataset.FsVolumeSpi;
@@ -194,8 +196,10 @@ public void testFileSystemCloseAll() throws Exception {
    * Tests DFSClient.close throws no ConcurrentModificationException if
    * multiple files are open.
    * Also tests that any cached sockets are closed. (HDFS-3359)
+   * Also tests deprecated listOpenFiles(EnumSet<>). (HDFS-14595)
    */
   @Test
+  @SuppressWarnings("deprecation") // call to listOpenFiles(EnumSet<>)
   public void testDFSClose() throws Exception {
     Configuration conf = getTestConfiguration();
     MiniDFSCluster cluster = null;
@@ -207,6 +211,19 @@ public void testDFSClose() throws Exception {
       fileSys.create(new Path("/test/dfsclose/file-0"));
       fileSys.create(new Path("/test/dfsclose/file-1"));
 
+      // Test listOpenFiles(EnumSet<>)
+      List<OpenFilesIterator.OpenFilesType> types = new ArrayList<>();
+      types.add(OpenFilesIterator.OpenFilesType.ALL_OPEN_FILES);
+      RemoteIterator<OpenFileEntry> listOpenFiles =
+          fileSys.listOpenFiles(EnumSet.copyOf(types));
+      assertTrue("Two files should be open", listOpenFiles.hasNext());
+      int countOpenFiles = 0;
+      while (listOpenFiles.hasNext()) {
+        listOpenFiles.next();
+        ++countOpenFiles;
+      }
+      assertEquals("Mismatch of open files count", 2, countOpenFiles);
+
       // create another file, close it, and read it, so
       // the client gets a socket in its SocketCache
       Path p = new Path("/non-empty-file");
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestHdfsAdmin.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestHdfsAdmin.java
index 6d8ba1a9f4..783531d361 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestHdfsAdmin.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestHdfsAdmin.java
@@ -233,6 +233,9 @@ public void testListOpenFiles() throws IOException {
       closedFileSet.add(filePath);
     }
     verifyOpenFiles(closedFileSet, openFileMap);
+    // Verify again with the old listOpenFiles(EnumSet<>) API
+    // Just to verify old API's validity
+    verifyOpenFilesOld(closedFileSet, openFileMap);
 
     openFileMap.putAll(
         DFSTestUtil.createOpenFiles(fs, "open-file-1", numOpenFiles));
@@ -252,13 +255,10 @@
     }
   }
 
-  private void verifyOpenFiles(HashSet<Path> closedFiles,
-      HashMap<Path, FSDataOutputStream> openFileMap) throws IOException {
-    HdfsAdmin hdfsAdmin = new HdfsAdmin(FileSystem.getDefaultUri(conf), conf);
-    HashSet<Path> openFiles = new HashSet<>(openFileMap.keySet());
-    RemoteIterator<OpenFileEntry> openFilesRemoteItr =
-        hdfsAdmin.listOpenFiles(EnumSet.of(OpenFilesType.ALL_OPEN_FILES),
-            OpenFilesIterator.FILTER_PATH_DEFAULT);
+  private void verifyOpenFilesHelper(
+      RemoteIterator<OpenFileEntry> openFilesRemoteItr,
+      HashSet<Path> closedFiles,
+      HashSet<Path> openFiles) throws IOException {
     while (openFilesRemoteItr.hasNext()) {
       String filePath = openFilesRemoteItr.next().getFilePath();
       assertFalse(filePath + " should not be listed under open files!",
@@ -266,6 +266,30 @@ private void verifyOpenFiles(HashSet<Path> closedFiles,
       assertTrue(filePath + " is not listed under open files!",
           openFiles.remove(new Path(filePath)));
     }
+  }
+
+  private void verifyOpenFiles(HashSet<Path> closedFiles,
+      HashMap<Path, FSDataOutputStream> openFileMap) throws IOException {
+    HdfsAdmin hdfsAdmin = new HdfsAdmin(FileSystem.getDefaultUri(conf), conf);
+    HashSet<Path> openFiles = new HashSet<>(openFileMap.keySet());
+    RemoteIterator<OpenFileEntry> openFilesRemoteItr =
+        hdfsAdmin.listOpenFiles(EnumSet.of(OpenFilesType.ALL_OPEN_FILES),
+            OpenFilesIterator.FILTER_PATH_DEFAULT);
+    verifyOpenFilesHelper(openFilesRemoteItr, closedFiles, openFiles);
+    assertTrue("Not all open files are listed!", openFiles.isEmpty());
+  }
+
+  /**
+   * Using deprecated HdfsAdmin#listOpenFiles(EnumSet<>) to verify open files.
+   */
+  @SuppressWarnings("deprecation") // call to listOpenFiles(EnumSet<>)
+  private void verifyOpenFilesOld(HashSet<Path> closedFiles,
+      HashMap<Path, FSDataOutputStream> openFileMap) throws IOException {
+    HdfsAdmin hdfsAdmin = new HdfsAdmin(FileSystem.getDefaultUri(conf), conf);
+    HashSet<Path> openFiles = new HashSet<>(openFileMap.keySet());
+    RemoteIterator<OpenFileEntry> openFilesRemoteItr =
+        hdfsAdmin.listOpenFiles(EnumSet.of(OpenFilesType.ALL_OPEN_FILES));
+    verifyOpenFilesHelper(openFilesRemoteItr, closedFiles, openFiles);
     assertTrue("Not all open files are listed!", openFiles.isEmpty());
   }
 }