HADOOP-12056. Use DirectoryStream in DiskChecker#checkDirs to detect errors when listing a directory. Contributed by Zhihai Xu.
commit bc11e158b1
parent 2dbc40e608
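For context on the bug being fixed: java.io.File#listFiles returns null when a directory cannot be read, so the old loop in DiskChecker#checkDirs died with an uninformative NullPointerException instead of surfacing the I/O error, while java.nio.file.Files#newDirectoryStream reports the failure as an IOException. A minimal sketch of the contrast (the ListingDemo class is hypothetical, not part of the patch):

import java.io.File;
import java.io.IOException;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;

public class ListingDemo {
  public static void main(String[] args) throws IOException {
    File dir = new File(args[0]);  // pass a directory path

    // Old approach: listFiles() signals failure by returning null with
    // no reason attached; iterating over the result then throws an
    // uninformative NullPointerException.
    File[] children = dir.listFiles();
    if (children == null) {
      System.err.println("listFiles() failed, but gives no cause");
    }

    // New approach: newDirectoryStream() throws an IOException that
    // identifies the path and carries the underlying cause.
    try (DirectoryStream<Path> stream =
        Files.newDirectoryStream(dir.toPath())) {
      for (Path entry : stream) {
        System.out.println(entry);
      }
    }
  }
}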
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -637,6 +637,9 @@ Release 2.8.0 - UNRELEASED
     HADOOP-12059. S3Credentials should support use of CredentialProvider.
     (Sean Busbey via wang)
 
+    HADOOP-12056. Use DirectoryStream in DiskChecker#checkDirs to detect
+    errors when listing a directory. (Zhihai Xu via wang)
+
   OPTIMIZATIONS
 
     HADOOP-11785. Reduce the number of listStatus operation in distcp
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/util/DiskChecker.java
@@ -20,6 +20,9 @@
 
 import java.io.File;
 import java.io.IOException;
+import java.nio.file.DirectoryStream;
+import java.nio.file.DirectoryIteratorException;
+import java.nio.file.Files;
 
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
@@ -86,13 +89,26 @@ public static boolean mkdirsWithExistsCheck(File dir) {
    */
   public static void checkDirs(File dir) throws DiskErrorException {
     checkDir(dir);
-    for (File child : dir.listFiles()) {
-      if (child.isDirectory()) {
-        checkDirs(child);
+    IOException ex = null;
+    try (DirectoryStream<java.nio.file.Path> stream =
+        Files.newDirectoryStream(dir.toPath())) {
+      for (java.nio.file.Path entry: stream) {
+        File child = entry.toFile();
+        if (child.isDirectory()) {
+          checkDirs(child);
+        }
       }
-    }
+    } catch (DirectoryIteratorException de) {
+      ex = de.getCause();
+    } catch (IOException ie) {
+      ex = ie;
+    }
+    if (ex != null) {
+      throw new DiskErrorException("I/O error when open a directory: "
+          + dir.toString(), ex);
+    }
   }
 
   /**
    * Create the directory if it doesn't exist and check that dir is readable,
    * writable and executable
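A note on the two catch blocks above: DirectoryStream reports read errors raised mid-iteration through the unchecked java.nio.file.DirectoryIteratorException, whose getCause() is specified to return the original IOException, while a failure to open the stream surfaces as a checked IOException. The patched method therefore funnels both into one variable before wrapping it in DiskErrorException. A self-contained sketch of the same pattern (WalkDemo and the path are hypothetical):

import java.io.IOException;
import java.nio.file.DirectoryIteratorException;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;

public class WalkDemo {
  public static void main(String[] args) {
    Path dir = Paths.get("/tmp/some-dir");  // hypothetical path
    IOException ex = null;
    try (DirectoryStream<Path> stream = Files.newDirectoryStream(dir)) {
      for (Path entry : stream) {   // hasNext()/next() may throw the
        System.out.println(entry);  // unchecked DirectoryIteratorException
      }
    } catch (DirectoryIteratorException de) {
      ex = de.getCause();           // unwrap the original IOException
    } catch (IOException ie) {
      ex = ie;                      // opening the stream itself failed
    }
    if (ex != null) {
      System.err.println("I/O error when listing " + dir + ": " + ex);
    }
  }
}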
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/util/TestDiskChecker.java
@@ -32,6 +32,7 @@
 import org.apache.hadoop.fs.LocalFileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.permission.FsPermission;
+import org.apache.hadoop.test.GenericTestUtils;
 import org.apache.hadoop.util.DiskChecker.DiskErrorException;
 import org.apache.hadoop.util.Shell;
 
@@ -180,4 +181,25 @@ private void _checkDirs(boolean isDir, String perm, boolean success)
     System.out.println("checkDir success: " + success);
 
   }
+
+  @Test (timeout = 30000)
+  public void testCheckDirsIOException() throws Throwable {
+    Path path = new Path("target", TestDiskChecker.class.getSimpleName());
+    File localDir = new File(path.toUri().getRawPath());
+    localDir.mkdir();
+    File localFile = new File(localDir, "test");
+    localFile.createNewFile();
+    File spyLocalDir = spy(localDir);
+    doReturn(localFile.toPath()).when(spyLocalDir).toPath();
+    try {
+      DiskChecker.checkDirs(spyLocalDir);
+      fail("Expected exception for I/O error");
+    } catch (DiskErrorException e) {
+      GenericTestUtils.assertExceptionContains("I/O error", e);
+      assertTrue(e.getCause() instanceof IOException);
+    } finally {
+      localFile.delete();
+      localDir.delete();
+    }
+  }
 }
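The test provokes the I/O failure by stubbing File#toPath on a Mockito spy so that it returns a regular file; Files#newDirectoryStream then fails with NotDirectoryException (a subclass of IOException), which checkDirs wraps in DiskErrorException. A standalone sketch of that stubbing trick, assuming Mockito is on the classpath (the SpyDemo class is hypothetical):

import static org.mockito.Mockito.doReturn;
import static org.mockito.Mockito.spy;

import java.io.File;
import java.io.IOException;
import java.nio.file.DirectoryStream;
import java.nio.file.Files;
import java.nio.file.NotDirectoryException;
import java.nio.file.Path;

public class SpyDemo {
  public static void main(String[] args) throws IOException {
    File dir = Files.createTempDirectory("spy-demo").toFile();
    File file = new File(dir, "test");
    file.createNewFile();

    // The spy behaves like the real directory except that toPath()
    // now points at a regular file rather than a directory.
    File spyDir = spy(dir);
    doReturn(file.toPath()).when(spyDir).toPath();

    try (DirectoryStream<Path> stream =
        Files.newDirectoryStream(spyDir.toPath())) {
      System.out.println("unexpected: stream opened");  // not reached
    } catch (NotDirectoryException expected) {
      System.out.println("got expected failure: " + expected);
    } finally {
      file.delete();
      dir.delete();
    }
  }
}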