HDFS-4586. TestDataDirs.testGetDataDirsFromURIs fails with all directories in dfs.datanode.data.dir are invalid. Contributed by Ivan Mitic.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1455708 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Aaron Myers 2013-03-12 20:55:33 +00:00
parent 6a9ccd809b
commit 813e97494a
3 changed files with 37 additions and 22 deletions

View File

@@ -309,6 +309,9 @@ Trunk (Unreleased)
HDFS-4391. TestDataTransferKeepalive fails when tests are executed in a
certain order. (Andrew Wang via atm)
HDFS-4586. TestDataDirs.testGetDataDirsFromURIs fails with all directories
in dfs.datanode.data.dir are invalid. (Ivan Mitic via atm)
BREAKDOWN OF HADOOP-8562 SUBTASKS AND RELATED JIRAS
HDFS-4145. Merge hdfs cmd line scripts from branch-1-win. (David Lao,

View File

@@ -1617,6 +1617,21 @@ void join() {
}
}
// Small wrapper around the DiskChecker class that provides means to mock
// DiskChecker static methods and unittest DataNode#getDataDirsFromURIs.
static class DataNodeDiskChecker {
  // Set once in the constructor and never reassigned, so declared final.
  private final FsPermission expectedPermission;

  public DataNodeDiskChecker(FsPermission expectedPermission) {
    this.expectedPermission = expectedPermission;
  }

  /**
   * Runs the disk check on {@code path}, delegating to the static
   * {@link DiskChecker#checkDir} helper with the expected permission.
   *
   * @param localFS the local filesystem the path lives on
   * @param path the data directory to check
   * @throws DiskErrorException if the directory fails the disk check
   * @throws IOException on filesystem access errors
   */
  public void checkDir(LocalFileSystem localFS, Path path)
      throws DiskErrorException, IOException {
    DiskChecker.checkDir(localFS, path, expectedPermission);
  }
}
/**
* Make an instance of DataNode after ensuring that at least one of the
* given data directories (and their parent directories, if necessary)
@@ -1635,7 +1650,10 @@ static DataNode makeInstance(Collection<URI> dataDirs, Configuration conf,
FsPermission permission = new FsPermission(
conf.get(DFS_DATANODE_DATA_DIR_PERMISSION_KEY,
DFS_DATANODE_DATA_DIR_PERMISSION_DEFAULT));
ArrayList<File> dirs = getDataDirsFromURIs(dataDirs, localFS, permission);
DataNodeDiskChecker dataNodeDiskChecker =
new DataNodeDiskChecker(permission);
ArrayList<File> dirs =
getDataDirsFromURIs(dataDirs, localFS, dataNodeDiskChecker);
DefaultMetricsSystem.initialize("DataNode");
assert dirs.size() > 0 : "number of data directories should be > 0";
@@ -1644,7 +1662,8 @@ static DataNode makeInstance(Collection<URI> dataDirs, Configuration conf,
// DataNode ctor expects AbstractList instead of List or Collection...
static ArrayList<File> getDataDirsFromURIs(Collection<URI> dataDirs,
LocalFileSystem localFS, FsPermission permission) throws IOException {
LocalFileSystem localFS, DataNodeDiskChecker dataNodeDiskChecker)
throws IOException {
ArrayList<File> dirs = new ArrayList<File>();
StringBuilder invalidDirs = new StringBuilder();
for (URI dirURI : dataDirs) {
@@ -1656,7 +1675,7 @@ static ArrayList<File> getDataDirsFromURIs(Collection<URI> dataDirs,
// drop any (illegal) authority in the URI for backwards compatibility
File dir = new File(dirURI.getPath());
try {
DiskChecker.checkDir(localFS, new Path(dir.toURI()), permission);
dataNodeDiskChecker.checkDir(localFS, new Path(dir.toURI()));
dirs.add(dir);
} catch (IOException ioe) {
LOG.warn("Invalid " + DFS_DATANODE_DATA_DIR_KEY + " "

View File

@@ -27,33 +27,26 @@
import org.junit.Test;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
import static org.apache.hadoop.test.MockitoMaker.*;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.server.datanode.DataNode.DataNodeDiskChecker;
public class TestDataDirs {
@Test public void testGetDataDirsFromURIs() throws Throwable {
File localDir = make(stub(File.class).returning(true).from.exists());
when(localDir.mkdir()).thenReturn(true);
FsPermission normalPerm = new FsPermission("700");
FsPermission badPerm = new FsPermission("000");
FileStatus stat = make(stub(FileStatus.class)
.returning(normalPerm, normalPerm, badPerm).from.getPermission());
when(stat.isDirectory()).thenReturn(true);
LocalFileSystem fs = make(stub(LocalFileSystem.class)
.returning(stat).from.getFileStatus(any(Path.class)));
when(fs.pathToFile(any(Path.class))).thenReturn(localDir);
@Test (timeout = 10000)
public void testGetDataDirsFromURIs() throws Throwable {
DataNodeDiskChecker diskChecker = mock(DataNodeDiskChecker.class);
doThrow(new IOException()).doThrow(new IOException()).doNothing()
.when(diskChecker).checkDir(any(LocalFileSystem.class), any(Path.class));
LocalFileSystem fs = mock(LocalFileSystem.class);
Collection<URI> uris = Arrays.asList(new URI("file:/p1/"),
new URI("file:/p2/"), new URI("file:/p3/"));
List<File> dirs = DataNode.getDataDirsFromURIs(uris, fs, normalPerm);
verify(fs, times(2)).setPermission(any(Path.class), eq(normalPerm));
verify(fs, times(6)).getFileStatus(any(Path.class));
assertEquals("number of valid data dirs", dirs.size(), 1);
List<File> dirs = DataNode.getDataDirsFromURIs(uris, fs, diskChecker);
assertEquals("number of valid data dirs", 1, dirs.size());
String validDir = dirs.iterator().next().getPath();
assertEquals("p3 should be valid", new File("/p3").getPath(), validDir);
}
}