Commit the test and the conf changes in common for HDFS-2284.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1166009 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Tsz-wo Sze 2011-09-07 06:05:04 +00:00
parent 0ecba04727
commit 1d6793d0b7
3 changed files with 13 additions and 8 deletions

View File

@@ -317,6 +317,11 @@
<value>org.apache.hadoop.hdfs.HsftpFileSystem</value> <value>org.apache.hadoop.hdfs.HsftpFileSystem</value>
</property> </property>
<property>
<name>fs.webhdfs.impl</name>
<value>org.apache.hadoop.hdfs.web.WebHdfsFileSystem</value>
</property>
<property> <property>
<name>fs.ftp.impl</name> <name>fs.ftp.impl</name>
<value>org.apache.hadoop.fs.ftp.FTPFileSystem</value> <value>org.apache.hadoop.fs.ftp.FTPFileSystem</value>

View File

@@ -32,6 +32,7 @@
import org.junit.Assert; import org.junit.Assert;
import org.junit.Before; import org.junit.Before;
import org.junit.Test; import org.junit.Test;
import org.mortbay.log.Log;
import static org.apache.hadoop.fs.FileSystemTestHelper.*; import static org.apache.hadoop.fs.FileSystemTestHelper.*;
@@ -62,8 +63,6 @@ public abstract class FSMainOperationsBaseTest {
private static String TEST_DIR_AXX = "test/hadoop/axx"; private static String TEST_DIR_AXX = "test/hadoop/axx";
private static int numBlocks = 2; private static int numBlocks = 2;
static final String LOCAL_FS_ROOT_URI = "file:///tmp/test";
protected static FileSystem fSys; protected static FileSystem fSys;
@@ -83,7 +82,7 @@ public boolean accept(Path file) {
} }
}; };
private static byte[] data = getFileData(numBlocks, protected static final byte[] data = getFileData(numBlocks,
getDefaultBlockSize()); getDefaultBlockSize());
@Before @Before
@@ -183,7 +182,7 @@ public void testWorkingDirectory() throws Exception {
@Test @Test
public void testWDAbsolute() throws IOException { public void testWDAbsolute() throws IOException {
Path absoluteDir = new Path(LOCAL_FS_ROOT_URI + "/existingDir"); Path absoluteDir = new Path(fSys.getUri() + "/test/existingDir");
fSys.mkdirs(absoluteDir); fSys.mkdirs(absoluteDir);
fSys.setWorkingDirectory(absoluteDir); fSys.setWorkingDirectory(absoluteDir);
Assert.assertEquals(absoluteDir, fSys.getWorkingDirectory()); Assert.assertEquals(absoluteDir, fSys.getWorkingDirectory());
@@ -646,7 +645,7 @@ public void testWriteReadAndDeleteTwoBlocks() throws Exception {
writeReadAndDelete(getDefaultBlockSize() * 2); writeReadAndDelete(getDefaultBlockSize() * 2);
} }
private void writeReadAndDelete(int len) throws IOException { protected void writeReadAndDelete(int len) throws IOException {
Path path = getTestRootPath(fSys, "test/hadoop/file"); Path path = getTestRootPath(fSys, "test/hadoop/file");
fSys.mkdirs(path.getParent()); fSys.mkdirs(path.getParent());
@@ -768,6 +767,7 @@ public void testRenameNonExistentPath() throws Exception {
rename(src, dst, false, false, false, Rename.NONE); rename(src, dst, false, false, false, Rename.NONE);
Assert.fail("Should throw FileNotFoundException"); Assert.fail("Should throw FileNotFoundException");
} catch (IOException e) { } catch (IOException e) {
Log.info("XXX", e);
Assert.assertTrue(unwrapException(e) instanceof FileNotFoundException); Assert.assertTrue(unwrapException(e) instanceof FileNotFoundException);
} }

View File

@@ -45,7 +45,7 @@
public abstract class FileSystemContractBaseTest extends TestCase { public abstract class FileSystemContractBaseTest extends TestCase {
protected FileSystem fs; protected FileSystem fs;
private byte[] data = new byte[getBlockSize() * 2]; // two blocks of data protected byte[] data = new byte[getBlockSize() * 2]; // two blocks of data
{ {
for (int i = 0; i < data.length; i++) { for (int i = 0; i < data.length; i++) {
data[i] = (byte) (i % 10); data[i] = (byte) (i % 10);
@@ -215,7 +215,7 @@ public void testWriteReadAndDeleteTwoBlocks() throws Exception {
writeReadAndDelete(getBlockSize() * 2); writeReadAndDelete(getBlockSize() * 2);
} }
private void writeReadAndDelete(int len) throws IOException { protected void writeReadAndDelete(int len) throws IOException {
Path path = path("/test/hadoop/file"); Path path = path("/test/hadoop/file");
fs.mkdirs(path.getParent()); fs.mkdirs(path.getParent());
@@ -256,7 +256,7 @@ public void testOverwrite() throws IOException {
assertEquals("Length", data.length, fs.getFileStatus(path).getLen()); assertEquals("Length", data.length, fs.getFileStatus(path).getLen());
try { try {
fs.create(path, false); fs.create(path, false).close();
fail("Should throw IOException."); fail("Should throw IOException.");
} catch (IOException e) { } catch (IOException e) {
// Expected // Expected