HDFS-3603. Decouple TestHDFSTrash from TestTrash. Contributed by Jason Lowe

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1358804 13f79535-47bb-0310-9956-ffa450edef68
Eli Collins 2012-07-08 18:15:30 +00:00
parent d4fb882163
commit 6dcf42262d
3 changed files with 22 additions and 21 deletions


@@ -89,7 +89,7 @@ static void checkNotInTrash(FileSystem fs, Path trashRoot, String pathname)
    * @param base - the base path where files are created
    * @throws IOException
    */
-  protected static void trashShell(final FileSystem fs, final Path base)
+  public static void trashShell(final FileSystem fs, final Path base)
       throws IOException {
     Configuration conf = new Configuration();
     conf.set("fs.defaultFS", fs.getUri().toString());


@@ -439,6 +439,8 @@ Branch-2 ( Unreleased changes )
     so that V conforms to boolean compiling HttpFSServer.java with OpenJDK
     (adi2 via tucu)
 
+    HDFS-3603. Decouple TestHDFSTrash from TestTrash. (Jason Lowe via eli)
+
 Release 2.0.0-alpha - 05-23-2012
 
   INCOMPATIBLE CHANGES


@@ -19,46 +19,45 @@
 import java.io.IOException;
 
-import junit.extensions.TestSetup;
-import junit.framework.Test;
-import junit.framework.TestSuite;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.fs.TestTrash;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
+import org.junit.Test;
 
 /**
  * This class tests commands from Trash.
  */
-public class TestHDFSTrash extends TestTrash {
+public class TestHDFSTrash {
 
   private static MiniDFSCluster cluster = null;
 
-  public static Test suite() {
-    TestSetup setup = new TestSetup(new TestSuite(TestHDFSTrash.class)) {
-      protected void setUp() throws Exception {
-        Configuration conf = new HdfsConfiguration();
-        cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
-      }
-      protected void tearDown() throws Exception {
-        if (cluster != null) { cluster.shutdown(); }
-      }
-    };
-    return setup;
-  }
+  @BeforeClass
+  public static void setUp() throws Exception {
+    Configuration conf = new HdfsConfiguration();
+    cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
+  }
+
+  @AfterClass
+  public static void tearDown() {
+    if (cluster != null) { cluster.shutdown(); }
+  }
 
   /**
    * Tests Trash on HDFS
    */
+  @Test
   public void testTrash() throws IOException {
-    trashShell(cluster.getFileSystem(), new Path("/"));
+    TestTrash.trashShell(cluster.getFileSystem(), new Path("/"));
   }
 
+  @Test
   public void testNonDefaultFS() throws IOException {
     FileSystem fs = cluster.getFileSystem();
     Configuration conf = fs.getConf();
     conf.set(DFSConfigKeys.FS_DEFAULT_NAME_KEY, fs.getUri().toString());
-    trashNonDefaultFS(conf);
+    TestTrash.trashNonDefaultFS(conf);
   }
 }
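
Because TestHDFSTrash no longer extends TestTrash, running it executes only its own @Test methods rather than every case inherited from TestTrash. A minimal sketch of checking that with the plain JUnit 4 runner (the package name org.apache.hadoop.hdfs is assumed here from the test's unqualified use of MiniDFSCluster and HdfsConfiguration; the runner class itself is illustrative):

import org.junit.runner.JUnitCore;
import org.junit.runner.Result;

// Illustrative sketch only: run the decoupled test class on its own and report the counts.
public class RunTestHDFSTrash {
  public static void main(String[] args) {
    // Package assumed from the unqualified HDFS types the test uses.
    Result result = JUnitCore.runClasses(org.apache.hadoop.hdfs.TestHDFSTrash.class);
    System.out.println("Tests run: " + result.getRunCount()
        + ", failures: " + result.getFailureCount());
  }
}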