MAPREDUCE-6543. Migrate MR client test cases part 2. Contributed by Dustin Cote.

commit 0050fa5f1c
parent 80182809ae
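The hunks below all apply the same mechanical JUnit 3 to JUnit 4 conversion to the jobclient tests. As a rough sketch of the pattern (the class and method names here are invented for illustration, not taken from the patch):

// JUnit 3 style being removed: the class extends TestCase, the runner picks up
// public test* methods, and setUp()/tearDown() are inherited overrides.
import junit.framework.TestCase;

public class ExampleJUnit3Test extends TestCase {
  protected void setUp() throws Exception { /* build fixture */ }
  protected void tearDown() throws Exception { /* clean up */ }
  public void testSomething() throws Exception {
    assertEquals(4, 2 + 2);
  }
}

// JUnit 4 style being introduced: no base class, annotated lifecycle and test
// methods, assertions statically imported from org.junit.Assert, and @Ignore
// kept on tests that were already disabled.
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.assertEquals;

public class ExampleJUnit4Test {
  @Before public void setUp() throws Exception { /* build fixture */ }
  @After public void tearDown() throws Exception { /* clean up */ }
  @Test public void testSomething() throws Exception {
    assertEquals(4, 2 + 2);
  }
}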
@@ -28,8 +28,6 @@
import java.util.Date;
import java.util.StringTokenizer;
import junit.framework.TestCase;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;

@@ -39,8 +37,9 @@
import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.mapred.*;
import org.junit.Ignore;
import org.junit.Test;
/**
/**
 * Distributed i/o benchmark.
 * <p>
 * This test writes into or reads from a specified number of files.

@@ -68,7 +67,7 @@
 * </ul>
 */
@Ignore
public class DFSCIOTest extends TestCase {
public class DFSCIOTest {
// Constants
private static final Log LOG = LogFactory.getLog(DFSCIOTest.class);
private static final int TEST_TYPE_READ = 0;

@@ -98,6 +97,7 @@ public class DFSCIOTest extends TestCase {
 *
 * @throws Exception
 */
@Test
public void testIOs() throws Exception {
testIOs(10, 10);
}
@@ -34,8 +34,6 @@
import java.net.InetSocketAddress;
import java.net.URI;
import junit.framework.TestCase;
import org.apache.commons.logging.Log;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;

@@ -50,8 +48,15 @@
import org.apache.hadoop.mapred.lib.LongSumReducer;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.StringUtils;
import org.junit.Test;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotSame;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.fail;
public class TestFileSystem extends TestCase {
public class TestFileSystem {
private static final Log LOG = FileSystem.LOG;
private static Configuration conf = new Configuration();

@@ -66,6 +71,7 @@ public class TestFileSystem extends TestCase {
private static Path READ_DIR = new Path(ROOT, "fs_read");
private static Path DATA_DIR = new Path(ROOT, "fs_data");
@Test
public void testFs() throws Exception {
testFs(10 * MEGA, 100, 0);
}

@@ -90,7 +96,8 @@ public static void testFs(long megaBytes, int numFiles, long seed)
fs.delete(READ_DIR, true);
}
public static void testCommandFormat() throws Exception {
@Test
public void testCommandFormat() throws Exception {
// This should go to TestFsShell.java when it is added.
CommandFormat cf;
cf= new CommandFormat("copyToLocal", 2,2,"crc","ignoreCrc");

@@ -488,6 +495,7 @@ public static void main(String[] args) throws Exception {
}
}
@Test
public void testFsCache() throws Exception {
{
long now = System.currentTimeMillis();

@@ -561,6 +569,7 @@ static void checkPath(MiniDFSCluster cluster, FileSystem fileSys) throws IOExcep
+ StringUtils.toUpperCase(add.getHostName()) + ":" + add.getPort()));
}
@Test
public void testFsClose() throws Exception {
{
Configuration conf = new Configuration();

@@ -569,6 +578,7 @@ public void testFsClose() throws Exception {
}
}
@Test
public void testFsShutdownHook() throws Exception {
final Set<FileSystem> closed = Collections.synchronizedSet(new HashSet<FileSystem>());
Configuration conf = new Configuration();

@@ -600,7 +610,7 @@ public void testFsShutdownHook() throws Exception {
assertTrue(closed.contains(fsWithoutAuto));
}
@Test
public void testCacheKeysAreCaseInsensitive()
throws Exception
{
@@ -23,19 +23,18 @@
import java.io.OutputStreamWriter;
import java.io.File;
import junit.framework.TestCase;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
/**
 * Test Job History Log Analyzer.
 *
 * @see JHLogAnalyzer
 */
public class TestJHLA extends TestCase {
public class TestJHLA {
private static final Log LOG = LogFactory.getLog(JHLogAnalyzer.class);
private String historyLog = System.getProperty("test.build.data",
"build/test/data") + "/history/test.log";

@@ -133,6 +132,7 @@ public void tearDown() throws Exception {
/**
 * Run log analyzer in test mode for file test.log.
 */
@Test
public void testJHLA() {
String[] args = {"-test", historyLog, "-jobDelimiter", ".!!FILE=.*!!"};
JHLogAnalyzer.main(args);
@@ -32,21 +32,25 @@
import org.apache.hadoop.io.compress.DefaultCodec;
import org.apache.hadoop.mapred.*;
import junit.framework.TestCase;
import org.apache.commons.logging.*;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
public class TestSequenceFileMergeProgress extends TestCase {
public class TestSequenceFileMergeProgress {
private static final Log LOG = FileInputFormat.LOG;
private static final int RECORDS = 10000;
@Test
public void testMergeProgressWithNoCompression() throws IOException {
runTest(SequenceFile.CompressionType.NONE);
}
@Test
public void testMergeProgressWithRecordCompression() throws IOException {
runTest(SequenceFile.CompressionType.RECORD);
}
@Test
public void testMergeProgressWithBlockCompression() throws IOException {
runTest(SequenceFile.CompressionType.BLOCK);
}

@@ -92,7 +96,7 @@ public void runTest(CompressionType compressionType) throws IOException {
count++;
}
assertEquals(RECORDS, count);
assertEquals(1.0f, rIter.getProgress().get());
assertEquals(1.0f, rIter.getProgress().get(), 0.0000);
}
}
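The only non-mechanical change in this hunk is the extra 0.0000 argument: JUnit 4's org.junit.Assert deprecates assertEquals on bare float/double values, so an explicit comparison tolerance (delta) has to be passed. A minimal illustration (the variable name is only a stand-in for rIter.getProgress().get()):

// Deprecated in JUnit 4 and ambiguous for floating point values:
// assertEquals(1.0f, progress);
// Preferred three-argument overload with an explicit tolerance:
assertEquals(1.0f, progress, 0.0000f);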
@@ -17,10 +17,11 @@
 */
package org.apache.hadoop.mapred;
import junit.framework.TestCase;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.junit.After;
import org.junit.Before;
import java.io.IOException;
import java.util.Map;

@@ -41,7 +42,7 @@
 * <p/>
 * The DFS filesystem is formated before the testcase starts and after it ends.
 */
public abstract class ClusterMapReduceTestCase extends TestCase {
public abstract class ClusterMapReduceTestCase {
private MiniDFSCluster dfsCluster = null;
private MiniMRCluster mrCluster = null;

@@ -50,9 +51,8 @@ public abstract class ClusterMapReduceTestCase extends TestCase {
 *
 * @throws Exception
 */
protected void setUp() throws Exception {
super.setUp();
@Before
public void setUp() throws Exception {
startCluster(true, null);
}

@@ -139,9 +139,9 @@ protected void stopCluster() throws Exception {
 *
 * @throws Exception
 */
protected void tearDown() throws Exception {
@After
public void tearDown() throws Exception {
stopCluster();
super.tearDown();
}
/**
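Because JUnit 4 also runs @Before/@After methods declared in superclasses, annotating the lifecycle methods once in this abstract base class is enough for the concrete tests that extend it (TestClusterMapReduceTestCase, TestJobName and TestBadRecords in the hunks below). An abbreviated sketch of the resulting shape, with method bodies trimmed to the calls shown in the diff:

public abstract class ClusterMapReduceTestCase {
  @Before
  public void setUp() throws Exception { startCluster(true, null); }
  @After
  public void tearDown() throws Exception { stopCluster(); }
}

// A subclass only declares @Test methods; the inherited @Before/@After
// still run around each of them.
public class TestClusterMapReduceTestCase extends ClusterMapReduceTestCase {
  @Test
  public void testMapReduce() throws Exception { _testMapReduce(false); }
}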
@@ -28,13 +28,13 @@
import org.apache.hadoop.ipc.TestRPC.TestProtocol;
import org.apache.hadoop.mapred.AuditLogger.Keys;
import org.apache.hadoop.net.NetUtils;
import junit.framework.TestCase;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
/**
 * Tests {@link AuditLogger}.
 */
public class TestAuditLogger extends TestCase {
public class TestAuditLogger {
private static final String USER = "test";
private static final String OPERATION = "oper";
private static final String TARGET = "tgt";

@@ -44,6 +44,7 @@ public class TestAuditLogger extends TestCase {
/**
 * Test the AuditLog format with key-val pair.
 */
@Test
public void testKeyValLogFormat() {
StringBuilder actLog = new StringBuilder();
StringBuilder expLog = new StringBuilder();

@@ -114,6 +115,7 @@ private void testFailureLogFormat(boolean checkIP) {
/**
 * Test {@link AuditLogger} without IP set.
 */
@Test
public void testAuditLoggerWithoutIP() throws Exception {
// test without ip
testSuccessLogFormat(false);

@@ -137,6 +139,7 @@ public void ping() {
/**
 * Test {@link AuditLogger} with IP set.
 */
@Test
public void testAuditLoggerWithIP() throws Exception {
Configuration conf = new Configuration();
// start the IPC server
@@ -40,6 +40,11 @@
import org.apache.hadoop.mapreduce.TaskCounter;
import org.apache.hadoop.util.ReflectionUtils;
import org.junit.Ignore;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.assertNotNull;
@Ignore
public class TestBadRecords extends ClusterMapReduceTestCase {

@@ -206,7 +211,8 @@ private List<String> getProcessed(List<String> inputs, List<String> badRecs) {
}
return processed;
}
@Test
public void testBadMapRed() throws Exception {
JobConf conf = createJobConf();
conf.setMapperClass(BadMapper.class);
@@ -29,6 +29,12 @@
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.junit.Test;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertFalse;
public class TestClusterMapReduceTestCase extends ClusterMapReduceTestCase {
public void _testMapReduce(boolean restart) throws Exception {
OutputStream os = getFileSystem().create(new Path(getInputDir(), "text.txt"));

@@ -85,14 +91,17 @@ public void _testMapReduce(boolean restart) throws Exception {
}
@Test
public void testMapReduce() throws Exception {
_testMapReduce(false);
}
@Test
public void testMapReduceRestarting() throws Exception {
_testMapReduce(true);
}
@Test
public void testDFSRestart() throws Exception {
Path file = new Path(getInputDir(), "text.txt");
OutputStream os = getFileSystem().create(file);

@@ -109,6 +118,7 @@ public void testDFSRestart() throws Exception {
}
@Test
public void testMRConfig() throws Exception {
JobConf conf = createJobConf();
assertNull(conf.get("xyz"));
@@ -21,15 +21,15 @@
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapred.UtilsForTests.RandomInputFormat;
import org.apache.hadoop.mapreduce.MRConfig;
import org.junit.Test;
import junit.framework.TestCase;
import java.io.*;
import java.util.*;
/**
 * TestCollect checks if the collect can handle simultaneous invocations.
 */
public class TestCollect extends TestCase
public class TestCollect
{
final static Path OUTPUT_DIR = new Path("build/test/test.collect.output");
static final int NUM_FEEDERS = 10;

@@ -127,7 +127,7 @@ public void configure(JobConf conf) throws IOException {
conf.setNumMapTasks(1);
conf.setNumReduceTasks(1);
}
@Test
public void testCollect() throws IOException {
JobConf conf = new JobConf();
configure(conf);

@@ -144,9 +144,5 @@ public void testCollect() throws IOException {
fs.delete(OUTPUT_DIR, true);
}
}
public static void main(String[] args) throws IOException {
new TestCollect().testCollect();
}
}
@@ -21,28 +21,29 @@
import java.io.FileOutputStream;
import java.io.IOException;
import junit.framework.TestCase;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.ToolRunner;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.junit.Ignore;
import org.junit.Test;
import static org.junit.Assert.assertTrue;
/**
 * check for the job submission options of
 * -libjars -files -archives
 */
@Ignore
public class TestCommandLineJobSubmission extends TestCase {
// Input output paths for this..
public class TestCommandLineJobSubmission {
// Input output paths for this..
// these are all dummy and does not test
// much in map reduce except for the command line
// params
static final Path input = new Path("/test/input/");
static final Path output = new Path("/test/output");
File buildDir = new File(System.getProperty("test.build.data", "/tmp"));
@Test
public void testJobShell() throws Exception {
MiniDFSCluster dfs = null;
MiniMRCluster mr = null;
@@ -23,11 +23,12 @@
import org.apache.hadoop.mapreduce.MapReduceTestUtil;
import org.apache.hadoop.mapreduce.lib.fieldsel.FieldSelectionHelper;
import org.apache.hadoop.mapreduce.lib.fieldsel.TestMRFieldSelection;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import junit.framework.TestCase;
import java.text.NumberFormat;
public class TestFieldSelection extends TestCase {
public class TestFieldSelection {
private static NumberFormat idFormat = NumberFormat.getInstance();
static {

@@ -35,6 +36,7 @@ public class TestFieldSelection extends TestCase {
idFormat.setGroupingUsed(false);
}
@Test
public void testFieldSelection() throws Exception {
launch();
}
@@ -17,12 +17,14 @@
 */
package org.apache.hadoop.mapred;
import junit.framework.TestCase;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import java.io.IOException;
import java.io.Writer;

@@ -30,7 +32,7 @@
import java.util.Set;
import java.util.HashSet;
public class TestFileInputFormatPathFilter extends TestCase {
public class TestFileInputFormatPathFilter {
public static class DummyFileInputFormat extends FileInputFormat {

@@ -55,12 +57,12 @@ public RecordReader getRecordReader(InputSplit split, JobConf job,
new Path(new Path(System.getProperty("test.build.data", "."), "data"),
"TestFileInputFormatPathFilter");
@Before
public void setUp() throws Exception {
tearDown();
localFs.mkdirs(workDir);
}
@After
public void tearDown() throws Exception {
if (localFs.exists(workDir)) {
localFs.delete(workDir, true);

@@ -129,18 +131,19 @@ private void _testInputFiles(boolean withFilter, boolean withGlob) throws Except
assertEquals(createdFiles, computedFiles);
}
@Test
public void testWithoutPathFilterWithoutGlob() throws Exception {
_testInputFiles(false, false);
}
@Test
public void testWithoutPathFilterWithGlob() throws Exception {
_testInputFiles(false, true);
}
@Test
public void testWithPathFilterWithoutGlob() throws Exception {
_testInputFiles(true, false);
}
@Test
public void testWithPathFilterWithGlob() throws Exception {
_testInputFiles(true, true);
}
@@ -20,10 +20,11 @@
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.net.NetworkTopology;
import junit.framework.TestCase;
public class TestGetSplitHosts extends TestCase {
import org.junit.Test;
import static org.junit.Assert.assertTrue;
public class TestGetSplitHosts {
@Test
public void testGetSplitHosts() throws Exception {
int numBlocks = 3;
@@ -21,11 +21,12 @@
import org.apache.hadoop.fs.ChecksumException;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.junit.Test;
import static org.junit.Assert.fail;
import static org.junit.Assert.assertEquals;
import junit.framework.TestCase;
public class TestIFileStreams extends TestCase {
public class TestIFileStreams {
@Test
public void testIFileStream() throws Exception {
final int DLEN = 100;
DataOutputBuffer dob = new DataOutputBuffer(DLEN + 4);

@@ -42,7 +43,7 @@ public void testIFileStream() throws Exception {
}
ifis.close();
}
@Test
public void testBadIFileStream() throws Exception {
final int DLEN = 100;
DataOutputBuffer dob = new DataOutputBuffer(DLEN + 4);

@@ -73,7 +74,7 @@ public void testBadIFileStream() throws Exception {
}
fail("Did not detect bad data in checksum");
}
@Test
public void testBadLength() throws Exception {
final int DLEN = 100;
DataOutputBuffer dob = new DataOutputBuffer(DLEN + 4);
@@ -17,14 +17,15 @@
 */
package org.apache.hadoop.mapred;
import junit.framework.TestCase;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.util.StringUtils;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
public class TestInputPath extends TestCase {
public class TestInputPath {
@Test
public void testInputPath() throws Exception {
JobConf jobConf = new JobConf();
Path workingDir = jobConf.getWorkingDirectory();
@@ -26,8 +26,6 @@
import java.util.Iterator;
import java.util.StringTokenizer;
import junit.framework.TestCase;
import org.apache.commons.io.FileUtils;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;

@@ -36,8 +34,11 @@
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.serializer.JavaSerializationComparator;
import org.apache.hadoop.mapreduce.MRConfig;
import org.junit.Test;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.assertEquals;
public class TestJavaSerialization extends TestCase {
public class TestJavaSerialization {
private static String TEST_ROOT_DIR =
new File(System.getProperty("test.build.data", "/tmp")).toURI()

@@ -90,7 +91,7 @@ private void cleanAndCreateInput(FileSystem fs) throws IOException {
wr.write("b a\n");
wr.close();
}
@Test
public void testMapReduceJob() throws Exception {
JobConf conf = new JobConf(TestJavaSerialization.class);

@@ -149,6 +150,7 @@ public void testMapReduceJob() throws Exception {
 * coupled to Writable types, if so, the job will fail.
 *
 */
@Test
public void testWriteToSequencefile() throws Exception {
JobConf conf = new JobConf(TestJavaSerialization.class);
conf.setJobName("JavaSerialization");
@@ -29,8 +29,13 @@
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.lib.IdentityMapper;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
public class TestJobName extends ClusterMapReduceTestCase {
@Test
public void testComplexName() throws Exception {
OutputStream os = getFileSystem().create(new Path(getInputDir(),
"text.txt"));

@@ -65,6 +70,7 @@ public void testComplexName() throws Exception {
reader.close();
}
@Test
public void testComplexNameWithRegex() throws Exception {
OutputStream os = getFileSystem().create(new Path(getInputDir(),
"text.txt"));
@@ -21,8 +21,6 @@
import java.io.DataOutputStream;
import java.io.IOException;
import junit.framework.TestCase;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hdfs.MiniDFSCluster;

@@ -32,11 +30,15 @@
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.MapReduceTestUtil;
import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.assertFalse;
/**
 * A JUnit test to test Job System Directory with Mini-DFS.
 */
public class TestJobSysDirWithDFS extends TestCase {
public class TestJobSysDirWithDFS {
private static final Log LOG =
LogFactory.getLog(TestJobSysDirWithDFS.class.getName());

@@ -115,7 +117,7 @@ static void runWordCount(MiniMRCluster mr, JobConf jobConf, String sysDir)
// between Job Client & Job Tracker
assertTrue(result.job.isSuccessful());
}
@Test
public void testWithDFS() throws IOException {
MiniDFSCluster dfs = null;
MiniMRCluster mr = null;
@@ -20,7 +20,6 @@
import java.io.*;
import java.util.*;
import junit.framework.TestCase;
import org.apache.commons.logging.*;
import org.apache.hadoop.fs.*;

@@ -28,8 +27,11 @@
import org.apache.hadoop.io.compress.*;
import org.apache.hadoop.util.LineReader;
import org.apache.hadoop.util.ReflectionUtils;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
public class TestKeyValueTextInputFormat extends TestCase {
public class TestKeyValueTextInputFormat {
private static final Log LOG =
LogFactory.getLog(TestKeyValueTextInputFormat.class.getName());

@@ -47,7 +49,7 @@ public class TestKeyValueTextInputFormat extends TestCase {
private static Path workDir =
new Path(new Path(System.getProperty("test.build.data", "."), "data"),
"TestKeyValueTextInputFormat");
@Test
public void testFormat() throws Exception {
JobConf job = new JobConf();
Path file = new Path(workDir, "test.txt");

@@ -134,7 +136,7 @@ private LineReader makeStream(String str) throws IOException {
(str.getBytes("UTF-8")),
defaultConf);
}
@Test
public void testUTF8() throws Exception {
LineReader in = null;

@@ -153,7 +155,7 @@ public void testUTF8() throws Exception {
}
}
}
@Test
public void testNewLines() throws Exception {
LineReader in = null;
try {

@@ -219,7 +221,8 @@ private static List<Text> readSplit(KeyValueTextInputFormat format,
/**
 * Test using the gzip codec for reading
 */
public static void testGzip() throws IOException {
@Test
public void testGzip() throws IOException {
JobConf job = new JobConf();
CompressionCodec gzip = new GzipCodec();
ReflectionUtils.setConf(gzip, job);
@@ -35,14 +35,15 @@
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapred.lib.LazyOutputFormat;
import junit.framework.TestCase;
import org.junit.Test;
import static org.junit.Assert.assertTrue;
/**
 * A JUnit test to test the Map-Reduce framework's feature to create part
 * files only if there is an explicit output.collect. This helps in preventing
 * 0 byte files
 */
public class TestLazyOutput extends TestCase {
public class TestLazyOutput {
private static final int NUM_HADOOP_SLAVES = 3;
private static final int NUM_MAPS_PER_NODE = 2;
private static final Path INPUT = new Path("/testlazy/input");

@@ -132,7 +133,7 @@ public void createInput(FileSystem fs, int numMappers) throws Exception {
}
}
@Test
public void testLazyOutput() throws Exception {
MiniDFSCluster dfs = null;
MiniMRCluster mr = null;
@@ -17,16 +17,6 @@
 */
package org.apache.hadoop.mapred;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.concurrent.TimeoutException;
import junit.framework.TestCase;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FSDataOutputStream;

@@ -36,9 +26,21 @@
import org.apache.hadoop.hdfs.DFSTestUtil;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.io.Text;
import org.junit.After;
import org.junit.Test;
import java.io.DataOutputStream;
import java.io.IOException;
import java.util.concurrent.TimeoutException;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
@SuppressWarnings("deprecation")
public class TestMRCJCFileInputFormat extends TestCase {
public class TestMRCJCFileInputFormat {
Configuration conf = new Configuration();
MiniDFSCluster dfs = null;

@@ -50,6 +52,7 @@ private MiniDFSCluster newDFSCluster(JobConf conf) throws Exception {
.build();
}
@Test
public void testLocality() throws Exception {
JobConf job = new JobConf(conf);
dfs = newDFSCluster(job);

@@ -109,6 +112,7 @@ private void createInputs(FileSystem fs, Path inDir, String fileName)
DFSTestUtil.waitReplication(fs, path, replication);
}
@Test
public void testNumInputs() throws Exception {
JobConf job = new JobConf(conf);
dfs = newDFSCluster(job);

@@ -157,6 +161,7 @@ public RecordReader<Text, Text> getRecordReader(InputSplit split,
}
}
@Test
public void testMultiLevelInput() throws Exception {
JobConf job = new JobConf(conf);

@@ -195,6 +200,7 @@ public void testMultiLevelInput() throws Exception {
}
@SuppressWarnings("rawtypes")
@Test
public void testLastInputSplitAtSplitBoundary() throws Exception {
FileInputFormat fif = new FileInputFormatForTest(1024l * 1024 * 1024,
128l * 1024 * 1024);

@@ -208,6 +214,7 @@ public void testLastInputSplitAtSplitBoundary() throws Exception {
}
@SuppressWarnings("rawtypes")
@Test
public void testLastInputSplitExceedingSplitBoundary() throws Exception {
FileInputFormat fif = new FileInputFormatForTest(1027l * 1024 * 1024,
128l * 1024 * 1024);

@@ -221,6 +228,7 @@ public void testLastInputSplitExceedingSplitBoundary() throws Exception {
}
@SuppressWarnings("rawtypes")
@Test
public void testLastInputSplitSingleSplit() throws Exception {
FileInputFormat fif = new FileInputFormatForTest(100l * 1024 * 1024,
128l * 1024 * 1024);

@@ -305,7 +313,7 @@ static void writeFile(Configuration conf, Path name,
DFSTestUtil.waitReplication(fileSys, name, replication);
}
@Override
@After
public void tearDown() throws Exception {
if (dfs != null) {
dfs.shutdown();
@@ -18,18 +18,25 @@
package org.apache.hadoop.mapred;
import java.io.*;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RawLocalFileSystem;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.JobStatus;
import org.junit.Test;
import java.io.File;
import java.io.IOException;
import java.net.URI;
import junit.framework.TestCase;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapred.JobContextImpl;
import org.apache.hadoop.mapred.TaskAttemptContextImpl;
import org.apache.hadoop.mapreduce.JobStatus;
public class TestMRCJCFileOutputCommitter extends TestCase {
public class TestMRCJCFileOutputCommitter {
private static Path outDir = new Path(
System.getProperty("test.build.data", "/tmp"), "output");

@@ -67,6 +74,7 @@ private void setConfForFileOutputCommitter(JobConf job) {
}
@SuppressWarnings("unchecked")
@Test
public void testCommitter() throws Exception {
JobConf job = new JobConf();
setConfForFileOutputCommitter(job);

@@ -108,6 +116,7 @@ public void testCommitter() throws Exception {
FileUtil.fullyDelete(new File(outDir.toString()));
}
@Test
public void testAbort() throws IOException {
JobConf job = new JobConf();
setConfForFileOutputCommitter(job);

@@ -161,6 +170,7 @@ public boolean delete(Path p, boolean recursive) throws IOException {
}
}
@Test
public void testFailAbort() throws IOException {
JobConf job = new JobConf();
job.set(FileSystem.FS_DEFAULT_NAME_KEY, "faildel:///");
@@ -22,8 +22,6 @@
import java.io.IOException;
import java.util.List;
import junit.framework.TestCase;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;

@@ -40,6 +38,8 @@
import org.apache.hadoop.mapreduce.split.JobSplitWriter;
import org.apache.hadoop.mapreduce.split.SplitMetaInfoReader;
import org.apache.hadoop.util.ReflectionUtils;
import org.junit.Test;
import static org.junit.Assert.assertTrue;
/**
 * Validates map phase progress.

@@ -55,7 +55,7 @@
 * once mapTask.run() is finished. Sort phase progress in map task is not
 * validated here.
 */
public class TestMapProgress extends TestCase {
public class TestMapProgress {
public static final Log LOG = LogFactory.getLog(TestMapProgress.class);
private static String TEST_ROOT_DIR;
static {

@@ -234,7 +234,8 @@ private void createInputFile(Path rootDir) throws IOException {
/**
 * Validates map phase progress after each record is processed by map task
 * using custom task reporter.
 */
 */
@Test
public void testMapProgress() throws Exception {
JobConf job = new JobConf();
fs = FileSystem.getLocal(job);
@@ -44,8 +44,8 @@
import org.apache.hadoop.io.serializer.Serializer;
import org.apache.hadoop.mapred.Task.TaskReporter;
import junit.framework.TestCase;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
@SuppressWarnings(value={"unchecked", "deprecation"})
/**

@@ -56,7 +56,7 @@
 * framework's merge on the reduce side will merge the partitions created to
 * generate the final output which is sorted on the key.
 */
public class TestMerge extends TestCase {
public class TestMerge {
private static final int NUM_HADOOP_DATA_NODES = 2;
// Number of input files is same as the number of mappers.
private static final int NUM_MAPPERS = 10;

@@ -69,6 +69,7 @@ public class TestMerge extends TestCase {
// Where output goes.
private static final Path OUTPUT = new Path("/testplugin/output");
@Test
public void testMerge() throws Exception {
MiniDFSCluster dfsCluster = null;
MiniMRClientCluster mrCluster = null;
@@ -18,14 +18,16 @@
package org.apache.hadoop.mapred;
import org.junit.Test;
import java.io.IOException;
import junit.framework.TestCase;
/**
 * A Unit-test to test bringup and shutdown of Mini Map-Reduce Cluster.
 */
public class TestMiniMRBringup extends TestCase {
public class TestMiniMRBringup {
@Test
public void testBringUp() throws IOException {
MiniMRCluster mr = null;
try {
@@ -18,20 +18,23 @@
package org.apache.hadoop.mapred;
import java.io.*;
import junit.framework.TestCase;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.mapred.MRCaching.TestResult;
import org.junit.Ignore;
import org.junit.Test;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
/**
 * A JUnit test to test caching with DFS
 *
 */
@Ignore
public class TestMiniMRDFSCaching extends TestCase {
public class TestMiniMRDFSCaching {
@Test
public void testWithDFS() throws IOException {
MiniMRCluster mr = null;
MiniDFSCluster dfs = null;

@@ -70,9 +73,4 @@ public void testWithDFS() throws IOException {
}
}
}
public static void main(String[] argv) throws Exception {
TestMiniMRDFSCaching td = new TestMiniMRDFSCaching();
td.testWithDFS();
}
}
@@ -21,17 +21,17 @@
import java.util.BitSet;
import java.util.HashMap;
import java.util.Random;
import junit.framework.TestCase;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
public class TestMultiFileInputFormat extends TestCase{
public class TestMultiFileInputFormat {
private static JobConf job = new JobConf();

@@ -79,7 +79,8 @@ private Path initFiles(FileSystem fs, int numFiles, int numBytes) throws IOExcep
FileInputFormat.setInputPaths(job, multiFileDir);
return multiFileDir;
}
@Test
public void testFormat() throws IOException {
LOG.info("Test started");
LOG.info("Max split count = " + MAX_SPLIT_COUNT);

@@ -122,7 +123,8 @@ public void testFormat() throws IOException {
}
LOG.info("Test Finished");
}
@Test
public void testFormatWithLessPathsThanSplits() throws Exception {
MultiFileInputFormat<Text,Text> format = new DummyMultiFileInputFormat();
FileSystem fs = FileSystem.getLocal(job);

@@ -135,9 +137,4 @@ public void testFormatWithLessPathsThanSplits() throws Exception {
initFiles(fs, 2, 500);
assertEquals(2, format.getSplits(job, 4).length);
}
public static void main(String[] args) throws Exception{
TestMultiFileInputFormat test = new TestMultiFileInputFormat();
test.testFormat();
}
}
@@ -27,16 +27,19 @@
import java.io.OutputStream;
import java.util.Arrays;
import junit.framework.TestCase;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
/**
 *
 * test MultiFileSplit class
 */
public class TestMultiFileSplit extends TestCase{
public class TestMultiFileSplit {
@Test
public void testReadWrite() throws Exception {
MultiFileSplit split = new MultiFileSplit(new JobConf(), new Path[] {new Path("/test/path/1"), new Path("/test/path/2")}, new long[] {100,200});

@@ -70,6 +73,7 @@ public void testReadWrite() throws Exception {
 * test method getLocations
 * @throws IOException
 */
@Test
public void testgetLocations() throws IOException{
JobConf job= new JobConf();
@@ -17,10 +17,6 @@
 */
package org.apache.hadoop.mapred;
import java.io.IOException;
import junit.framework.TestCase;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

@@ -32,12 +28,17 @@
import org.apache.hadoop.mapreduce.JobCounter;
import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
import org.junit.Ignore;
import org.junit.Test;
import java.io.IOException;
import static org.junit.Assert.assertEquals;
/**
 * This test checks whether the task caches are created and used properly.
 */
@Ignore
public class TestMultipleLevelCaching extends TestCase {
public class TestMultipleLevelCaching {
private static final int MAX_LEVEL = 5;
final Path inDir = new Path("/cachetesting");
final Path outputPath = new Path("/output");

@@ -71,6 +72,7 @@ private static String getRack(int id, int level) {
return rack.toString();
}
@Test
public void testMultiLevelCaching() throws Exception {
for (int i = 1 ; i <= MAX_LEVEL; ++i) {
testCachingAtLevel(i);
@@ -18,15 +18,19 @@
package org.apache.hadoop.mapred;
import java.io.*;
import junit.framework.TestCase;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.lib.MultipleTextOutputFormat;
import org.junit.Test;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.*;
import java.io.File;
import java.io.IOException;
import org.apache.hadoop.mapred.lib.*;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
public class TestMultipleTextOutputFormat extends TestCase {
public class TestMultipleTextOutputFormat {
private static JobConf defaultConf = new JobConf();
private static FileSystem localFs = null;

@@ -83,7 +87,8 @@ private static void test2(JobConf job) throws IOException {
writeData(rw);
rw.close(null);
}
@Test
public void testFormat() throws Exception {
JobConf job = new JobConf();
job.set(JobContext.TASK_ATTEMPT_ID, attempt);

@@ -145,8 +150,4 @@ public void testFormat() throws Exception {
//System.out.printf("File_2 output: %s\n", output);
assertEquals(output, expectedOutput.toString());
}
public static void main(String[] args) throws Exception {
new TestMultipleTextOutputFormat().testFormat();
}
}
@@ -19,17 +19,18 @@
package org.apache.hadoop.mapred;
import org.apache.hadoop.mapreduce.TaskCounter;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
public class TestReduceFetch extends TestReduceFetchFromPartialMem {
static {
setSuite(TestReduceFetch.class);
}
/**
 * Verify that all segments are read from disk
 * @throws Exception might be thrown
 */
@Test
public void testReduceFromDisk() throws Exception {
final int MAP_TASKS = 8;
JobConf job = mrCluster.createJobConf();

@@ -53,6 +54,7 @@ public void testReduceFromDisk() throws Exception {
 * Verify that no segment hits disk.
 * @throws Exception might be thrown
 */
@Test
public void testReduceFromMem() throws Exception {
final int MAP_TASKS = 3;
JobConf job = mrCluster.createJobConf();
@@ -18,10 +18,6 @@
package org.apache.hadoop.mapred;
import junit.extensions.TestSetup;
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

@@ -30,7 +26,9 @@
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparator;
import org.apache.hadoop.mapreduce.TaskCounter;
import org.apache.hadoop.mapreduce.MRConfig;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import java.io.DataInput;
import java.io.DataOutput;

@@ -39,34 +37,27 @@
import java.util.Formatter;
import java.util.Iterator;
public class TestReduceFetchFromPartialMem extends TestCase {
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
public class TestReduceFetchFromPartialMem {
protected static MiniMRCluster mrCluster = null;
protected static MiniDFSCluster dfsCluster = null;
protected static TestSuite mySuite;
protected static void setSuite(Class<? extends TestCase> klass) {
mySuite = new TestSuite(klass);
@Before
public void setUp() throws Exception {
Configuration conf = new Configuration();
dfsCluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
mrCluster = new MiniMRCluster(2,
dfsCluster.getFileSystem().getUri().toString(), 1);
}
static {
setSuite(TestReduceFetchFromPartialMem.class);
}
public static Test suite() {
TestSetup setup = new TestSetup(mySuite) {
protected void setUp() throws Exception {
Configuration conf = new Configuration();
dfsCluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
mrCluster = new MiniMRCluster(2,
dfsCluster.getFileSystem().getUri().toString(), 1);
}
protected void tearDown() throws Exception {
if (dfsCluster != null) { dfsCluster.shutdown(); }
if (mrCluster != null) { mrCluster.shutdown(); }
}
};
return setup;
@After
public void tearDown() throws Exception {
if (dfsCluster != null) { dfsCluster.shutdown(); }
if (mrCluster != null) { mrCluster.shutdown(); }
}
private static final String tagfmt = "%04d";

@@ -78,6 +69,7 @@ private static int getValLen(int id, int nMaps) {
}
/** Verify that at least one segment does not hit disk */
@Test
public void testReduceFromPartialMem() throws Exception {
final int MAP_TASKS = 7;
JobConf job = mrCluster.createJobConf();
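Worth noting: the old junit.extensions.TestSetup wrapper started the mini DFS/MR clusters once for the whole suite, while the migrated @Before/@After methods start and stop them around every test method. If once-per-class behaviour were wanted instead, JUnit 4's static @BeforeClass/@AfterClass hooks would be the closer equivalent; a sketch of that alternative (not what this patch does), reusing the static cluster fields shown above:

@BeforeClass
public static void startClusters() throws Exception {
  Configuration conf = new Configuration();
  dfsCluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
  mrCluster = new MiniMRCluster(2,
      dfsCluster.getFileSystem().getUri().toString(), 1);
}

@AfterClass
public static void stopClusters() throws Exception {
  if (dfsCluster != null) { dfsCluster.shutdown(); }
  if (mrCluster != null) { mrCluster.shutdown(); }
}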
@@ -17,10 +17,6 @@
 */
package org.apache.hadoop.mapred;
import java.io.IOException;
import junit.framework.TestCase;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocalFileSystem;

@@ -30,11 +26,17 @@
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.DefaultCodec;
import org.apache.hadoop.util.Progressable;
import org.junit.Test;
import java.io.IOException;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
/**
 * This test exercises the ValueIterator.
 */
public class TestReduceTask extends TestCase {
public class TestReduceTask {
static class NullProgress implements Progressable {
public void progress() { }

@@ -119,9 +121,10 @@ public void runValueIterator(Path tmpDir, Pair[] vals,
}
assertEquals(vals.length, i);
// make sure we have progress equal to 1.0
assertEquals(1.0f, rawItr.getProgress().get());
assertEquals(1.0f, rawItr.getProgress().get(),0.0000);
}
@Test
public void testValueIterator() throws Exception {
Path tmpDir = new Path("build/test/test.reduce.task");
Configuration conf = new Configuration();

@@ -129,7 +132,8 @@ public void testValueIterator() throws Exception {
runValueIterator(tmpDir, testCase, conf, null);
}
}
@Test
public void testValueIteratorWithCompression() throws Exception {
Path tmpDir = new Path("build/test/test.reduce.task.compression");
Configuration conf = new Configuration();
@@ -18,19 +18,26 @@
package org.apache.hadoop.mapred;
import org.apache.commons.logging.Log;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
import org.junit.Test;
import java.io.IOException;
import java.util.Random;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.*;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import junit.framework.TestCase;
import org.apache.commons.logging.*;
public class TestSequenceFileAsBinaryInputFormat extends TestCase {
public class TestSequenceFileAsBinaryInputFormat {
private static final Log LOG = FileInputFormat.LOG;
private static final int RECORDS = 10000;
@Test
public void testBinary() throws IOException {
JobConf job = new JobConf();
FileSystem fs = FileSystem.getLocal(job);
@@ -18,24 +18,35 @@
package org.apache.hadoop.mapred;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.BooleanWritable;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.FloatWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.junit.Test;
import java.io.IOException;
import java.util.Random;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.*;
import org.apache.hadoop.io.SequenceFile.CompressionType;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import junit.framework.TestCase;
import org.apache.commons.logging.*;
public class TestSequenceFileAsBinaryOutputFormat extends TestCase {
public class TestSequenceFileAsBinaryOutputFormat {
private static final Log LOG =
LogFactory.getLog(TestSequenceFileAsBinaryOutputFormat.class.getName());
private static final int RECORDS = 10000;
// A random task attempt id for testing.
private static final String attempt = "attempt_200707121733_0001_m_000000_0";
@Test
public void testBinary() throws IOException {
JobConf job = new JobConf();
FileSystem fs = FileSystem.getLocal(job);

@@ -129,7 +140,8 @@ public void testBinary() throws IOException {
assertEquals("Some records not found", RECORDS, count);
}
public void testSequenceOutputClassDefaultsToMapRedOutputClass()
@Test
public void testSequenceOutputClassDefaultsToMapRedOutputClass()
throws IOException {
JobConf job = new JobConf();
FileSystem fs = FileSystem.getLocal(job);

@@ -163,6 +175,7 @@ public void testSequenceOutputClassDefaultsToMapRedOutputClass()
job));
}
@Test
public void testcheckOutputSpecsForbidRecordCompression() throws IOException {
JobConf job = new JobConf();
FileSystem fs = FileSystem.getLocal(job);
@@ -18,22 +18,29 @@
package org.apache.hadoop.mapred;
import java.io.*;
import java.util.*;
import junit.framework.TestCase;
import org.apache.commons.logging.Log;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
import org.junit.Test;
import org.apache.commons.logging.*;
import java.util.BitSet;
import java.util.Random;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.*;
import org.apache.hadoop.conf.*;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
public class TestSequenceFileAsTextInputFormat extends TestCase {
public class TestSequenceFileAsTextInputFormat {
private static final Log LOG = FileInputFormat.LOG;
private static int MAX_LENGTH = 10000;
private static Configuration conf = new Configuration();
@Test
public void testFormat() throws Exception {
JobConf job = new JobConf(conf);
FileSystem fs = FileSystem.getLocal(conf);

@@ -112,8 +119,4 @@ public void testFormat() throws Exception {
}
}
public static void main(String[] args) throws Exception {
new TestSequenceFileAsTextInputFormat().testFormat();
}
}
@@ -18,17 +18,21 @@
package org.apache.hadoop.mapred;
import java.io.*;
import java.util.*;
import junit.framework.TestCase;
import org.apache.commons.logging.Log;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.Text;
import org.junit.Test;
import org.apache.commons.logging.*;
import java.io.IOException;
import java.util.Random;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.*;
import org.apache.hadoop.conf.*;
import static org.junit.Assert.assertEquals;
public class TestSequenceFileInputFilter extends TestCase {
public class TestSequenceFileInputFilter {
private static final Log LOG = FileInputFormat.LOG;
private static final int MAX_LENGTH = 15000;

@@ -97,7 +101,8 @@ private int countRecords(int numSplits) throws IOException {
}
return count;
}
@Test
public void testRegexFilter() throws Exception {
// set the filter class
LOG.info("Testing Regex Filter with patter: \\A10*");

@@ -121,6 +126,7 @@ public void testRegexFilter() throws Exception {
fs.delete(inDir, true);
}
@Test
public void testPercentFilter() throws Exception {
LOG.info("Testing Percent Filter with frequency: 1000");
// set the filter class

@@ -147,7 +153,8 @@ public void testPercentFilter() throws Exception {
// clean up
fs.delete(inDir, true);
}
@Test
public void testMD5Filter() throws Exception {
// set the filter class
LOG.info("Testing MD5 Filter with frequency: 1000");

@@ -168,9 +175,4 @@ public void testMD5Filter() throws Exception {
// clean up
fs.delete(inDir, true);
}
public static void main(String[] args) throws Exception {
TestSequenceFileInputFilter filter = new TestSequenceFileInputFilter();
filter.testRegexFilter();
}
}
@@ -18,22 +18,28 @@
package org.apache.hadoop.mapred;
import java.io.*;
import java.util.*;
import junit.framework.TestCase;
import org.apache.commons.logging.Log;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.SequenceFile;
import org.junit.Test;
import org.apache.commons.logging.*;
import java.util.BitSet;
import java.util.Random;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.*;
import org.apache.hadoop.conf.*;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
public class TestSequenceFileInputFormat extends TestCase {
public class TestSequenceFileInputFormat {
private static final Log LOG = FileInputFormat.LOG;
private static int MAX_LENGTH = 10000;
private static Configuration conf = new Configuration();
@Test
public void testFormat() throws Exception {
JobConf job = new JobConf(conf);
FileSystem fs = FileSystem.getLocal(conf);

@@ -110,7 +116,6 @@ public void testFormat() throws Exception {
}
}
public static void main(String[] args) throws Exception {
new TestSequenceFileInputFormat().testFormat();
}
@@ -17,18 +17,20 @@
 */
package org.apache.hadoop.mapred;
import java.util.Iterator;
import junit.framework.TestCase;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.mapred.SortedRanges.Range;
import org.junit.Test;
public class TestSortedRanges extends TestCase {
private static final Log LOG =
import java.util.Iterator;
import static org.junit.Assert.assertEquals;
public class TestSortedRanges {
private static final Log LOG =
LogFactory.getLog(TestSortedRanges.class);
@Test
public void testAdd() {
SortedRanges sr = new SortedRanges();
sr.add(new Range(2,9));

@@ -66,7 +68,8 @@ public void testAdd() {
assertEquals(77, it.next().longValue());
}
@Test
public void testRemove() {
SortedRanges sr = new SortedRanges();
sr.add(new Range(2,19));
@ -18,12 +18,6 @@

package org.apache.hadoop.mapred;

import java.io.DataOutputStream;
import java.io.IOException;
import java.net.URI;

import junit.framework.TestCase;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
@ -34,14 +28,20 @@
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.lib.IdentityMapper;
import org.apache.hadoop.mapred.lib.IdentityReducer;
import org.apache.hadoop.mapreduce.MRConfig;
import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
import org.apache.hadoop.util.Progressable;
import org.junit.Test;

import java.io.DataOutputStream;
import java.io.IOException;
import java.net.URI;

import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

/**
* A JUnit test to test that jobs' output filenames are not HTML-encoded (cf HADOOP-1795).
*/
public class TestSpecialCharactersInOutputPath extends TestCase {
public class TestSpecialCharactersInOutputPath {
private static final Log LOG =
LogFactory.getLog(TestSpecialCharactersInOutputPath.class.getName());

@ -96,7 +96,8 @@ public static boolean launchJob(URI fileSys,
LOG.info("job is complete: " + runningJob.isSuccessful());
return (runningJob.isSuccessful());
}

@Test
public void testJobWithDFS() throws IOException {
String namenode = null;
MiniDFSCluster dfs = null;
|
@ -19,14 +19,18 @@

import java.util.Map;

import junit.framework.TestCase;

import org.apache.hadoop.mapred.StatisticsCollector.TimeWindow;
import org.apache.hadoop.mapred.StatisticsCollector.Stat;
import org.junit.Test;

public class TestStatisticsCollector extends TestCase{
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;

public class TestStatisticsCollector {

@SuppressWarnings("rawtypes")
@Test
public void testMovingWindow() throws Exception {
StatisticsCollector collector = new StatisticsCollector(1);
TimeWindow window = new TimeWindow("test", 6, 2);
|
@ -17,6 +17,15 @@
*/
package org.apache.hadoop.mapred;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.lib.IdentityMapper;
import org.apache.hadoop.mapred.lib.IdentityReducer;
import org.junit.Test;

import java.io.BufferedReader;
import java.io.File;
import java.io.IOException;
@ -26,18 +35,10 @@
import java.io.OutputStreamWriter;
import java.io.Writer;

import junit.framework.TestCase;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.lib.IdentityMapper;
import org.apache.hadoop.mapred.lib.IdentityReducer;

public class TestUserDefinedCounters extends TestCase {

public class TestUserDefinedCounters {
private static String TEST_ROOT_DIR =
new File(System.getProperty("test.build.data", "/tmp")).toURI()
.toString().replace(' ', '+')
@ -75,6 +76,7 @@ private void cleanAndCreateInput(FileSystem fs) throws IOException {
wr.close();
}

@Test
public void testMapReduceJob() throws Exception {

JobConf conf = new JobConf(TestUserDefinedCounters.class);
|
@ -18,12 +18,6 @@

package org.apache.hadoop.mapred;

import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;

import junit.framework.TestCase;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
@ -31,8 +25,15 @@
import org.apache.hadoop.io.serializer.SerializationFactory;
import org.apache.hadoop.io.serializer.Serializer;
import org.apache.hadoop.util.GenericsUtil;
import org.junit.Test;

public class TestWritableJobConf extends TestCase {
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;

import static org.junit.Assert.assertTrue;

public class TestWritableJobConf {

private static final Configuration CONF = new Configuration();

@ -78,15 +79,17 @@ private void assertEquals(Configuration conf1, Configuration conf2) {
}
}

assertEquals(map1, map2);
assertTrue(map1.equals(map2));
}

@Test
public void testEmptyConfiguration() throws Exception {
JobConf conf = new JobConf();
Configuration deser = serDeser(conf);
assertEquals(conf, deser);
}

@Test
public void testNonEmptyConfiguration() throws Exception {
JobConf conf = new JobConf();
conf.set("a", "A");
@ -95,6 +98,7 @@ public void testNonEmptyConfiguration() throws Exception {
assertEquals(conf, deser);
}

@Test
public void testConfigurationWithDefaults() throws Exception {
JobConf conf = new JobConf(false);
conf.set("a", "A");
|
@ -18,6 +18,10 @@

package org.apache.hadoop.mapred;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.doReturn;
@ -38,8 +42,6 @@
import java.util.List;
import java.util.Map;

import junit.framework.TestCase;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
@ -113,7 +115,7 @@
* Test YarnRunner and make sure the client side plugin works
* fine
*/
public class TestYARNRunner extends TestCase {
public class TestYARNRunner {
private static final Log LOG = LogFactory.getLog(TestYARNRunner.class);
private static final RecordFactory recordFactory = RecordFactoryProvider.getRecordFactory(null);
|
@ -22,11 +22,6 @@
import java.io.IOException;
import java.util.Iterator;

import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
import junit.extensions.TestSetup;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;
@ -54,23 +49,27 @@
import org.apache.hadoop.mapred.lib.IdentityMapper;
import org.apache.hadoop.mapred.lib.IdentityReducer;
import org.apache.hadoop.util.ReflectionUtils;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.assertFalse;

public class TestDatamerge extends TestCase {
public class TestDatamerge {

private static MiniDFSCluster cluster = null;
public static Test suite() {
TestSetup setup = new TestSetup(new TestSuite(TestDatamerge.class)) {
protected void setUp() throws Exception {
Configuration conf = new Configuration();
cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
}
protected void tearDown() throws Exception {
if (cluster != null) {
cluster.shutdown();
}
}
};
return setup;

@Before
public void setUp() throws Exception {
Configuration conf = new Configuration();
cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
}
@After
public void tearDown() throws Exception {
if (cluster != null) {
cluster.shutdown();
}
}

private static SequenceFile.Writer[] createWriters(Path testdir,
@ -246,18 +245,22 @@ private static void joinAs(String jointype,
base.getFileSystem(job).delete(base, true);
}

@Test
public void testSimpleInnerJoin() throws Exception {
joinAs("inner", InnerJoinChecker.class);
}

@Test
public void testSimpleOuterJoin() throws Exception {
joinAs("outer", OuterJoinChecker.class);
}

@Test
public void testSimpleOverride() throws Exception {
joinAs("override", OverrideChecker.class);
}

@Test
public void testNestedJoin() throws Exception {
// outer(inner(S1,...,Sn),outer(S1,...Sn))
final int SOURCES = 3;
@ -350,6 +353,7 @@ public void testNestedJoin() throws Exception {

}

@Test
public void testEmptyJoin() throws Exception {
JobConf job = new JobConf();
Path base = cluster.getFileSystem().makeQualified(new Path("/empty"));
|
@ -26,8 +26,6 @@
import java.util.Arrays;
import java.util.Random;

import junit.framework.TestCase;

import org.apache.hadoop.io.BooleanWritable;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.FloatWritable;
@ -36,8 +34,12 @@
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableUtils;
import org.junit.Test;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;

public class TestTupleWritable extends TestCase {
public class TestTupleWritable {

private TupleWritable makeTuple(Writable[] writs) {
Writable[] sub1 = { writs[1], writs[2] };
@ -100,6 +102,7 @@ private int verifIter(Writable[] writs, TupleWritable t, int i) {
return i;
}

@Test
public void testIterable() throws Exception {
Random r = new Random();
Writable[] writs = {
@ -121,6 +124,7 @@ public void testIterable() throws Exception {
verifIter(writs, t, 0);
}

@Test
public void testNestedIterable() throws Exception {
Random r = new Random();
Writable[] writs = {
@ -139,6 +143,7 @@ public void testNestedIterable() throws Exception {
assertTrue("Bad count", writs.length == verifIter(writs, sTuple, 0));
}

@Test
public void testWritable() throws Exception {
Random r = new Random();
Writable[] writs = {
@ -162,6 +167,7 @@ public void testWritable() throws Exception {
assertTrue("Failed to write/read tuple", sTuple.equals(dTuple));
}

@Test
public void testWideWritable() throws Exception {
Writable[] manyWrits = makeRandomWritables(131);

@ -180,7 +186,8 @@ public void testWideWritable() throws Exception {
assertTrue("Failed to write/read tuple", sTuple.equals(dTuple));
assertEquals("All tuple data has not been read from the stream",-1,in.read());
}

@Test
public void testWideWritable2() throws Exception {
Writable[] manyWrits = makeRandomWritables(71);

@ -202,6 +209,7 @@ public void testWideWritable2() throws Exception {
* Tests a tuple writable with more than 64 values and the values set written
* spread far apart.
*/
@Test
public void testSparseWideWritable() throws Exception {
Writable[] manyWrits = makeRandomWritables(131);

@ -220,7 +228,7 @@ public void testSparseWideWritable() throws Exception {
assertTrue("Failed to write/read tuple", sTuple.equals(dTuple));
assertEquals("All tuple data has not been read from the stream",-1,in.read());
}

@Test
public void testWideTuple() throws Exception {
Text emptyText = new Text("Should be empty");
Writable[] values = new Writable[64];
@ -240,7 +248,7 @@ public void testWideTuple() throws Exception {
}
}
}

@Test
public void testWideTuple2() throws Exception {
Text emptyText = new Text("Should be empty");
Writable[] values = new Writable[64];
@ -264,6 +272,7 @@ public void testWideTuple2() throws Exception {
/**
* Tests that we can write more than 64 values.
*/
@Test
public void testWideTupleBoundary() throws Exception {
Text emptyText = new Text("Should not be set written");
Writable[] values = new Writable[65];
@ -287,6 +296,7 @@ public void testWideTupleBoundary() throws Exception {
/**
* Tests compatibility with pre-0.21 versions of TupleWritable
*/
@Test
public void testPreVersion21Compatibility() throws Exception {
Writable[] manyWrits = makeRandomWritables(64);
PreVersion21TupleWritable oldTuple = new PreVersion21TupleWritable(manyWrits);
@ -304,7 +314,7 @@ public void testPreVersion21Compatibility() throws Exception {
assertTrue("Tuple writable is unable to read pre-0.21 versions of TupleWritable", oldTuple.isCompatible(dTuple));
assertEquals("All tuple data has not been read from the stream",-1,in.read());
}

@Test
public void testPreVersion21CompatibilityEmptyTuple() throws Exception {
Writable[] manyWrits = new Writable[0];
PreVersion21TupleWritable oldTuple = new PreVersion21TupleWritable(manyWrits);
|
@ -21,8 +21,6 @@
import java.io.DataOutput;
import java.io.IOException;

import junit.framework.TestCase;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
@ -35,13 +33,16 @@
import org.apache.hadoop.mapred.RecordReader;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.util.ReflectionUtils;
import org.junit.Test;
import static org.junit.Assert.assertTrue;

public class TestWrappedRecordReaderClassloader extends TestCase {
public class TestWrappedRecordReaderClassloader {
/**
* Tests the class loader set by {@link JobConf#setClassLoader(ClassLoader)}
* is inherited by any {@link WrappedRecordReader}s created by
* {@link CompositeRecordReader}
*/
@Test
public void testClassLoader() throws Exception {
JobConf job = new JobConf();
Fake_ClassLoader classLoader = new Fake_ClassLoader();
|
@ -20,8 +20,6 @@
import java.io.DataOutputStream;
import java.io.IOException;

import junit.framework.TestCase;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.MiniDFSCluster;
@ -32,9 +30,12 @@
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.TextInputFormat;
import org.junit.Test;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.assertEquals;

public class TestDelegatingInputFormat extends TestCase {

public class TestDelegatingInputFormat {
@Test
public void testSplitting() throws Exception {
JobConf conf = new JobConf();
MiniDFSCluster dfs = null;
|
@ -20,13 +20,14 @@

import java.io.*;
import java.util.*;
import junit.framework.TestCase;

import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.*;
import org.apache.hadoop.mapred.*;
import org.junit.Test;
import static org.junit.Assert.assertEquals;

public class TestLineInputFormat extends TestCase {
public class TestLineInputFormat {
private static int MAX_LENGTH = 200;

private static JobConf defaultConf = new JobConf();
@ -43,7 +44,7 @@ public class TestLineInputFormat extends TestCase {
private static Path workDir =
new Path(new Path(System.getProperty("test.build.data", "."), "data"),
"TestLineInputFormat");

@Test
public void testFormat() throws Exception {
JobConf job = new JobConf();
Path file = new Path(workDir, "test.txt");
|
@ -36,7 +36,6 @@
* @see TestDelegatingInputFormat
*/
public class TestMultipleInputs {

@Test
public void testAddInputPathWithFormat() {
final JobConf conf = new JobConf();
@ -49,7 +48,6 @@ public void testAddInputPathWithFormat() {
assertEquals(KeyValueTextInputFormat.class, inputs.get(new Path("/bar"))
.getClass());
}

@Test
public void testAddInputPathWithMapper() {
final JobConf conf = new JobConf();
|
@ -22,13 +22,14 @@
import org.apache.hadoop.mapred.*;
import org.apache.hadoop.mapred.lib.*;
import org.apache.hadoop.mapreduce.MapReduceTestUtil;
import org.junit.Test;
import static org.junit.Assert.assertEquals;

import junit.framework.TestCase;
import java.io.*;
import java.util.*;
import java.text.NumberFormat;

public class TestAggregates extends TestCase {
public class TestAggregates {

private static NumberFormat idFormat = NumberFormat.getInstance();
static {
@ -36,7 +37,7 @@ public class TestAggregates extends TestCase {
idFormat.setGroupingUsed(false);
}

@Test
public void testAggregates() throws Exception {
launch();
}
|
@ -19,13 +19,13 @@

import java.io.IOException;

import junit.framework.TestCase;

import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapred.JobConf;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;

public class TestConstructQuery extends TestCase {

public class TestConstructQuery {
private String[] fieldNames = new String[] { "id", "name", "value" };
private String[] nullFieldNames = new String[] { null, null, null };
private String expected = "INSERT INTO hadoop_output (id,name,value) VALUES (?,?,?);";
@ -33,15 +33,15 @@ public class TestConstructQuery extends TestCase {

private DBOutputFormat<DBWritable, NullWritable> format
= new DBOutputFormat<DBWritable, NullWritable>();

public void testConstructQuery() {
@Test
public void testConstructQuery() {
String actual = format.constructQuery("hadoop_output", fieldNames);
assertEquals(expected, actual);

actual = format.constructQuery("hadoop_output", nullFieldNames);
assertEquals(nullExpected, actual);
}

@Test
public void testSetOutput() throws IOException {
JobConf job = new JobConf();
DBOutputFormat.setOutput(job, "hadoop_output", fieldNames);
|
@ -44,10 +44,13 @@
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.ToolRunner;
import org.junit.Ignore;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.assertFalse;

import junit.framework.TestCase;
@Ignore
public class TestPipes extends TestCase {
public class TestPipes {
private static final Log LOG =
LogFactory.getLog(TestPipes.class.getName());

@ -66,7 +69,7 @@ static void cleanup(FileSystem fs, Path p) throws IOException {
fs.delete(p, true);
assertFalse("output not cleaned up", fs.exists(p));
}

@Test
public void testPipes() throws IOException {
if (System.getProperty("compile.c++") == null) {
LOG.info("compile.c++ is not defined, so skipping TestPipes");
|
@ -17,36 +17,42 @@
|
||||
*/
|
||||
package org.apache.hadoop.mapreduce;
|
||||
|
||||
import java.io.BufferedReader;
|
||||
import java.io.BufferedWriter;
|
||||
import java.io.InputStream;
|
||||
import java.io.InputStreamReader;
|
||||
import java.io.OutputStream;
|
||||
import java.io.OutputStreamWriter;
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.fs.FSDataInputStream;
|
||||
import org.apache.hadoop.fs.FileStatus;
|
||||
import org.apache.hadoop.fs.FileSystem;
|
||||
import org.apache.hadoop.fs.Path;
|
||||
import org.apache.hadoop.io.LongWritable;
|
||||
import org.apache.hadoop.io.NullWritable;
|
||||
import org.apache.hadoop.io.Text;
|
||||
import org.apache.hadoop.fs.*;
|
||||
import org.apache.hadoop.mapred.LocalJobRunner;
|
||||
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
|
||||
import org.apache.hadoop.mapreduce.lib.input.FileSplit;
|
||||
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
|
||||
import org.apache.hadoop.util.ReflectionUtils;
|
||||
|
||||
import org.junit.Test;
|
||||
import junit.framework.TestCase;
|
||||
|
||||
import java.io.BufferedReader;
|
||||
import java.io.BufferedWriter;
|
||||
import java.io.IOException;
|
||||
import java.io.InputStream;
|
||||
import java.io.InputStreamReader;
|
||||
import java.io.OutputStream;
|
||||
import java.io.OutputStreamWriter;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertFalse;
|
||||
import static org.junit.Assert.assertNotNull;
|
||||
import static org.junit.Assert.assertTrue;
|
||||
|
||||
/**
|
||||
* Stress tests for the LocalJobRunner
|
||||
*/
|
||||
public class TestLocalRunner extends TestCase {
|
||||
public class TestLocalRunner {
|
||||
|
||||
private static final Log LOG = LogFactory.getLog(TestLocalRunner.class);
|
||||
|
||||
|
@ -17,6 +17,23 @@
|
||||
*/
|
||||
package org.apache.hadoop.mapreduce;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.fs.FileSystem;
|
||||
import org.apache.hadoop.fs.LocatedFileStatus;
|
||||
import org.apache.hadoop.fs.Path;
|
||||
import org.apache.hadoop.fs.RemoteIterator;
|
||||
import org.apache.hadoop.mapred.ClusterMapReduceTestCase;
|
||||
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
|
||||
import org.apache.hadoop.mapreduce.tools.CLI;
|
||||
import org.apache.hadoop.util.ExitUtil;
|
||||
import org.apache.hadoop.util.Tool;
|
||||
import org.apache.hadoop.util.ToolRunner;
|
||||
import org.codehaus.jettison.json.JSONException;
|
||||
import org.codehaus.jettison.json.JSONObject;
|
||||
import org.junit.Test;
|
||||
|
||||
import java.io.BufferedReader;
|
||||
import java.io.ByteArrayInputStream;
|
||||
import java.io.ByteArrayOutputStream;
|
||||
@ -31,23 +48,11 @@
|
||||
import java.io.PrintStream;
|
||||
import java.util.Arrays;
|
||||
|
||||
import org.apache.hadoop.fs.LocatedFileStatus;
|
||||
import org.apache.hadoop.fs.RemoteIterator;
|
||||
import org.codehaus.jettison.json.JSONException;
|
||||
import org.codehaus.jettison.json.JSONObject;
|
||||
import org.junit.Assert;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.fs.FileSystem;
|
||||
import org.apache.hadoop.fs.Path;
|
||||
import org.apache.hadoop.mapred.ClusterMapReduceTestCase;
|
||||
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
|
||||
import org.apache.hadoop.mapreduce.tools.CLI;
|
||||
import org.apache.hadoop.util.ExitUtil;
|
||||
import org.apache.hadoop.util.Tool;
|
||||
import org.apache.hadoop.util.ToolRunner;
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertFalse;
|
||||
import static org.junit.Assert.assertNotNull;
|
||||
import static org.junit.Assert.assertTrue;
|
||||
import static org.junit.Assert.fail;
|
||||
|
||||
/**
|
||||
test CLI class. CLI class implemented the Tool interface.
|
||||
@ -103,7 +108,7 @@ public void checkOutputSpecs(JobContext job) throws IOException {
|
||||
throw new IOException();
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testJobSubmissionSpecsAndFiles() throws Exception {
|
||||
Configuration conf = createJobConf();
|
||||
Job job = MapReduceTestUtil.createJob(conf, getInputDir(), getOutputDir(),
|
||||
@ -127,7 +132,7 @@ public void testJobSubmissionSpecsAndFiles() throws Exception {
|
||||
/**
|
||||
* main test method
|
||||
*/
|
||||
|
||||
@Test
|
||||
public void testJobClient() throws Exception {
|
||||
Configuration conf = createJobConf();
|
||||
Job job = runJob(conf);
|
||||
@ -180,8 +185,7 @@ private void testfailTask(Configuration conf) throws Exception {
|
||||
|
||||
runTool(conf, jc, new String[] { "-fail-task", taid.toString() }, out);
|
||||
String answer = new String(out.toByteArray(), "UTF-8");
|
||||
Assert
|
||||
.assertTrue(answer.contains("Killed task " + taid + " by failing it"));
|
||||
assertTrue(answer.contains("Killed task " + taid + " by failing it"));
|
||||
}
|
||||
|
||||
/**
|
||||
@ -199,7 +203,7 @@ private void testKillTask(Configuration conf) throws Exception {
|
||||
|
||||
runTool(conf, jc, new String[] { "-kill-task", taid.toString() }, out);
|
||||
String answer = new String(out.toByteArray(), "UTF-8");
|
||||
Assert.assertTrue(answer.contains("Killed task " + taid));
|
||||
assertTrue(answer.contains("Killed task " + taid));
|
||||
}
|
||||
|
||||
/**
|
||||
@ -686,6 +690,7 @@ public void testChangingJobPriority(String jobId, Configuration conf)
|
||||
* Test -list option displays job name.
|
||||
* The name is capped to 20 characters for display.
|
||||
*/
|
||||
@Test
|
||||
public void testJobName() throws Exception {
|
||||
Configuration conf = createJobConf();
|
||||
CLI jc = createJobClient();
|
||||
|
@ -25,8 +25,6 @@
|
||||
import java.util.Arrays;
|
||||
import java.util.List;
|
||||
|
||||
import junit.framework.TestCase;
|
||||
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.fs.FileSystem;
|
||||
import org.apache.hadoop.fs.FileUtil;
|
||||
@ -42,13 +40,16 @@
|
||||
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
|
||||
import org.apache.hadoop.mapreduce.lib.output.LazyOutputFormat;
|
||||
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
|
||||
import org.junit.Test;
|
||||
|
||||
import static org.junit.Assert.assertTrue;
|
||||
|
||||
/**
|
||||
* A JUnit test to test the Map-Reduce framework's feature to create part
|
||||
* files only if there is an explicit output.collect. This helps in preventing
|
||||
* 0 byte files
|
||||
*/
|
||||
public class TestMapReduceLazyOutput extends TestCase {
|
||||
public class TestMapReduceLazyOutput {
|
||||
private static final int NUM_HADOOP_SLAVES = 3;
|
||||
private static final int NUM_MAPS_PER_NODE = 2;
|
||||
private static final Path INPUT = new Path("/testlazy/input");
|
||||
@ -122,7 +123,7 @@ public void createInput(FileSystem fs, int numMappers) throws Exception {
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testLazyOutput() throws Exception {
|
||||
MiniDFSCluster dfs = null;
|
||||
MiniMRCluster mr = null;
|
||||
|
@ -27,8 +27,6 @@
|
||||
import java.util.ArrayList;
|
||||
import java.util.StringTokenizer;
|
||||
|
||||
import junit.framework.TestCase;
|
||||
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
@ -43,12 +41,15 @@
|
||||
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
|
||||
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
|
||||
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
|
||||
import org.junit.Test;
|
||||
|
||||
import static org.junit.Assert.assertTrue;
|
||||
|
||||
/**
|
||||
* A JUnit test to test the Map-Reduce framework's support for the
|
||||
* "mark-reset" functionality in Reduce Values Iterator
|
||||
*/
|
||||
public class TestValueIterReset extends TestCase {
|
||||
public class TestValueIterReset {
|
||||
private static final int NUM_MAPS = 1;
|
||||
private static final int NUM_TESTS = 4;
|
||||
private static final int NUM_VALUES = 40;
|
||||
@ -518,6 +519,7 @@ public void createInput() throws Exception {
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testValueIterReset() {
|
||||
try {
|
||||
Configuration conf = new Configuration();
|
||||
|
@ -18,6 +18,7 @@
|
||||
|
||||
package org.apache.hadoop.mapreduce;
|
||||
|
||||
import static org.junit.Assert.assertTrue;
|
||||
import static org.mockito.Matchers.any;
|
||||
import static org.mockito.Mockito.mock;
|
||||
import static org.mockito.Mockito.when;
|
||||
@ -26,7 +27,6 @@
|
||||
|
||||
import java.io.IOException;
|
||||
import java.nio.ByteBuffer;
|
||||
import junit.framework.TestCase;
|
||||
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.io.Text;
|
||||
@ -44,8 +44,7 @@
|
||||
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
|
||||
import org.junit.Test;
|
||||
|
||||
public class TestYarnClientProtocolProvider extends TestCase {
|
||||
|
||||
public class TestYarnClientProtocolProvider {
|
||||
private static final RecordFactory recordFactory = RecordFactoryProvider.
|
||||
getRecordFactory(null);
|
||||
|
||||
|
@ -18,22 +18,24 @@
|
||||
package org.apache.hadoop.mapreduce.lib.aggregate;
|
||||
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.fs.*;
|
||||
import org.apache.hadoop.fs.FSDataOutputStream;
|
||||
import org.apache.hadoop.fs.FileSystem;
|
||||
import org.apache.hadoop.io.*;
|
||||
import org.apache.hadoop.mapred.Utils;
|
||||
import org.apache.hadoop.fs.Path;
|
||||
import org.apache.hadoop.io.Text;
|
||||
import org.apache.hadoop.mapreduce.Job;
|
||||
import org.apache.hadoop.mapreduce.MapReduceTestUtil;
|
||||
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
|
||||
import org.apache.hadoop.mapreduce.lib.input.TextInputFormat;
|
||||
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
|
||||
import org.apache.hadoop.mapreduce.lib.output.TextOutputFormat;
|
||||
import org.junit.Test;
|
||||
|
||||
import junit.framework.TestCase;
|
||||
import java.io.*;
|
||||
import java.text.NumberFormat;
|
||||
|
||||
public class TestMapReduceAggregates extends TestCase {
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertTrue;
|
||||
|
||||
public class TestMapReduceAggregates {
|
||||
|
||||
private static NumberFormat idFormat = NumberFormat.getInstance();
|
||||
static {
|
||||
@ -41,7 +43,7 @@ public class TestMapReduceAggregates extends TestCase {
|
||||
idFormat.setGroupingUsed(false);
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testAggregates() throws Exception {
|
||||
launch();
|
||||
}
|
||||
@ -123,11 +125,4 @@ public static void launch() throws Exception {
|
||||
fs.delete(OUTPUT_DIR, true);
|
||||
fs.delete(INPUT_DIR, true);
|
||||
}
|
||||
|
||||
/**
|
||||
* Launches all the tasks in order.
|
||||
*/
|
||||
public static void main(String[] argv) throws Exception {
|
||||
launch();
|
||||
}
|
||||
}
|
||||
|
@ -19,14 +19,15 @@
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import junit.framework.TestCase;
|
||||
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.io.NullWritable;
|
||||
import org.apache.hadoop.mapreduce.Job;
|
||||
import org.junit.Test;
|
||||
|
||||
public class TestDBOutputFormat extends TestCase {
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertNull;
|
||||
|
||||
public class TestDBOutputFormat {
|
||||
private String[] fieldNames = new String[] { "id", "name", "value" };
|
||||
private String[] nullFieldNames = new String[] { null, null, null };
|
||||
private String expected = "INSERT INTO hadoop_output " +
|
||||
@ -35,15 +36,17 @@ public class TestDBOutputFormat extends TestCase {
|
||||
|
||||
private DBOutputFormat<DBWritable, NullWritable> format
|
||||
= new DBOutputFormat<DBWritable, NullWritable>();
|
||||
|
||||
public void testConstructQuery() {
|
||||
|
||||
@Test
|
||||
public void testConstructQuery() {
|
||||
String actual = format.constructQuery("hadoop_output", fieldNames);
|
||||
assertEquals(expected, actual);
|
||||
|
||||
actual = format.constructQuery("hadoop_output", nullFieldNames);
|
||||
assertEquals(nullExpected, actual);
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testSetOutput() throws IOException {
|
||||
Job job = Job.getInstance(new Configuration());
|
||||
DBOutputFormat.setOutput(job, "hadoop_output", fieldNames);
|
||||
|
@ -17,15 +17,15 @@
|
||||
*/
|
||||
package org.apache.hadoop.mapreduce.lib.db;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.math.BigDecimal;
|
||||
import org.junit.Test;
|
||||
|
||||
import java.sql.SQLException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import junit.framework.TestCase;
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.fail;
|
||||
|
||||
public class TestIntegerSplitter extends TestCase {
|
||||
public class TestIntegerSplitter {
|
||||
private long [] toLongArray(List<Long> in) {
|
||||
long [] out = new long[in.size()];
|
||||
for (int i = 0; i < in.size(); i++) {
|
||||
@ -70,12 +70,14 @@ public void assertLongArrayEquals(long [] expected, long [] actual) {
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testEvenSplits() throws SQLException {
|
||||
List<Long> splits = new IntegerSplitter().split(10, 0, 100);
|
||||
long [] expected = { 0, 10, 20, 30, 40, 50, 60, 70, 80, 90, 100 };
|
||||
assertLongArrayEquals(expected, toLongArray(splits));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testOddSplits() throws SQLException {
|
||||
List<Long> splits = new IntegerSplitter().split(10, 0, 95);
|
||||
long [] expected = { 0, 9, 18, 27, 36, 45, 54, 63, 72, 81, 90, 95 };
|
||||
@ -83,12 +85,14 @@ public void testOddSplits() throws SQLException {
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSingletonSplit() throws SQLException {
|
||||
List<Long> splits = new IntegerSplitter().split(1, 5, 5);
|
||||
long [] expected = { 5, 5 };
|
||||
assertLongArrayEquals(expected, toLongArray(splits));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSingletonSplit2() throws SQLException {
|
||||
// Same test, but overly-high numSplits
|
||||
List<Long> splits = new IntegerSplitter().split(5, 5, 5);
|
||||
@ -96,6 +100,7 @@ public void testSingletonSplit2() throws SQLException {
|
||||
assertLongArrayEquals(expected, toLongArray(splits));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testTooManySplits() throws SQLException {
|
||||
List<Long> splits = new IntegerSplitter().split(5, 3, 5);
|
||||
long [] expected = { 3, 4, 5 };
|
||||
|
@ -17,15 +17,16 @@
|
||||
*/
|
||||
package org.apache.hadoop.mapreduce.lib.db;
|
||||
|
||||
import java.io.IOException;
|
||||
import org.junit.Test;
|
||||
|
||||
import java.math.BigDecimal;
|
||||
import java.sql.SQLException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import junit.framework.TestCase;
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.fail;
|
||||
|
||||
public class TestTextSplitter extends TestCase {
|
||||
public class TestTextSplitter {
|
||||
|
||||
public String formatArray(Object [] ar) {
|
||||
StringBuilder sb = new StringBuilder();
|
||||
@ -62,48 +63,56 @@ public void assertArrayEquals(Object [] expected, Object [] actual) {
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testStringConvertEmpty() {
|
||||
TextSplitter splitter = new TextSplitter();
|
||||
BigDecimal emptyBigDec = splitter.stringToBigDecimal("");
|
||||
assertEquals(BigDecimal.ZERO, emptyBigDec);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testBigDecConvertEmpty() {
|
||||
TextSplitter splitter = new TextSplitter();
|
||||
String emptyStr = splitter.bigDecimalToString(BigDecimal.ZERO);
|
||||
assertEquals("", emptyStr);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testConvertA() {
|
||||
TextSplitter splitter = new TextSplitter();
|
||||
String out = splitter.bigDecimalToString(splitter.stringToBigDecimal("A"));
|
||||
assertEquals("A", out);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testConvertZ() {
|
||||
TextSplitter splitter = new TextSplitter();
|
||||
String out = splitter.bigDecimalToString(splitter.stringToBigDecimal("Z"));
|
||||
assertEquals("Z", out);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testConvertThreeChars() {
|
||||
TextSplitter splitter = new TextSplitter();
|
||||
String out = splitter.bigDecimalToString(splitter.stringToBigDecimal("abc"));
|
||||
assertEquals("abc", out);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testConvertStr() {
|
||||
TextSplitter splitter = new TextSplitter();
|
||||
String out = splitter.bigDecimalToString(splitter.stringToBigDecimal("big str"));
|
||||
assertEquals("big str", out);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testConvertChomped() {
|
||||
TextSplitter splitter = new TextSplitter();
|
||||
String out = splitter.bigDecimalToString(splitter.stringToBigDecimal("AVeryLongStringIndeed"));
|
||||
assertEquals("AVeryLon", out);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testAlphabetSplit() throws SQLException {
|
||||
// This should give us 25 splits, one per letter.
|
||||
TextSplitter splitter = new TextSplitter();
|
||||
@ -113,6 +122,7 @@ public void testAlphabetSplit() throws SQLException {
|
||||
assertArrayEquals(expected, splits.toArray(new String [0]));
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testCommonPrefix() throws SQLException {
|
||||
// Splits between 'Hand' and 'Hardy'
|
||||
TextSplitter splitter = new TextSplitter();
|
||||
|
@ -18,15 +18,19 @@
|
||||
package org.apache.hadoop.mapreduce.lib.fieldsel;
|
||||
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.fs.*;
|
||||
import org.apache.hadoop.io.*;
|
||||
import org.apache.hadoop.fs.FileSystem;
|
||||
import org.apache.hadoop.fs.Path;
|
||||
import org.apache.hadoop.io.Text;
|
||||
import org.apache.hadoop.mapreduce.Job;
|
||||
import org.apache.hadoop.mapreduce.MapReduceTestUtil;
|
||||
import org.junit.Test;
|
||||
|
||||
import junit.framework.TestCase;
|
||||
import java.text.NumberFormat;
|
||||
|
||||
public class TestMRFieldSelection extends TestCase {
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertTrue;
|
||||
|
||||
public class TestMRFieldSelection {
|
||||
|
||||
private static NumberFormat idFormat = NumberFormat.getInstance();
|
||||
static {
|
||||
@ -34,6 +38,7 @@ public class TestMRFieldSelection extends TestCase {
|
||||
idFormat.setGroupingUsed(false);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testFieldSelection() throws Exception {
|
||||
launch();
|
||||
}
|
||||
@ -114,11 +119,4 @@ public static void constructInputOutputData(StringBuffer inputData,
|
||||
System.out.println("ExpectedData:");
|
||||
System.out.println(expectedOutput.toString());
|
||||
}
|
||||
|
||||
/**
|
||||
* Launches all the tasks in order.
|
||||
*/
|
||||
public static void main(String[] argv) throws Exception {
|
||||
launch();
|
||||
}
|
||||
}
|
||||
|
@ -18,11 +18,12 @@
|
||||
|
||||
package org.apache.hadoop.mapreduce.lib.input;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Random;
|
||||
|
||||
import org.apache.hadoop.fs.*;
|
||||
import org.apache.hadoop.io.*;
|
||||
import org.apache.hadoop.fs.FileSystem;
|
||||
import org.apache.hadoop.fs.Path;
|
||||
import org.apache.hadoop.io.BytesWritable;
|
||||
import org.apache.hadoop.io.DataInputBuffer;
|
||||
import org.apache.hadoop.io.SequenceFile;
|
||||
import org.apache.hadoop.io.Text;
|
||||
import org.apache.hadoop.mapreduce.InputFormat;
|
||||
import org.apache.hadoop.mapreduce.InputSplit;
|
||||
import org.apache.hadoop.mapreduce.Job;
|
||||
@ -31,12 +32,18 @@
|
||||
import org.apache.hadoop.mapreduce.RecordReader;
|
||||
import org.apache.hadoop.mapreduce.TaskAttemptContext;
|
||||
import org.apache.hadoop.mapreduce.task.MapContextImpl;
|
||||
import org.junit.Test;
|
||||
|
||||
import junit.framework.TestCase;
|
||||
import java.io.IOException;
|
||||
import java.util.Random;
|
||||
|
||||
public class TestMRSequenceFileAsBinaryInputFormat extends TestCase {
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertTrue;
|
||||
|
||||
public class TestMRSequenceFileAsBinaryInputFormat {
|
||||
private static final int RECORDS = 10000;
|
||||
|
||||
@Test
|
||||
public void testBinary() throws IOException, InterruptedException {
|
||||
Job job = Job.getInstance();
|
||||
FileSystem fs = FileSystem.getLocal(job.getConfiguration());
|
||||
|
@ -18,11 +18,13 @@
|
||||
|
||||
package org.apache.hadoop.mapreduce.lib.input;
|
||||
|
||||
import java.util.*;
|
||||
import junit.framework.TestCase;
|
||||
|
||||
import org.apache.hadoop.fs.*;
|
||||
import org.apache.hadoop.io.*;
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.fs.FileSystem;
|
||||
import org.apache.hadoop.fs.Path;
|
||||
import org.apache.hadoop.io.IntWritable;
|
||||
import org.apache.hadoop.io.LongWritable;
|
||||
import org.apache.hadoop.io.SequenceFile;
|
||||
import org.apache.hadoop.io.Text;
|
||||
import org.apache.hadoop.mapreduce.InputFormat;
|
||||
import org.apache.hadoop.mapreduce.InputSplit;
|
||||
import org.apache.hadoop.mapreduce.Job;
|
||||
@ -31,12 +33,19 @@
|
||||
import org.apache.hadoop.mapreduce.RecordReader;
|
||||
import org.apache.hadoop.mapreduce.TaskAttemptContext;
|
||||
import org.apache.hadoop.mapreduce.task.MapContextImpl;
|
||||
import org.apache.hadoop.conf.*;
|
||||
import org.junit.Test;
|
||||
|
||||
public class TestMRSequenceFileAsTextInputFormat extends TestCase {
|
||||
import java.util.BitSet;
|
||||
import java.util.Random;
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertFalse;
|
||||
|
||||
public class TestMRSequenceFileAsTextInputFormat {
|
||||
private static int MAX_LENGTH = 10000;
|
||||
private static Configuration conf = new Configuration();
|
||||
|
||||
@Test
|
||||
public void testFormat() throws Exception {
|
||||
Job job = Job.getInstance(conf);
|
||||
FileSystem fs = FileSystem.getLocal(conf);
|
||||
@ -112,8 +121,4 @@ public void testFormat() throws Exception {
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
public static void main(String[] args) throws Exception {
|
||||
new TestMRSequenceFileAsTextInputFormat().testFormat();
|
||||
}
|
||||
}
|
||||
|
@ -18,14 +18,14 @@
|
||||
|
||||
package org.apache.hadoop.mapreduce.lib.input;
|
||||
|
||||
import java.io.*;
|
||||
import java.util.*;
|
||||
import junit.framework.TestCase;
|
||||
|
||||
import org.apache.commons.logging.*;
|
||||
|
||||
import org.apache.hadoop.fs.*;
|
||||
import org.apache.hadoop.io.*;
|
||||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.fs.FileSystem;
|
||||
import org.apache.hadoop.fs.Path;
|
||||
import org.apache.hadoop.io.BytesWritable;
|
||||
import org.apache.hadoop.io.SequenceFile;
|
||||
import org.apache.hadoop.io.Text;
|
||||
import org.apache.hadoop.mapreduce.InputFormat;
|
||||
import org.apache.hadoop.mapreduce.InputSplit;
|
||||
import org.apache.hadoop.mapreduce.Job;
|
||||
@ -34,10 +34,15 @@
|
||||
import org.apache.hadoop.mapreduce.RecordReader;
|
||||
import org.apache.hadoop.mapreduce.TaskAttemptContext;
|
||||
import org.apache.hadoop.mapreduce.task.MapContextImpl;
|
||||
import org.apache.hadoop.conf.*;
|
||||
import org.junit.Test;
|
||||
|
||||
public class TestMRSequenceFileInputFilter extends TestCase {
|
||||
private static final Log LOG =
|
||||
import java.io.IOException;
|
||||
import java.util.Random;
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
|
||||
public class TestMRSequenceFileInputFilter {
|
||||
private static final Log LOG =
|
||||
LogFactory.getLog(TestMRSequenceFileInputFilter.class.getName());
|
||||
|
||||
private static final int MAX_LENGTH = 15000;
|
||||
@ -113,7 +118,8 @@ private int countRecords(int numSplits)
|
||||
}
|
||||
return count;
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testRegexFilter() throws Exception {
|
||||
// set the filter class
|
||||
LOG.info("Testing Regex Filter with patter: \\A10*");
|
||||
@ -138,6 +144,7 @@ public void testRegexFilter() throws Exception {
|
||||
fs.delete(inDir, true);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testPercentFilter() throws Exception {
|
||||
LOG.info("Testing Percent Filter with frequency: 1000");
|
||||
// set the filter class
|
||||
@ -165,7 +172,8 @@ public void testPercentFilter() throws Exception {
|
||||
// clean up
|
||||
fs.delete(inDir, true);
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testMD5Filter() throws Exception {
|
||||
// set the filter class
|
||||
LOG.info("Testing MD5 Filter with frequency: 1000");
|
||||
@ -187,9 +195,4 @@ public void testMD5Filter() throws Exception {
|
||||
// clean up
|
||||
fs.delete(inDir, true);
|
||||
}
|
||||
|
||||
public static void main(String[] args) throws Exception {
|
||||
TestMRSequenceFileInputFilter filter = new TestMRSequenceFileInputFilter();
|
||||
filter.testRegexFilter();
|
||||
}
|
||||
}
|
||||
|
@ -18,17 +18,28 @@
|
||||
|
||||
package org.apache.hadoop.mapreduce.lib.input;
|
||||
|
||||
import java.io.*;
|
||||
import java.util.*;
|
||||
import junit.framework.TestCase;
|
||||
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.fs.*;
|
||||
import org.apache.hadoop.io.*;
|
||||
import org.apache.hadoop.mapreduce.*;
|
||||
import org.apache.hadoop.fs.FileSystem;
|
||||
import org.apache.hadoop.fs.Path;
|
||||
import org.apache.hadoop.io.LongWritable;
|
||||
import org.apache.hadoop.io.Text;
|
||||
import org.apache.hadoop.mapreduce.InputSplit;
|
||||
import org.apache.hadoop.mapreduce.Job;
|
||||
import org.apache.hadoop.mapreduce.MapContext;
|
||||
import org.apache.hadoop.mapreduce.MapReduceTestUtil;
|
||||
import org.apache.hadoop.mapreduce.RecordReader;
|
||||
import org.apache.hadoop.mapreduce.TaskAttemptContext;
|
||||
import org.apache.hadoop.mapreduce.task.MapContextImpl;
|
||||
import org.junit.Test;
|
||||
|
||||
public class TestNLineInputFormat extends TestCase {
|
||||
import java.io.IOException;
|
||||
import java.io.OutputStreamWriter;
|
||||
import java.io.Writer;
|
||||
import java.util.List;
|
||||
|
||||
import static org.junit.Assert.assertEquals;
|
||||
|
||||
public class TestNLineInputFormat {
|
||||
private static int MAX_LENGTH = 200;
|
||||
|
||||
private static Configuration conf = new Configuration();
|
||||
@ -45,7 +56,8 @@ public class TestNLineInputFormat extends TestCase {
|
||||
private static Path workDir =
|
||||
new Path(new Path(System.getProperty("test.build.data", "."), "data"),
|
||||
"TestNLineInputFormat");
|
||||
|
||||
|
||||
@Test
|
||||
public void testFormat() throws Exception {
|
||||
Job job = Job.getInstance(conf);
|
||||
Path file = new Path(workDir, "test.txt");
|
||||
@ -116,8 +128,4 @@ void checkFormat(Job job, int expectedN, int lastN)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
public static void main(String[] args) throws Exception {
|
||||
new TestNLineInputFormat().testFormat();
|
||||
}
|
||||
}
|
||||
|
@ -19,11 +19,6 @@
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
import junit.framework.Test;
|
||||
import junit.framework.TestCase;
|
||||
import junit.framework.TestSuite;
|
||||
import junit.extensions.TestSetup;
|
||||
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.fs.FileStatus;
|
||||
import org.apache.hadoop.fs.Path;
|
||||
@ -37,23 +32,31 @@
|
||||
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
|
||||
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
|
||||
import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
|
||||
import org.junit.After;
|
||||
import org.junit.AfterClass;
|
||||
import org.junit.Before;
|
||||
import org.junit.BeforeClass;
|
||||
import org.junit.Test;
|
||||
|
||||
public class TestJoinDatamerge extends TestCase {
|
||||
import static org.junit.Assert.assertEquals;
|
||||
import static org.junit.Assert.assertFalse;
|
||||
import static org.junit.Assert.assertTrue;
|
||||
|
||||
public class TestJoinDatamerge {
|
||||
|
||||
private static MiniDFSCluster cluster = null;
|
||||
public static Test suite() {
|
||||
TestSetup setup = new TestSetup(new TestSuite(TestJoinDatamerge.class)) {
|
||||
protected void setUp() throws Exception {
|
||||
Configuration conf = new Configuration();
|
||||
cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
|
||||
}
|
||||
protected void tearDown() throws Exception {
|
||||
if (cluster != null) {
|
||||
cluster.shutdown();
|
||||
}
|
||||
}
|
||||
};
|
||||
return setup;
|
||||
|
||||
@BeforeClass
|
||||
public static void setUp() throws Exception {
|
||||
Configuration conf = new Configuration();
|
||||
cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
|
||||
}
|
||||
|
||||
@AfterClass
|
||||
public static void tearDown() throws Exception {
|
||||
if (cluster != null) {
|
||||
cluster.shutdown();
|
||||
}
|
||||
}
|
||||
|
||||
private static SequenceFile.Writer[] createWriters(Path testdir,
|
||||
@ -111,7 +114,7 @@ private static abstract class SimpleCheckerMapBase<V extends Writable>
|
||||
extends Mapper<IntWritable, V, IntWritable, IntWritable>{
|
||||
protected final static IntWritable one = new IntWritable(1);
|
||||
int srcs;
|
||||
|
||||
|
||||
public void setup(Context context) {
|
||||
srcs = context.getConfiguration().getInt("testdatamerge.sources", 0);
|
||||
assertTrue("Invalid src count: " + srcs, srcs > 0);
|
||||
@ -123,7 +126,7 @@ private static abstract class SimpleCheckerReduceBase
|
||||
protected final static IntWritable one = new IntWritable(1);
|
||||
|
||||
int srcs;
|
||||
|
||||
|
||||
public void setup(Context context) {
|
||||
srcs = context.getConfiguration().getInt("testdatamerge.sources", 0);
|
||||
assertTrue("Invalid src count: " + srcs, srcs > 0);
|
||||
@ -272,10 +275,12 @@ private static void joinAs(String jointype,
|
||||
base.getFileSystem(conf).delete(base, true);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSimpleInnerJoin() throws Exception {
|
||||
joinAs("inner", InnerJoinMapChecker.class, InnerJoinReduceChecker.class);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSimpleOuterJoin() throws Exception {
|
||||
joinAs("outer", OuterJoinMapChecker.class, OuterJoinReduceChecker.class);
|
||||
}
|
||||
@ -322,11 +327,13 @@ private static int countProduct(IntWritable key, Path[] src,
|
||||
}
|
||||
return product;
|
||||
}
|
||||
|
||||
|
||||
@Test
|
||||
public void testSimpleOverride() throws Exception {
|
||||
joinAs("override", OverrideMapChecker.class, OverrideReduceChecker.class);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testNestedJoin() throws Exception {
|
||||
// outer(inner(S1,...,Sn),outer(S1,...Sn))
|
||||
final int SOURCES = 3;
|
||||
@ -422,6 +429,7 @@ public void testNestedJoin() throws Exception {
|
||||
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testEmptyJoin() throws Exception {
|
||||
Configuration conf = new Configuration();
|
||||
Path base = cluster.getFileSystem().makeQualified(new Path("/empty"));
|
||||
|
@ -20,11 +20,6 @@
import java.io.IOException;
import java.util.List;

import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
import junit.extensions.TestSetup;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.MiniDFSCluster;
@ -36,8 +31,14 @@
import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
import org.apache.hadoop.mapreduce.task.MapContextImpl;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;

public class TestJoinProperties extends TestCase {
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

public class TestJoinProperties {

private static MiniDFSCluster cluster = null;
final static int SOURCES = 3;
@ -46,21 +47,19 @@ public class TestJoinProperties extends TestCase {
static Path[] src;
static Path base;

public static Test suite() {
TestSetup setup = new TestSetup(new TestSuite(TestJoinProperties.class)) {
protected void setUp() throws Exception {
Configuration conf = new Configuration();
cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
base = cluster.getFileSystem().makeQualified(new Path("/nested"));
src = generateSources(conf);
}
protected void tearDown() throws Exception {
if (cluster != null) {
cluster.shutdown();
}
}
};
return setup;
@BeforeClass
public static void setUp() throws Exception {
Configuration conf = new Configuration();
cluster = new MiniDFSCluster.Builder(conf).numDataNodes(2).build();
base = cluster.getFileSystem().makeQualified(new Path("/nested"));
src = generateSources(conf);
}

@AfterClass
public static void tearDown() throws Exception {
if (cluster != null) {
cluster.shutdown();
}
}

// Sources from 0 to srcs-2 have IntWritable key and IntWritable value
@ -233,6 +232,7 @@ private void testExpr4(Configuration conf) throws Exception {
}

// outer(outer(A, B), C) == outer(A,outer(B, C)) == outer(A, B, C)
@Test
public void testOuterAssociativity() throws Exception {
Configuration conf = new Configuration();
testExpr1(conf, "outer", TestType.OUTER_ASSOCIATIVITY, 33);
@ -241,6 +241,7 @@ public void testOuterAssociativity() throws Exception {
}

// inner(inner(A, B), C) == inner(A,inner(B, C)) == inner(A, B, C)
@Test
public void testInnerAssociativity() throws Exception {
Configuration conf = new Configuration();
testExpr1(conf, "inner", TestType.INNER_ASSOCIATIVITY, 2);
@ -249,6 +250,7 @@ public void testInnerAssociativity() throws Exception {
}

// override(inner(A, B), A) == A
@Test
public void testIdentity() throws Exception {
Configuration conf = new Configuration();
testExpr4(conf);

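The TestJoinProperties hunk above shows the class-level fixture pattern used throughout this migration: the JUnit 3 suite()/TestSetup wrapper is replaced by static @BeforeClass/@AfterClass methods. A minimal sketch of that pattern follows; the class and method names are placeholders for illustration only, not code from this commit:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.junit.AfterClass;
import org.junit.BeforeClass;

public class SomeClusterTest {
  private static MiniDFSCluster cluster = null;

  // Runs once before any test method in the class, replacing TestSetup.setUp().
  @BeforeClass
  public static void startCluster() throws Exception {
    cluster = new MiniDFSCluster.Builder(new Configuration()).numDataNodes(2).build();
  }

  // Runs once after the last test method, replacing TestSetup.tearDown().
  @AfterClass
  public static void stopCluster() {
    if (cluster != null) {
      cluster.shutdown();
    }
  }
}
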
@ -24,8 +24,6 @@
import java.util.Arrays;
import java.util.Random;

import junit.framework.TestCase;

import org.apache.hadoop.io.BooleanWritable;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.FloatWritable;
@ -33,8 +31,13 @@
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.junit.Test;

public class TestJoinTupleWritable extends TestCase {
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;

public class TestJoinTupleWritable {

private TupleWritable makeTuple(Writable[] writs) {
Writable[] sub1 = { writs[1], writs[2] };
@ -97,6 +100,7 @@ private int verifIter(Writable[] writs, TupleWritable t, int i) {
return i;
}

@Test
public void testIterable() throws Exception {
Random r = new Random();
Writable[] writs = {
@ -118,6 +122,7 @@ public void testIterable() throws Exception {
verifIter(writs, t, 0);
}

@Test
public void testNestedIterable() throws Exception {
Random r = new Random();
Writable[] writs = {
@ -136,6 +141,7 @@ public void testNestedIterable() throws Exception {
assertTrue("Bad count", writs.length == verifIter(writs, sTuple, 0));
}

@Test
public void testWritable() throws Exception {
Random r = new Random();
Writable[] writs = {
@ -159,6 +165,7 @@ public void testWritable() throws Exception {
assertTrue("Failed to write/read tuple", sTuple.equals(dTuple));
}

@Test
public void testWideWritable() throws Exception {
Writable[] manyWrits = makeRandomWritables(131);

@ -178,7 +185,8 @@ public void testWideWritable() throws Exception {
assertEquals("All tuple data has not been read from the stream",
-1, in.read());
}


@Test
public void testWideWritable2() throws Exception {
Writable[] manyWrits = makeRandomWritables(71);

@ -201,6 +209,7 @@ public void testWideWritable2() throws Exception {
* Tests a tuple writable with more than 64 values and the values set written
* spread far apart.
*/
@Test
public void testSparseWideWritable() throws Exception {
Writable[] manyWrits = makeRandomWritables(131);

@ -220,7 +229,8 @@ public void testSparseWideWritable() throws Exception {
assertEquals("All tuple data has not been read from the stream",
-1, in.read());
}


@Test
public void testWideTuple() throws Exception {
Text emptyText = new Text("Should be empty");
Writable[] values = new Writable[64];
@ -241,7 +251,8 @@ public void testWideTuple() throws Exception {
}
}
}


@Test
public void testWideTuple2() throws Exception {
Text emptyText = new Text("Should be empty");
Writable[] values = new Writable[64];
@ -266,6 +277,7 @@ public void testWideTuple2() throws Exception {
/**
* Tests that we can write more than 64 values.
*/
@Test
public void testWideTupleBoundary() throws Exception {
Text emptyText = new Text("Should not be set written");
Writable[] values = new Writable[65];

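Because the migrated classes no longer extend TestCase, assertion methods such as assertTrue and assertEquals are no longer inherited; they are pulled in through static imports of org.junit.Assert, as the hunks above show. A rough sketch of the resulting shape, with an illustrative class and method that are not part of this commit:

import org.junit.Test;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

public class SomeWritableTest {
  @Test
  public void testRoundTrip() {
    // assertEquals/assertTrue now resolve to org.junit.Assert rather than junit.framework.TestCase.
    assertEquals("unexpected sum", 3, 1 + 2);
    assertTrue("expected a positive value", 3 > 0);
  }
}
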
@ -17,23 +17,32 @@
*/
package org.apache.hadoop.mapreduce.lib.join;

import junit.framework.TestCase;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.NullWritable;
import org.apache.hadoop.mapreduce.*;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.mapreduce.MapReduceTestUtil;
import org.apache.hadoop.mapreduce.MapReduceTestUtil.Fake_RR;
import org.apache.hadoop.mapreduce.RecordReader;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.mapreduce.TaskType;
import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
import org.junit.Test;

public class TestWrappedRRClassloader extends TestCase {
import static org.junit.Assert.assertTrue;

public class TestWrappedRRClassloader {
/**
* Tests the class loader set by
* {@link Configuration#setClassLoader(ClassLoader)}
* is inherited by any {@link WrappedRecordReader}s created by
* {@link CompositeRecordReader}
*/
@Test
public void testClassLoader() throws Exception {
Configuration conf = new Configuration();
Fake_ClassLoader classLoader = new Fake_ClassLoader();

@ -18,12 +18,17 @@

package org.apache.hadoop.mapreduce.lib.output;

import java.io.IOException;
import java.util.Random;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.*;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.BooleanWritable;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.DoubleWritable;
import org.apache.hadoop.io.FloatWritable;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.mapred.InvalidJobConfException;
import org.apache.hadoop.mapreduce.InputFormat;
@ -38,16 +43,22 @@
import org.apache.hadoop.mapreduce.TaskAttemptContext;
import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
import org.apache.hadoop.mapreduce.task.MapContextImpl;
import org.junit.Test;

import junit.framework.TestCase;
import org.apache.commons.logging.*;
import java.io.IOException;
import java.util.Random;

public class TestMRSequenceFileAsBinaryOutputFormat extends TestCase {
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

public class TestMRSequenceFileAsBinaryOutputFormat {
private static final Log LOG =
LogFactory.getLog(TestMRSequenceFileAsBinaryOutputFormat.class.getName());

private static final int RECORDS = 10000;


@Test
public void testBinary() throws IOException, InterruptedException {
Configuration conf = new Configuration();
Job job = Job.getInstance(conf);
@ -144,7 +155,8 @@ public void testBinary() throws IOException, InterruptedException {
assertEquals("Some records not found", RECORDS, count);
}

public void testSequenceOutputClassDefaultsToMapRedOutputClass()
@Test
public void testSequenceOutputClassDefaultsToMapRedOutputClass()
throws IOException {
Job job = Job.getInstance();
// Setting Random class to test getSequenceFileOutput{Key,Value}Class
@ -172,7 +184,8 @@ public void testSequenceOutputClassDefaultsToMapRedOutputClass()
SequenceFileAsBinaryOutputFormat.getSequenceFileOutputValueClass(job));
}

public void testcheckOutputSpecsForbidRecordCompression()
@Test
public void testcheckOutputSpecsForbidRecordCompression()
throws IOException {
Job job = Job.getInstance();
FileSystem fs = FileSystem.getLocal(job.getConfiguration());

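JUnit 3 picked up any public method whose name starts with "test" by reflection; the JUnit 4 runner only executes methods carrying @Test, which is why each migrated test method above gains the annotation while keeping its original name. An illustrative sketch with a hypothetical class and method, not code from this commit:

import org.junit.Test;
import static org.junit.Assert.assertTrue;

public class SomeOutputFormatTest {
  // Without @Test the JUnit 4 runner would silently skip this method,
  // even though its name follows the old test* convention.
  @Test
  public void testCheckOutputSpecs() {
    assertTrue("placeholder assertion", true);
  }
}
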
@ -22,11 +22,14 @@
import org.apache.hadoop.io.BinaryComparable;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.util.ReflectionUtils;
import org.junit.Test;

import junit.framework.TestCase;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

public class TestBinaryPartitioner extends TestCase {
public class TestBinaryPartitioner {

@Test
public void testDefaultOffsets() {
Configuration conf = new Configuration();
BinaryPartitioner<?> partitioner =
@ -50,7 +53,8 @@ public void testDefaultOffsets() {
partition2 = partitioner.getPartition(key2, null, 10);
assertTrue(partition1 != partition2);
}


@Test
public void testCustomOffsets() {
Configuration conf = new Configuration();
BinaryComparable key1 = new BytesWritable(new byte[] { 1, 2, 3, 4, 5 });
@ -75,7 +79,8 @@ public void testCustomOffsets() {
partition2 = partitioner.getPartition(key2, null, 10);
assertEquals(partition1, partition2);
}


@Test
public void testLowerBound() {
Configuration conf = new Configuration();
BinaryPartitioner.setLeftOffset(conf, 0);
@ -87,7 +92,8 @@ public void testLowerBound() {
int partition2 = partitioner.getPartition(key2, null, 10);
assertTrue(partition1 != partition2);
}


@Test
public void testUpperBound() {
Configuration conf = new Configuration();
BinaryPartitioner.setRightOffset(conf, 4);

@ -19,14 +19,17 @@

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.junit.Test;

import junit.framework.TestCase;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

public class TestKeyFieldHelper extends TestCase {
public class TestKeyFieldHelper {
private static final Log LOG = LogFactory.getLog(TestKeyFieldHelper.class);
/**
* Test is key-field-helper's parse option.
*/
@Test
public void testparseOption() throws Exception {
KeyFieldHelper helper = new KeyFieldHelper();
helper.setKeyFieldSeparator("\t");
@ -212,6 +215,7 @@ public void testparseOption() throws Exception {
/**
* Test is key-field-helper's getWordLengths.
*/
@Test
public void testGetWordLengths() throws Exception {
KeyFieldHelper helper = new KeyFieldHelper();
helper.setKeyFieldSeparator("\t");
@ -270,6 +274,7 @@ public void testGetWordLengths() throws Exception {
/**
* Test is key-field-helper's getStartOffset/getEndOffset.
*/
@Test
public void testgetStartEndOffset() throws Exception {
KeyFieldHelper helper = new KeyFieldHelper();
helper.setKeyFieldSeparator("\t");

@ -19,14 +19,16 @@

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Text;
import org.junit.Test;

import junit.framework.TestCase;
import static org.junit.Assert.assertEquals;

public class TestMRKeyFieldBasedPartitioner extends TestCase {
public class TestMRKeyFieldBasedPartitioner {

/**
* Test is key-field-based partitioned works with empty key.
*/
@Test
public void testEmptyKey() throws Exception {
int numReducers = 10;
KeyFieldBasedPartitioner<Text, Text> kfbp =

@ -23,8 +23,6 @@
import java.util.Arrays;
import java.util.Comparator;

import junit.framework.TestCase;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CommonConfigurationKeys;
import org.apache.hadoop.fs.FileSystem;
@ -41,8 +39,11 @@
import org.apache.hadoop.io.serializer.Serialization;
import org.apache.hadoop.io.serializer.WritableSerialization;
import org.apache.hadoop.mapreduce.MRJobConfig;
import org.junit.Test;

public class TestTotalOrderPartitioner extends TestCase {
import static org.junit.Assert.assertEquals;

public class TestTotalOrderPartitioner {

private static final Text[] splitStrings = new Text[] {
// -inf // 0
@ -140,6 +141,7 @@ private static <T> Path writePartitionFile(
return p;
}

@Test
public void testTotalOrderWithCustomSerialization() throws Exception {
TotalOrderPartitioner<String, NullWritable> partitioner =
new TotalOrderPartitioner<String, NullWritable>();
@ -165,6 +167,7 @@ public void testTotalOrderWithCustomSerialization() throws Exception {
}
}

@Test
public void testTotalOrderMemCmp() throws Exception {
TotalOrderPartitioner<Text,NullWritable> partitioner =
new TotalOrderPartitioner<Text,NullWritable>();
@ -184,6 +187,7 @@ public void testTotalOrderMemCmp() throws Exception {
}
}

@Test
public void testTotalOrderBinarySearch() throws Exception {
TotalOrderPartitioner<Text,NullWritable> partitioner =
new TotalOrderPartitioner<Text,NullWritable>();
@ -216,6 +220,7 @@ public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
}
}

@Test
public void testTotalOrderCustomComparator() throws Exception {
TotalOrderPartitioner<Text,NullWritable> partitioner =
new TotalOrderPartitioner<Text,NullWritable>();

@ -20,8 +20,6 @@
import java.io.File;
import java.io.IOException;

import junit.framework.TestCase;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

@ -30,20 +28,27 @@
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.mapreduce.util.MRAsyncDiskService;
import org.junit.Before;
import org.junit.Test;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

/**
* A test for MRAsyncDiskService.
*/
public class TestMRAsyncDiskService extends TestCase {
public class TestMRAsyncDiskService {

public static final Log LOG = LogFactory.getLog(TestMRAsyncDiskService.class);

private static String TEST_ROOT_DIR = new Path(System.getProperty(
"test.build.data", "/tmp")).toString();

@Override
protected void setUp() {
@Before
public void setUp() {
FileUtil.fullyDelete(new File(TEST_ROOT_DIR));
}

@ -17,7 +17,6 @@
*/
package org.apache.hadoop.mapreduce.v2;

import junit.framework.TestCase;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
@ -29,22 +28,25 @@
import org.apache.hadoop.mapred.RunningJob;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.ProxyUsers;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.net.InetAddress;
import java.io.File;
import java.io.FileOutputStream;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.io.Writer;
import java.net.InetAddress;
import java.security.PrivilegedExceptionAction;

public class TestMiniMRProxyUser extends TestCase {
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

public class TestMiniMRProxyUser {

private MiniDFSCluster dfsCluster = null;
private MiniMRCluster mrCluster = null;

protected void setUp() throws Exception {
super.setUp();

@Before
public void setUp() throws Exception {
if (System.getProperty("hadoop.log.dir") == null) {
System.setProperty("hadoop.log.dir", "/tmp");
}
@ -91,15 +93,14 @@ protected JobConf getJobConf() {
return mrCluster.createJobConf();
}

@Override
protected void tearDown() throws Exception {
@After
public void tearDown() throws Exception {
if (mrCluster != null) {
mrCluster.shutdown();
}
if (dfsCluster != null) {
dfsCluster.shutdown();
}
super.tearDown();
}

private void mrRun() throws Exception {
@ -125,11 +126,13 @@ private void mrRun() throws Exception {
assertTrue(runJob.isComplete());
assertTrue(runJob.isSuccessful());
}


@Test
public void __testCurrentUser() throws Exception {
mrRun();
}

@Test
public void testValidProxyUser() throws Exception {
UserGroupInformation ugi = UserGroupInformation.createProxyUser("u1", UserGroupInformation.getLoginUser());
ugi.doAs(new PrivilegedExceptionAction<Void>() {
@ -142,6 +145,7 @@ public Void run() throws Exception {
});
}

@Test
public void ___testInvalidProxyUser() throws Exception {
UserGroupInformation ugi = UserGroupInformation.createProxyUser("u2", UserGroupInformation.getLoginUser());
ugi.doAs(new PrivilegedExceptionAction<Void>() {

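The per-test fixture change in the files above follows the same shape everywhere in this patch: @Override protected setUp()/tearDown() become public methods annotated with @Before/@After, and the super.setUp()/super.tearDown() calls disappear because there is no longer a TestCase superclass. A self-contained sketch under those assumptions, with placeholder names that are not code from this commit:

import java.io.File;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.assertTrue;

public class SomeFixtureTest {
  private File workDir;

  @Before
  public void setUp() throws Exception {
    // No super.setUp() call: the class no longer extends junit.framework.TestCase.
    workDir = new File(System.getProperty("test.build.data", "/tmp"), "work");
    assertTrue(workDir.mkdirs() || workDir.isDirectory());
  }

  @After
  public void tearDown() throws Exception {
    if (workDir != null) {
      workDir.delete();
    }
  }

  @Test
  public void testWorkDirExists() {
    assertTrue(workDir.isDirectory());
  }
}
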
@ -17,7 +17,6 @@
*/
package org.apache.hadoop.mapreduce.v2;

import junit.framework.TestCase;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
@ -28,17 +27,22 @@
import org.apache.hadoop.mapred.MiniMRCluster;
import org.apache.hadoop.mapred.RunningJob;
import org.apache.hadoop.security.authorize.ProxyUsers;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import java.io.IOException;
import java.net.InetAddress;

public class TestNonExistentJob extends TestCase {
import static org.junit.Assert.assertNull;

public class TestNonExistentJob {

private MiniDFSCluster dfsCluster = null;
private MiniMRCluster mrCluster = null;

protected void setUp() throws Exception {
super.setUp();
@Before
public void setUp() throws Exception {
if (System.getProperty("hadoop.log.dir") == null) {
System.setProperty("hadoop.log.dir", "/tmp");
}
@ -78,17 +82,17 @@ protected JobConf getJobConf() {
return mrCluster.createJobConf();
}

@Override
protected void tearDown() throws Exception {
@After
public void tearDown() throws Exception {
if (mrCluster != null) {
mrCluster.shutdown();
}
if (dfsCluster != null) {
dfsCluster.shutdown();
}
super.tearDown();
}

@Test
public void testGetInvalidJob() throws Exception {
RunningJob runJob = new JobClient(getJobConf()).getJob(JobID.forName("job_0_0"));
assertNull(runJob);

@ -42,6 +42,11 @@
import org.apache.hadoop.mapred.SkipBadRecords;
import org.apache.hadoop.mapred.Utils;
import org.apache.hadoop.mapreduce.server.jobtracker.JTConfig;
import org.junit.Before;
import org.junit.Test;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

public class TestStreamingBadRecords extends ClusterMapReduceTestCase
{
@ -68,7 +73,8 @@ public TestStreamingBadRecords() throws IOException
utilTest.redirectIfAntJunit();
}

protected void setUp() throws Exception {
@Before
public void setUp() throws Exception {
Properties props = new Properties();
props.setProperty(JTConfig.JT_RETIREJOBS, "false");
props.setProperty(JTConfig.JT_PERSIST_JOBSTATUS, "false");
@ -242,6 +248,7 @@ public void testNarrowDown() throws Exception {
}
*/

@Test
public void testNoOp() {
// Added to avoid warnings when running this disabled test
}