MAPREDUCE-5640. Rename TestLineRecordReader in jobclient module (Jason Lowe via jeagles)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1547149 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Jonathan Turner Eagles 2013-12-02 19:04:51 +00:00
parent a4e473c6a9
commit 74851537de
3 changed files with 15 additions and 14 deletions

View File

@@ -1512,6 +1512,9 @@ Release 0.23.10 - UNRELEASED
IMPROVEMENTS IMPROVEMENTS
MAPREDUCE-5640. Rename TestLineRecordReader in jobclient module (Jason Lowe
via jeagles)
OPTIMIZATIONS OPTIMIZATIONS
MAPREDUCE-1981. Improve getSplits performance by using listLocatedStatus MAPREDUCE-1981. Improve getSplits performance by using listLocatedStatus

View File

@@ -17,22 +17,20 @@
package org.apache.hadoop.mapred; package org.apache.hadoop.mapred;
import static org.junit.Assert.assertEquals;
import java.io.IOException; import java.io.IOException;
import java.io.OutputStreamWriter; import java.io.OutputStreamWriter;
import java.io.Writer; import java.io.Writer;
import junit.framework.TestCase;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.lib.IdentityMapper; import org.apache.hadoop.mapred.lib.IdentityMapper;
import org.apache.hadoop.mapred.lib.IdentityReducer; import org.apache.hadoop.mapred.lib.IdentityReducer;
import org.junit.Test; import org.junit.Test;
public class TestLineRecordReader extends TestCase { public class TestLineRecordReaderJobs {
private static Path workDir = new Path(new Path(System.getProperty( private static Path workDir = new Path(new Path(System.getProperty(
"test.build.data", "."), "data"), "TestTextInputFormat"); "test.build.data", "."), "data"), "TestTextInputFormat");
@@ -77,7 +75,7 @@ public String readOutputFile(Configuration conf) throws IOException {
public void createAndRunJob(Configuration conf) throws IOException, public void createAndRunJob(Configuration conf) throws IOException,
InterruptedException, ClassNotFoundException { InterruptedException, ClassNotFoundException {
JobConf job = new JobConf(conf); JobConf job = new JobConf(conf);
job.setJarByClass(TestLineRecordReader.class); job.setJarByClass(TestLineRecordReaderJobs.class);
job.setMapperClass(IdentityMapper.class); job.setMapperClass(IdentityMapper.class);
job.setReducerClass(IdentityReducer.class); job.setReducerClass(IdentityReducer.class);
FileInputFormat.addInputPath(job, inputDir); FileInputFormat.addInputPath(job, inputDir);
@@ -106,7 +104,7 @@ public void testCustomRecordDelimiters() throws IOException,
createInputFile(conf); createInputFile(conf);
createAndRunJob(conf); createAndRunJob(conf);
String expected = "0\tabc\ndef\n9\tghi\njkl\n"; String expected = "0\tabc\ndef\n9\tghi\njkl\n";
this.assertEquals(expected, readOutputFile(conf)); assertEquals(expected, readOutputFile(conf));
} }
/** /**
@@ -128,7 +126,7 @@ public void testDefaultRecordDelimiters() throws IOException,
createInputFile(conf); createInputFile(conf);
createAndRunJob(conf); createAndRunJob(conf);
String expected = "0\tabc\n4\tdef\t\n9\tghi\n13\tjkl\n"; String expected = "0\tabc\n4\tdef\t\n9\tghi\n13\tjkl\n";
this.assertEquals(expected, readOutputFile(conf)); assertEquals(expected, readOutputFile(conf));
} }
} }

View File

@@ -18,12 +18,12 @@
package org.apache.hadoop.mapreduce.lib.input; package org.apache.hadoop.mapreduce.lib.input;
import static org.junit.Assert.assertEquals;
import java.io.IOException; import java.io.IOException;
import java.io.OutputStreamWriter; import java.io.OutputStreamWriter;
import java.io.Writer; import java.io.Writer;
import junit.framework.TestCase;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
@@ -34,7 +34,7 @@
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
import org.junit.Test; import org.junit.Test;
public class TestLineRecordReader extends TestCase { public class TestLineRecordReaderJobs {
private static Path workDir = new Path(new Path(System.getProperty( private static Path workDir = new Path(new Path(System.getProperty(
"test.build.data", "."), "data"), "TestTextInputFormat"); "test.build.data", "."), "data"), "TestTextInputFormat");
@@ -79,7 +79,7 @@ public String readOutputFile(Configuration conf) throws IOException {
public void createAndRunJob(Configuration conf) throws IOException, public void createAndRunJob(Configuration conf) throws IOException,
InterruptedException, ClassNotFoundException { InterruptedException, ClassNotFoundException {
Job job = Job.getInstance(conf); Job job = Job.getInstance(conf);
job.setJarByClass(TestLineRecordReader.class); job.setJarByClass(TestLineRecordReaderJobs.class);
job.setMapperClass(Mapper.class); job.setMapperClass(Mapper.class);
job.setReducerClass(Reducer.class); job.setReducerClass(Reducer.class);
FileInputFormat.addInputPath(job, inputDir); FileInputFormat.addInputPath(job, inputDir);
@@ -107,7 +107,7 @@ public void testCustomRecordDelimiters() throws IOException,
createInputFile(conf); createInputFile(conf);
createAndRunJob(conf); createAndRunJob(conf);
String expected = "0\tabc\ndef\n9\tghi\njkl\n"; String expected = "0\tabc\ndef\n9\tghi\njkl\n";
this.assertEquals(expected, readOutputFile(conf)); assertEquals(expected, readOutputFile(conf));
} }
/** /**
@@ -129,7 +129,7 @@ public void testDefaultRecordDelimiters() throws IOException,
createInputFile(conf); createInputFile(conf);
createAndRunJob(conf); createAndRunJob(conf);
String expected = "0\tabc\n4\tdef\t\n9\tghi\n13\tjkl\n"; String expected = "0\tabc\n4\tdef\t\n9\tghi\n13\tjkl\n";
this.assertEquals(expected, readOutputFile(conf)); assertEquals(expected, readOutputFile(conf));
} }
} }