MAPREDUCE-5978. native-task: remove test case for not supported codec Bzip2Codec and DefaultCodec (Manu Zhang)
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/MR-2841@1616116 13f79535-47bb-0310-9956-ffa450edef68
parent 432f641bc2
commit 4d4fb1723f
@@ -10,4 +10,5 @@ MAPREDUCE-5991. native-task should not run unit tests if native profile is not e
 MAPREDUCE-5995. native-task: Revert changes to Text internals (todd)
 MAPREDUCE-6005. native-task: Fix some valgrind errors (Binglin Chang)
 MAPREDUCE-5984. native-task: Reuse lz4 sources in hadoop-common (Binglin Chang)
 MAPREDUCE-5976. native-task: should not fail to build if snappy is missing (Manu Zhang)
+MAPREDUCE-5978. native-task: remove test case for not supported codec Bzip2Codec and DefaultCodec (Manu Zhang)
@@ -67,23 +67,6 @@ public void testGzipCompress() throws Exception {
     assertEquals("file compare result: if they are the same ,then return true", true, compareRet);
   }
-
-  @Test
-  public void testBzip2Compress() throws Exception {
-    final Configuration nativeconf = ScenarioConfiguration.getNativeConfiguration();
-    nativeconf.addResource(TestConstants.BZIP2_COMPRESS_CONF_PATH);
-    final Job nativejob = CompressMapper.getCompressJob("nativebzip2", nativeconf);
-    nativejob.waitForCompletion(true);
-
-    final Configuration hadoopconf = ScenarioConfiguration.getNormalConfiguration();
-    hadoopconf.addResource(TestConstants.BZIP2_COMPRESS_CONF_PATH);
-    final Job hadoopjob = CompressMapper.getCompressJob("hadoopbzip2", hadoopconf);
-    hadoopjob.waitForCompletion(true);
-
-    final boolean compareRet = ResultVerifier.verify(CompressMapper.outputFileDir + "nativebzip2",
-        CompressMapper.outputFileDir + "hadoopbzip2");
-    assertEquals("file compare result: if they are the same ,then return true", true, compareRet);
-  }
 
   @Test
   public void testLz4Compress() throws Exception {
     final Configuration nativeConf = ScenarioConfiguration.getNativeConfiguration();
@@ -100,22 +83,6 @@ public void testLz4Compress() throws Exception {
     assertEquals("file compare result: if they are the same ,then return true", true, compareRet);
   }
-
-  @Test
-  public void testDefaultCompress() throws Exception {
-    final Configuration nativeConf = ScenarioConfiguration.getNativeConfiguration();
-    nativeConf.addResource(TestConstants.DEFAULT_COMPRESS_CONF_PATH);
-    final Job nativeJob = CompressMapper.getCompressJob("nativedefault", nativeConf);
-    nativeJob.waitForCompletion(true);
-
-    final Configuration hadoopConf = ScenarioConfiguration.getNormalConfiguration();
-    hadoopConf.addResource(TestConstants.DEFAULT_COMPRESS_CONF_PATH);
-    final Job hadoopJob = CompressMapper.getCompressJob("hadoopdefault", hadoopConf);
-    hadoopJob.waitForCompletion(true);
-    final boolean compareRet = ResultVerifier.verify(CompressMapper.outputFileDir + "nativedefault",
-        CompressMapper.outputFileDir + "hadoopdefault");
-    assertEquals("file compare result: if they are the same ,then return true", true, compareRet);
-  }
 
   @Before
   public void startUp() throws Exception {
     final ScenarioConfiguration conf = new ScenarioConfiguration();
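For context, both deleted methods follow the same round-trip pattern as the remaining codec tests in this class: run CompressMapper once with the native-task collector and once with the normal MapReduce path, then verify the two outputs match. A minimal sketch of that shared pattern, as a hypothetical helper that is not part of this patch (method name and parameters are illustrative; it assumes the test class's existing imports and the ScenarioConfiguration, CompressMapper, and ResultVerifier helpers shown above):

  // Hypothetical helper sketching the shared structure of these codec tests; not part of the patch.
  private void runCodecRoundTrip(String confPath, String suffix) throws Exception {
    // Run the job with the native-task collector enabled.
    final Configuration nativeConf = ScenarioConfiguration.getNativeConfiguration();
    nativeConf.addResource(confPath);
    final Job nativeJob = CompressMapper.getCompressJob("native" + suffix, nativeConf);
    nativeJob.waitForCompletion(true);

    // Run the same job with the normal (Java) MapReduce path.
    final Configuration hadoopConf = ScenarioConfiguration.getNormalConfiguration();
    hadoopConf.addResource(confPath);
    final Job hadoopJob = CompressMapper.getCompressJob("hadoop" + suffix, hadoopConf);
    hadoopJob.waitForCompletion(true);

    // The two outputs must be identical regardless of which collector produced them.
    final boolean compareRet = ResultVerifier.verify(
        CompressMapper.outputFileDir + "native" + suffix,
        CompressMapper.outputFileDir + "hadoop" + suffix);
    assertEquals("file compare result: if they are the same, then return true", true, compareRet);
  }

The removed testBzip2Compress and testDefaultCompress methods were this pattern pointed at codecs the native collector does not support, which is why the patch drops them rather than fixing them.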
@@ -1,39 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-
-<!-- Put site-specific property overrides in this file. -->
-
-<configuration>
-  <property>
-    <name>mapred.output.compress</name>
-    <value>false</value>
-  </property>
-  <property>
-    <name>nativetask.compress.filesize</name>
-    <value>100000</value>
-  </property>
-  <property>
-    <name>mapreduce.reduce.class</name>
-    <value>org.apache.hadoop.mapred.nativetask.kvtest.HashSumReducer</value>
-  </property>
-  <property>
-    <name>mapred.output.value.class</name>
-    <value>org.apache.hadoop.io.IntWritable</value>
-  </property>
-  <property>
-    <name>mapred.compress.map.output</name>
-    <value>true</value>
-  </property>
-  <property>
-    <name>mapred.output.compression.type</name>
-    <value>BLOCK</value>
-  </property>
-  <property>
-    <name>mapred.map.output.compression.codec</name>
-    <value>org.apache.hadoop.io.compress.BZip2Codec</value>
-  </property>
-  <property>
-    <name>hadoop.native.lib</name>
-    <value>true</value>
-  </property>
-</configuration>
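A conf file like the one deleted above is consumed by adding it as a Configuration resource and resolving the configured map-output codec class. The following is a minimal, hypothetical sketch of that (the resource name test-bzip2-compress-conf.xml and the class name CodecConfProbe are illustrative, not taken from this patch; in the real test the path comes from TestConstants):

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.io.compress.CompressionCodec;
  import org.apache.hadoop.io.compress.DefaultCodec;
  import org.apache.hadoop.util.ReflectionUtils;

  public class CodecConfProbe {
    public static void main(String[] args) {
      Configuration conf = new Configuration();
      // Illustrative resource name; the real test wires the path in through TestConstants.
      conf.addResource("test-bzip2-compress-conf.xml");
      // Resolve the map-output codec class configured by the XML above.
      Class<? extends CompressionCodec> codecClass = conf.getClass(
          "mapred.map.output.compression.codec", DefaultCodec.class, CompressionCodec.class);
      // Instantiate it the same way the MapReduce framework would.
      CompressionCodec codec = ReflectionUtils.newInstance(codecClass, conf);
      System.out.println("map output codec: " + codec.getClass().getName());
    }
  }

Note that BZip2Codec itself instantiates fine on the Java side; the removal here is about the native-task collector not supporting it, not about the codec class being unavailable.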
@@ -1,39 +0,0 @@
-<?xml version="1.0"?>
-<?xml-stylesheet type="text/xsl" href="configuration.xsl"?>
-
-<!-- Put site-specific property overrides in this file. -->
-
-<configuration>
-  <property>
-    <name>mapred.output.compress</name>
-    <value>false</value>
-  </property>
-  <property>
-    <name>mapred.output.value.class</name>
-    <value>org.apache.hadoop.io.IntWritable</value>
-  </property>
-  <property>
-    <name>nativetask.compress.filesize</name>
-    <value>100000</value>
-  </property>
-  <property>
-    <name>mapreduce.reduce.class</name>
-    <value>org.apache.hadoop.mapred.nativetask.kvtest.HashSumReducer</value>
-  </property>
-  <property>
-    <name>mapred.compress.map.output</name>
-    <value>true</value>
-  </property>
-  <property>
-    <name>mapred.output.compression.type</name>
-    <value>BLOCK</value>
-  </property>
-  <property>
-    <name>mapred.map.output.compression.codec</name>
-    <value>org.apache.hadoop.io.compress.DefaultCodec</value>
-  </property>
-  <property>
-    <name>hadoop.native.lib</name>
-    <value>true</value>
-  </property>
-</configuration>
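Both deleted conf files use pre-Hadoop-2 property names (mapred.compress.map.output, mapred.map.output.compression.codec, mapred.output.compress, mapred.output.compression.type, hadoop.native.lib). Purely as a hedged reference, and not part of this patch, the same settings expressed programmatically with the current key names would look roughly like this (class name and usage are illustrative):

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.io.compress.CompressionCodec;
  import org.apache.hadoop.io.compress.DefaultCodec;

  public class MapOutputCompressionConf {
    public static void main(String[] args) {
      Configuration conf = new Configuration();
      conf.setBoolean("mapreduce.map.output.compress", true);               // mapred.compress.map.output
      conf.setClass("mapreduce.map.output.compress.codec",                  // mapred.map.output.compression.codec
          DefaultCodec.class, CompressionCodec.class);
      conf.setBoolean("mapreduce.output.fileoutputformat.compress", false); // mapred.output.compress
      conf.set("mapreduce.output.fileoutputformat.compress.type", "BLOCK"); // mapred.output.compression.type
      conf.setBoolean("io.native.lib.available", true);                     // hadoop.native.lib
      System.out.println(conf.get("mapreduce.map.output.compress.codec"));
    }
  }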