diff --git a/CHANGES.txt b/CHANGES.txt index c66f5169cb..b79a86d906 100644 --- a/CHANGES.txt +++ b/CHANGES.txt @@ -7,6 +7,9 @@ Trunk (unreleased changes) HADOOP-6730. Bug in FileContext#copy and provide base class for FileContext tests. (Ravi Phulari via jghoman) + HADOOP-6669. Respect compression configuration when creating DefaultCodec + instances. (Koji Noguchi via cdouglas) + Release 0.21.0 - Unreleased INCOMPATIBLE CHANGES diff --git a/src/java/org/apache/hadoop/io/compress/zlib/ZlibFactory.java b/src/java/org/apache/hadoop/io/compress/zlib/ZlibFactory.java index ce2452768b..000518d145 100644 --- a/src/java/org/apache/hadoop/io/compress/zlib/ZlibFactory.java +++ b/src/java/org/apache/hadoop/io/compress/zlib/ZlibFactory.java @@ -86,7 +86,8 @@ public static boolean isNativeZlibLoaded(Configuration conf) { */ public static Compressor getZlibCompressor(Configuration conf) { return (isNativeZlibLoaded(conf)) ? - new ZlibCompressor() : new BuiltInZlibDeflater(); + new ZlibCompressor(conf) : + new BuiltInZlibDeflater(ZlibFactory.getCompressionLevel(conf).compressionLevel()); } /** diff --git a/src/test/core/org/apache/hadoop/io/compress/TestCodec.java b/src/test/core/org/apache/hadoop/io/compress/TestCodec.java index f02d2f0cda..cdb33faaa5 100644 --- a/src/test/core/org/apache/hadoop/io/compress/TestCodec.java +++ b/src/test/core/org/apache/hadoop/io/compress/TestCodec.java @@ -166,6 +166,8 @@ private static void codecTest(Configuration conf, int seed, int count, RandomDatum v2 = new RandomDatum(); k2.readFields(inflateIn); v2.readFields(inflateIn); + assertTrue("original and compressed-then-decompressed-output not equal", + k1.equals(k2) && v1.equals(v2)); } LOG.info("SUCCESS! 
Completed checking " + count + " records"); } @@ -322,6 +324,61 @@ private static void gzipReinitTest(Configuration conf, CompressionCodec codec) outbytes.length >= b.length); } + private static void codecTestWithNOCompression(Configuration conf, + String codecClass) throws IOException { + // Create a compressor with NO_COMPRESSION and make sure that + // output is not compressed by comparing the size with the + // original input + + CompressionCodec codec = null; + ZlibFactory.setCompressionLevel(conf, CompressionLevel.NO_COMPRESSION); + try { + codec = (CompressionCodec) + ReflectionUtils.newInstance(conf.getClassByName(codecClass), conf); + } catch (ClassNotFoundException cnfe) { + throw new IOException("Illegal codec!"); + } + Compressor c = codec.createCompressor(); + // ensure same compressor placed earlier + ByteArrayOutputStream bos = new ByteArrayOutputStream(); + CompressionOutputStream cos = null; + // write trivially compressible data + byte[] b = new byte[1 << 15]; + Arrays.fill(b, (byte) 43); + try { + cos = codec.createOutputStream(bos, c); + cos.write(b); + } finally { + if (cos != null) { + cos.close(); + } + } + byte[] outbytes = bos.toByteArray(); + // verify data were not compressed + assertTrue("Compressed bytes contrary to configuration(NO_COMPRESSION)", + outbytes.length >= b.length); + } + + @Test + public void testCodecInitWithCompressionLevel() throws Exception { + Configuration conf = new Configuration(); + conf.setBoolean("io.native.lib.available", true); + if (ZlibFactory.isNativeZlibLoaded(conf)) { + LOG.info("testCodecInitWithCompressionLevel with native"); + codecTestWithNOCompression(conf, + "org.apache.hadoop.io.compress.GzipCodec"); + codecTestWithNOCompression(conf, + "org.apache.hadoop.io.compress.DefaultCodec"); + } else { + LOG.warn("testCodecInitWithCompressionLevel for native skipped" + + ": native libs not loaded"); + } + conf = new Configuration(); + conf.setBoolean("io.native.lib.available", false); + 
codecTestWithNOCompression(conf, + "org.apache.hadoop.io.compress.DefaultCodec"); + } + @Test public void testCodecPoolCompressorReinit() throws Exception { Configuration conf = new Configuration();