HDFS-11263. ClassCastException when we use Bzipcodec for Fsimage compression. Contributed by Brahma Reddy Battula.
commit 1b401f6a73
parent fe4ff64a4a
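Root cause: the fsimage saver in FSImageFormatProtobuf cast its section stream to the concrete CompressorStream before calling finish(). CompressionCodec.createOutputStream() only promises a CompressionOutputStream; DefaultCodec and GzipCodec happen to hand back a CompressorStream, but the pure-Java BZip2Codec (used whenever native bzip2 is not loaded) returns a BZip2CompressionOutputStream that extends CompressionOutputStream directly, so saving a bzip2-compressed fsimage failed with a ClassCastException. The fix casts to the abstract CompressionOutputStream, where finish() is actually declared, which is valid for every codec.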
--- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatProtobuf.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSImageFormatProtobuf.java
@@ -40,6 +40,7 @@
 import java.util.Map.Entry;
 import java.util.Set;
 
+import org.apache.hadoop.io.compress.CompressionOutputStream;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
@@ -63,7 +64,6 @@
 import org.apache.hadoop.hdfs.util.MD5FileUtils;
 import org.apache.hadoop.io.MD5Hash;
 import org.apache.hadoop.io.compress.CompressionCodec;
-import org.apache.hadoop.io.compress.CompressorStream;
 import org.apache.hadoop.util.LimitInputStream;
 import org.apache.hadoop.util.Time;
 
@@ -417,7 +417,7 @@ public void commitSection(FileSummary.Builder summary, SectionName name)
 
     private void flushSectionOutputStream() throws IOException {
       if (codec != null) {
-        ((CompressorStream) sectionOutputStream).finish();
+        ((CompressionOutputStream) sectionOutputStream).finish();
       }
       sectionOutputStream.flush();
     }
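A minimal standalone sketch of why the old cast failed, assuming only a hadoop-common dependency on the classpath; the class name BZip2CastRepro and the main-method setup are illustrative, not part of this commit (only the org.apache.hadoop types are real):

// Repro sketch (hypothetical, not from the patch): exercise both casts
// against the stream BZip2Codec actually returns.
import java.io.ByteArrayOutputStream;
import java.io.OutputStream;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.BZip2Codec;
import org.apache.hadoop.io.compress.CompressionOutputStream;
import org.apache.hadoop.io.compress.CompressorStream;
import org.apache.hadoop.util.ReflectionUtils;

public class BZip2CastRepro {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    BZip2Codec codec = ReflectionUtils.newInstance(BZip2Codec.class, conf);

    // The codec API only promises the abstract CompressionOutputStream;
    // which concrete subclass comes back is codec-specific.
    OutputStream out = codec.createOutputStream(new ByteArrayOutputStream());

    // Patched cast: finish() is declared on CompressionOutputStream, so this
    // works no matter which codec produced the stream.
    ((CompressionOutputStream) out).finish();

    // Pre-patch cast: the pure-Java BZip2Codec returns a
    // BZip2CompressionOutputStream, which is not a CompressorStream, so this
    // line throws ClassCastException.
    ((CompressorStream) out).finish();
  }
}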
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFSImage.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/server/namenode/TestFSImage.java
@@ -52,7 +52,6 @@
 import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.HdfsConfiguration;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
-import org.apache.hadoop.hdfs.StripedFileTestUtil;
 import org.apache.hadoop.hdfs.client.HdfsDataOutputStream.SyncFlag;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants;
 import org.apache.hadoop.hdfs.protocol.HdfsConstants.SafeModeAction;
@@ -84,8 +83,15 @@ public void testPersist() throws IOException {
   public void testCompression() throws IOException {
     Configuration conf = new Configuration();
     conf.setBoolean(DFSConfigKeys.DFS_IMAGE_COMPRESS_KEY, true);
-    conf.set(DFSConfigKeys.DFS_IMAGE_COMPRESSION_CODEC_KEY,
-        "org.apache.hadoop.io.compress.GzipCodec");
+    setCompressCodec(conf, "org.apache.hadoop.io.compress.DefaultCodec");
+    setCompressCodec(conf, "org.apache.hadoop.io.compress.GzipCodec");
+    setCompressCodec(conf, "org.apache.hadoop.io.compress.BZip2Codec");
+    setCompressCodec(conf, "org.apache.hadoop.io.compress.Lz4Codec");
+  }
+
+  private void setCompressCodec(Configuration conf, String compressCodec)
+      throws IOException {
+    conf.set(DFSConfigKeys.DFS_IMAGE_COMPRESSION_CODEC_KEY, compressCodec);
     testPersistHelper(conf);
   }
 
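The reworked test replaces the single hard-coded GzipCodec run with a setCompressCodec helper that drives testPersistHelper once per codec, so DefaultCodec, GzipCodec, BZip2Codec, and Lz4Codec each save and reload an fsimage; before this patch, the BZip2Codec pass reproduced the ClassCastException. To run just this test, the standard Surefire single-test invocation (not something the commit itself documents) is `mvn test -Dtest=TestFSImage#testCompression` from hadoop-hdfs-project/hadoop-hdfs.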