HADOOP-18390. Fix out of sync import for HADOOP-18321 (#4694)
Co-authored-by: Ashutosh Gupta <ashugpt@amazon.com>
Signed-off-by: Akira Ajisaka <aajisaka@apache.org>
commit bd0f9a46e1
parent b5642c5638
@@ -18,9 +18,6 @@
 
 package org.apache.hadoop.io.compress.bzip2;
 
-import static org.apache.hadoop.io.compress.bzip2.CBZip2OutputStream.MIN_BLOCKSIZE;
-import static org.apache.hadoop.util.Preconditions.checkArgument;
-
 import java.io.Closeable;
 import java.io.IOException;
 import java.io.OutputStream;
@@ -30,6 +27,9 @@
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.compress.BZip2Codec;
 
+import static org.apache.hadoop.io.compress.bzip2.CBZip2OutputStream.MIN_BLOCKSIZE;
+import static org.apache.hadoop.util.Preconditions.checkArgument;
+
 /**
  * A writer that simplifies creating BZip2 compressed text data for testing
  * purposes.
@@ -18,8 +18,6 @@
 
 package org.apache.hadoop.io.compress.bzip2;
 
-import static org.apache.hadoop.io.compress.SplittableCompressionCodec.READ_MODE.BYBLOCK;
-
 import java.io.IOException;
 import java.io.InputStream;
 import java.util.ArrayList;
@@ -29,6 +27,8 @@
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 
+import static org.apache.hadoop.io.compress.SplittableCompressionCodec.READ_MODE.BYBLOCK;
+
 public final class BZip2Utils {
 
   private BZip2Utils() {
@@ -17,9 +17,6 @@
  */
 package org.apache.hadoop.io.compress.bzip2;
 
-import static org.apache.hadoop.io.compress.bzip2.BZip2TextFileWriter.BLOCK_SIZE;
-import static org.junit.Assert.assertEquals;
-
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
@@ -29,6 +26,9 @@
 import org.junit.Before;
 import org.junit.Test;
 
+import static org.apache.hadoop.io.compress.bzip2.BZip2TextFileWriter.BLOCK_SIZE;
+import static org.junit.Assert.assertEquals;
+
 public final class TestBZip2TextFileWriter {
 
   private static final byte[] DELIMITER = new byte[] {'\0'};
@@ -17,24 +17,25 @@
  */
 package org.apache.hadoop.mapreduce.lib.input;
 
-import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_KEY;
-import static org.apache.hadoop.io.compress.bzip2.BZip2TextFileWriter.BLOCK_SIZE;
-import static org.junit.Assert.assertEquals;
-
 import java.io.IOException;
 import java.nio.charset.StandardCharsets;
 import java.util.Arrays;
 import java.util.List;
 
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.compress.bzip2.BZip2Utils;
-import org.apache.hadoop.io.compress.bzip2.BZip2TextFileWriter;
 import org.junit.After;
 import org.junit.Before;
 import org.junit.Test;
 
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.compress.bzip2.BZip2TextFileWriter;
+import org.apache.hadoop.io.compress.bzip2.BZip2Utils;
+
+import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_KEY;
+import static org.apache.hadoop.io.compress.bzip2.BZip2TextFileWriter.BLOCK_SIZE;
+import static org.junit.Assert.assertEquals;
+
 public abstract class BaseTestLineRecordReaderBZip2 {
 
   // LF stands for line feed
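
For reference, below is the import section of the last file above as it reads after this patch, reproduced in one piece. The ordering the commit converges on is: regular imports first (grouped), then static imports in a final group. The group comments and the stubbed class body are illustrative additions, not lines from the actual source file.

package org.apache.hadoop.mapreduce.lib.input;

// Group 1: java.* imports.
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.List;

// Group 2: remaining non-static imports (as grouped in this file: org.junit, then org.apache.hadoop).
import org.junit.After;
import org.junit.Before;
import org.junit.Test;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.compress.bzip2.BZip2TextFileWriter;
import org.apache.hadoop.io.compress.bzip2.BZip2Utils;

// Group 3: static imports, placed last.
import static org.apache.hadoop.fs.CommonConfigurationKeysPublic.IO_FILE_BUFFER_SIZE_KEY;
import static org.apache.hadoop.io.compress.bzip2.BZip2TextFileWriter.BLOCK_SIZE;
import static org.junit.Assert.assertEquals;

public abstract class BaseTestLineRecordReaderBZip2 {
  // Class body omitted here; it is untouched by this commit.
}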