diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index 248bb5f424..45bfaa7c0f 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -348,6 +348,9 @@ Release 2.3.0 - UNRELEASED
HADOOP-9225. Cover package org.apache.hadoop.compress.Snappy (Vadim
Bondarev, Andrey Klochkov and Nathan Roberts via jlowe)
+ HADOOP-9199. Cover package org.apache.hadoop.io with unit tests (Andrey
+ Klochkov via jeagles)
+
OPTIMIZATIONS
HADOOP-9748. Reduce blocking on UGI.ensureInitialized (daryn)
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayFile.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayFile.java
index 2cbae5b06f..ecf1db0501 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayFile.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayFile.java
@@ -19,18 +19,23 @@
package org.apache.hadoop.io;
import java.io.*;
+
import junit.framework.TestCase;
import org.apache.commons.logging.*;
-
import org.apache.hadoop.fs.*;
+import org.apache.hadoop.io.SequenceFile.CompressionType;
+import org.apache.hadoop.util.Progressable;
import org.apache.hadoop.conf.*;
/** Support for flat files of binary key/value pairs. */
public class TestArrayFile extends TestCase {
private static final Log LOG = LogFactory.getLog(TestArrayFile.class);
- private static String FILE =
- System.getProperty("test.build.data",".") + "/test.array";
+
+ private static final Path TEST_DIR = new Path(
+ System.getProperty("test.build.data", "/tmp"),
+      TestArrayFile.class.getSimpleName());
+ private static String TEST_FILE = new Path(TEST_DIR, "test.array").toString();
public TestArrayFile(String name) {
super(name);
@@ -40,15 +45,15 @@ public class TestArrayFile extends TestCase {
Configuration conf = new Configuration();
FileSystem fs = FileSystem.getLocal(conf);
RandomDatum[] data = generate(10000);
- writeTest(fs, data, FILE);
- readTest(fs, data, FILE, conf);
+ writeTest(fs, data, TEST_FILE);
+ readTest(fs, data, TEST_FILE, conf);
}
public void testEmptyFile() throws Exception {
Configuration conf = new Configuration();
FileSystem fs = FileSystem.getLocal(conf);
- writeTest(fs, new RandomDatum[0], FILE);
- ArrayFile.Reader reader = new ArrayFile.Reader(fs, FILE, conf);
+ writeTest(fs, new RandomDatum[0], TEST_FILE);
+ ArrayFile.Reader reader = new ArrayFile.Reader(fs, TEST_FILE, conf);
assertNull(reader.get(0, new RandomDatum()));
reader.close();
}
@@ -87,31 +92,75 @@ public class TestArrayFile extends TestCase {
LOG.debug("reading " + data.length + " debug");
}
ArrayFile.Reader reader = new ArrayFile.Reader(fs, file, conf);
- for (int i = 0; i < data.length; i++) { // try forwards
- reader.get(i, v);
- if (!v.equals(data[i])) {
- throw new RuntimeException("wrong value at " + i);
+ try {
+ for (int i = 0; i < data.length; i++) { // try forwards
+ reader.get(i, v);
+ if (!v.equals(data[i])) {
+ throw new RuntimeException("wrong value at " + i);
+ }
}
- }
- for (int i = data.length-1; i >= 0; i--) { // then backwards
- reader.get(i, v);
- if (!v.equals(data[i])) {
- throw new RuntimeException("wrong value at " + i);
+ for (int i = data.length-1; i >= 0; i--) { // then backwards
+ reader.get(i, v);
+ if (!v.equals(data[i])) {
+ throw new RuntimeException("wrong value at " + i);
+ }
}
- }
- reader.close();
- if(LOG.isDebugEnabled()) {
- LOG.debug("done reading " + data.length + " debug");
+ if(LOG.isDebugEnabled()) {
+ LOG.debug("done reading " + data.length + " debug");
+ }
+ } finally {
+ reader.close();
}
}
-
+ /**
+ * test on {@link ArrayFile.Reader} iteration methods
+ *
+ * {@code next(), seek()} in and out of range.
+ *
+ */
+ public void testArrayFileIteration() {
+ int SIZE = 10;
+ Configuration conf = new Configuration();
+ try {
+ FileSystem fs = FileSystem.get(conf);
+ ArrayFile.Writer writer = new ArrayFile.Writer(conf, fs, TEST_FILE,
+ LongWritable.class, CompressionType.RECORD, defaultProgressable);
+ assertNotNull("testArrayFileIteration error !!!", writer);
+
+ for (int i = 0; i < SIZE; i++)
+ writer.append(new LongWritable(i));
+
+ writer.close();
+
+ ArrayFile.Reader reader = new ArrayFile.Reader(fs, TEST_FILE, conf);
+ LongWritable nextWritable = new LongWritable(0);
+
+ for (int i = 0; i < SIZE; i++) {
+ nextWritable = (LongWritable)reader.next(nextWritable);
+ assertEquals(nextWritable.get(), i);
+ }
+
+ assertTrue("testArrayFileIteration seek error !!!",
+ reader.seek(new LongWritable(6)));
+ nextWritable = (LongWritable) reader.next(nextWritable);
+ assertTrue("testArrayFileIteration error !!!", reader.key() == 7);
+ assertTrue("testArrayFileIteration error !!!",
+ nextWritable.equals(new LongWritable(7)));
+ assertFalse("testArrayFileIteration error !!!",
+ reader.seek(new LongWritable(SIZE + 5)));
+ reader.close();
+ } catch (Exception ex) {
+ fail("testArrayFileWriterConstruction error !!!");
+ }
+ }
+
/** For debugging and testing. */
public static void main(String[] args) throws Exception {
int count = 1024 * 1024;
boolean create = true;
boolean check = true;
- String file = FILE;
+ String file = TEST_FILE;
String usage = "Usage: TestArrayFile [-count N] [-nocreate] [-nocheck] file";
if (args.length == 0) {
@@ -160,4 +209,11 @@ public class TestArrayFile extends TestCase {
fs.close();
}
}
+
+ private static final Progressable defaultProgressable = new Progressable() {
+ @Override
+ public void progress() {
+ }
+ };
+
}
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayWritable.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayWritable.java
index 47d0ce9f63..a2008db805 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayWritable.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestArrayWritable.java
@@ -1,4 +1,4 @@
-/**
+/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
@@ -20,6 +20,8 @@ package org.apache.hadoop.io;
import java.io.*;
+import org.junit.Assert;
+
import junit.framework.TestCase;
/** Unit tests for ArrayWritable */
@@ -61,4 +63,50 @@ public class TestArrayWritable extends TestCase {
assertEquals(destElements[i],elements[i]);
}
}
+
+ /**
+ * test {@link ArrayWritable} toArray() method
+ */
+ public void testArrayWritableToArray() {
+ Text[] elements = {new Text("zero"), new Text("one"), new Text("two")};
+ TextArrayWritable arrayWritable = new TextArrayWritable();
+ arrayWritable.set(elements);
+ Object array = arrayWritable.toArray();
+
+ assertTrue("TestArrayWritable testArrayWritableToArray error!!! ", array instanceof Text[]);
+ Text[] destElements = (Text[]) array;
+
+ for (int i = 0; i < elements.length; i++) {
+ assertEquals(destElements[i], elements[i]);
+ }
+ }
+
+ /**
+ * test {@link ArrayWritable} constructor with null
+ */
+ public void testNullArgument() {
+ try {
+      Class<? extends Writable> valueClass = null;
+ new ArrayWritable(valueClass);
+ fail("testNullArgument error !!!");
+ } catch (IllegalArgumentException exp) {
+ //should be for test pass
+ } catch (Exception e) {
+ fail("testNullArgument error !!!");
+ }
+ }
+
+ /**
+ * test {@link ArrayWritable} constructor with {@code String[]} as a parameter
+ */
+ @SuppressWarnings("deprecation")
+ public void testArrayWritableStringConstructor() {
+ String[] original = { "test1", "test2", "test3" };
+ ArrayWritable arrayWritable = new ArrayWritable(original);
+ assertEquals("testArrayWritableStringConstructor class error!!!",
+ UTF8.class, arrayWritable.getValueClass());
+ Assert.assertArrayEquals("testArrayWritableStringConstructor toString error!!!",
+ original, arrayWritable.toStrings());
+ }
+
}
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBloomMapFile.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBloomMapFile.java
index 9cf0bbc2fa..b7b084abcb 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBloomMapFile.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBloomMapFile.java
@@ -18,28 +18,53 @@
package org.apache.hadoop.io;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
+import junit.framework.TestCase;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.hadoop.fs.Path;
-
-import junit.framework.TestCase;
+import org.apache.hadoop.io.SequenceFile.CompressionType;
+import org.apache.hadoop.io.compress.CompressionCodec;
+import org.apache.hadoop.io.compress.CompressionInputStream;
+import org.apache.hadoop.io.compress.CompressionOutputStream;
+import org.apache.hadoop.io.compress.Compressor;
+import org.apache.hadoop.io.compress.Decompressor;
+import org.apache.hadoop.util.Progressable;
+import org.junit.Assert;
public class TestBloomMapFile extends TestCase {
private static Configuration conf = new Configuration();
+ private static final Path TEST_ROOT = new Path(
+ System.getProperty("test.build.data", "/tmp"),
+      TestBloomMapFile.class.getSimpleName());
+ private static final Path TEST_DIR = new Path(TEST_ROOT, "testfile");
+ private static final Path TEST_FILE = new Path(TEST_ROOT, "testfile");
+
+ @Override
+ public void setUp() throws Exception {
+ LocalFileSystem fs = FileSystem.getLocal(conf);
+ if (fs.exists(TEST_ROOT) && !fs.delete(TEST_ROOT, true)) {
+ Assert.fail("Can't clean up test root dir");
+ }
+ fs.mkdirs(TEST_ROOT);
+ }
+ @SuppressWarnings("deprecation")
public void testMembershipTest() throws Exception {
// write the file
- Path dirName = new Path(System.getProperty("test.build.data",".") +
- getName() + ".bloommapfile");
FileSystem fs = FileSystem.getLocal(conf);
- Path qualifiedDirName = fs.makeQualified(dirName);
+ Path qualifiedDirName = fs.makeQualified(TEST_DIR);
conf.setInt("io.mapfile.bloom.size", 2048);
BloomMapFile.Writer writer = new BloomMapFile.Writer(conf, fs,
- qualifiedDirName.toString(), IntWritable.class, Text.class);
+ qualifiedDirName.toString(), IntWritable.class, Text.class);
IntWritable key = new IntWritable();
Text value = new Text();
for (int i = 0; i < 2000; i += 2) {
@@ -48,7 +73,7 @@ public class TestBloomMapFile extends TestCase {
writer.append(key, value);
}
writer.close();
-
+
BloomMapFile.Reader reader = new BloomMapFile.Reader(fs,
qualifiedDirName.toString(), conf);
// check false positives rate
@@ -58,9 +83,11 @@ public class TestBloomMapFile extends TestCase {
key.set(i);
boolean exists = reader.probablyHasKey(key);
if (i % 2 == 0) {
- if (!exists) falseNeg++;
+ if (!exists)
+ falseNeg++;
} else {
- if (exists) falsePos++;
+ if (exists)
+ falsePos++;
}
}
reader.close();
@@ -71,13 +98,13 @@ public class TestBloomMapFile extends TestCase {
assertTrue(falsePos < 2);
}
-  private void checkMembershipVaryingSizedKeys(String name, List<Text> keys) throws Exception {
- Path dirName = new Path(System.getProperty("test.build.data",".") +
- name + ".bloommapfile");
+ @SuppressWarnings("deprecation")
+  private void checkMembershipVaryingSizedKeys(String name, List<Text> keys)
+ throws Exception {
FileSystem fs = FileSystem.getLocal(conf);
- Path qualifiedDirName = fs.makeQualified(dirName);
+ Path qualifiedDirName = fs.makeQualified(TEST_DIR);
BloomMapFile.Writer writer = new BloomMapFile.Writer(conf, fs,
- qualifiedDirName.toString(), Text.class, NullWritable.class);
+ qualifiedDirName.toString(), Text.class, NullWritable.class);
for (Text key : keys) {
writer.append(key, NullWritable.get());
}
@@ -88,7 +115,8 @@ public class TestBloomMapFile extends TestCase {
qualifiedDirName.toString(), conf);
Collections.reverse(keys);
for (Text key : keys) {
- assertTrue("False negative for existing key " + key, reader.probablyHasKey(key));
+ assertTrue("False negative for existing key " + key,
+ reader.probablyHasKey(key));
}
reader.close();
fs.delete(qualifiedDirName, true);
@@ -108,4 +136,171 @@ public class TestBloomMapFile extends TestCase {
checkMembershipVaryingSizedKeys(getName(), list);
}
+ /**
+ * test {@code BloomMapFile.delete()} method
+ */
+ public void testDeleteFile() {
+ try {
+ FileSystem fs = FileSystem.getLocal(conf);
+ BloomMapFile.Writer writer = new BloomMapFile.Writer(conf, TEST_FILE,
+ MapFile.Writer.keyClass(IntWritable.class),
+ MapFile.Writer.valueClass(Text.class));
+ assertNotNull("testDeleteFile error !!!", writer);
+ BloomMapFile.delete(fs, "." + TEST_FILE);
+ } catch (Exception ex) {
+ fail("unexpect ex in testDeleteFile !!!");
+ }
+ }
+
+ /**
+ * test {@link BloomMapFile.Reader} constructor with
+ * IOException
+ */
+ public void testIOExceptionInWriterConstructor() {
+ Path dirNameSpy = org.mockito.Mockito.spy(TEST_FILE);
+ try {
+ BloomMapFile.Writer writer = new BloomMapFile.Writer(conf, TEST_FILE,
+ MapFile.Writer.keyClass(IntWritable.class),
+ MapFile.Writer.valueClass(Text.class));
+ writer.append(new IntWritable(1), new Text("123124142"));
+ writer.close();
+
+ org.mockito.Mockito.when(dirNameSpy.getFileSystem(conf)).thenThrow(
+ new IOException());
+ BloomMapFile.Reader reader = new BloomMapFile.Reader(dirNameSpy, conf,
+ MapFile.Reader.comparator(new WritableComparator(IntWritable.class)));
+
+ assertNull("testIOExceptionInWriterConstructor error !!!",
+ reader.getBloomFilter());
+ reader.close();
+ } catch (Exception ex) {
+ fail("unexpect ex in testIOExceptionInWriterConstructor !!!");
+ }
+ }
+
+ /**
+ * test {@link BloomMapFile.Reader.get()} method
+ */
+ public void testGetBloomMapFile() {
+ int SIZE = 10;
+ try {
+ BloomMapFile.Writer writer = new BloomMapFile.Writer(conf, TEST_FILE,
+ MapFile.Writer.keyClass(IntWritable.class),
+ MapFile.Writer.valueClass(Text.class));
+
+ for (int i = 0; i < SIZE; i++) {
+ writer.append(new IntWritable(i), new Text());
+ }
+ writer.close();
+
+ BloomMapFile.Reader reader = new BloomMapFile.Reader(TEST_FILE, conf,
+ MapFile.Reader.comparator(new WritableComparator(IntWritable.class)));
+
+ for (int i = 0; i < SIZE; i++) {
+ assertNotNull("testGetBloomMapFile error !!!",
+ reader.get(new IntWritable(i), new Text()));
+ }
+
+ assertNull("testGetBloomMapFile error !!!",
+ reader.get(new IntWritable(SIZE + 5), new Text()));
+ reader.close();
+ } catch (Exception ex) {
+ fail("unexpect ex in testGetBloomMapFile !!!");
+ }
+ }
+
+ /**
+ * test {@code BloomMapFile.Writer} constructors
+ */
+ @SuppressWarnings("deprecation")
+ public void testBloomMapFileConstructors() {
+ try {
+ FileSystem ts = FileSystem.get(conf);
+ String testFileName = TEST_FILE.toString();
+ BloomMapFile.Writer writer1 = new BloomMapFile.Writer(conf, ts,
+ testFileName, IntWritable.class, Text.class, CompressionType.BLOCK,
+ defaultCodec, defaultProgress);
+ assertNotNull("testBloomMapFileConstructors error !!!", writer1);
+ BloomMapFile.Writer writer2 = new BloomMapFile.Writer(conf, ts,
+ testFileName, IntWritable.class, Text.class, CompressionType.BLOCK,
+ defaultProgress);
+ assertNotNull("testBloomMapFileConstructors error !!!", writer2);
+ BloomMapFile.Writer writer3 = new BloomMapFile.Writer(conf, ts,
+ testFileName, IntWritable.class, Text.class, CompressionType.BLOCK);
+ assertNotNull("testBloomMapFileConstructors error !!!", writer3);
+ BloomMapFile.Writer writer4 = new BloomMapFile.Writer(conf, ts,
+ testFileName, IntWritable.class, Text.class, CompressionType.RECORD,
+ defaultCodec, defaultProgress);
+ assertNotNull("testBloomMapFileConstructors error !!!", writer4);
+ BloomMapFile.Writer writer5 = new BloomMapFile.Writer(conf, ts,
+ testFileName, IntWritable.class, Text.class, CompressionType.RECORD,
+ defaultProgress);
+ assertNotNull("testBloomMapFileConstructors error !!!", writer5);
+ BloomMapFile.Writer writer6 = new BloomMapFile.Writer(conf, ts,
+ testFileName, IntWritable.class, Text.class, CompressionType.RECORD);
+ assertNotNull("testBloomMapFileConstructors error !!!", writer6);
+ BloomMapFile.Writer writer7 = new BloomMapFile.Writer(conf, ts,
+ testFileName, WritableComparator.get(Text.class), Text.class);
+ assertNotNull("testBloomMapFileConstructors error !!!", writer7);
+ } catch (Exception ex) {
+ fail("testBloomMapFileConstructors error !!!");
+ }
+ }
+
+ static final Progressable defaultProgress = new Progressable() {
+ @Override
+ public void progress() {
+ }
+ };
+
+ static final CompressionCodec defaultCodec = new CompressionCodec() {
+ @Override
+ public String getDefaultExtension() {
+ return null;
+ }
+
+ @Override
+    public Class<? extends Decompressor> getDecompressorType() {
+ return null;
+ }
+
+ @Override
+    public Class<? extends Compressor> getCompressorType() {
+ return null;
+ }
+
+ @Override
+ public CompressionOutputStream createOutputStream(OutputStream out,
+ Compressor compressor) throws IOException {
+ return null;
+ }
+
+ @Override
+ public CompressionOutputStream createOutputStream(OutputStream out)
+ throws IOException {
+ return null;
+ }
+
+ @Override
+ public CompressionInputStream createInputStream(InputStream in,
+ Decompressor decompressor) throws IOException {
+ return null;
+ }
+
+ @Override
+ public CompressionInputStream createInputStream(InputStream in)
+ throws IOException {
+ return null;
+ }
+
+ @Override
+ public Decompressor createDecompressor() {
+ return null;
+ }
+
+ @Override
+ public Compressor createCompressor() {
+ return null;
+ }
+ };
}
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBooleanWritable.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBooleanWritable.java
index 8d49425c08..23c28fbe07 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBooleanWritable.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBooleanWritable.java
@@ -50,4 +50,28 @@ public class TestBooleanWritable {
out.flush();
return out;
}
+
+ /**
+ * test {@link BooleanWritable} methods hashCode(), equals(), compareTo()
+ */
+ @Test
+ public void testCommonMethods() {
+ assertTrue("testCommonMethods1 error !!!", newInstance(true).equals(newInstance(true)));
+ assertTrue("testCommonMethods2 error !!!", newInstance(false).equals(newInstance(false)));
+ assertFalse("testCommonMethods3 error !!!", newInstance(false).equals(newInstance(true)));
+ assertTrue("testCommonMethods4 error !!!", checkHashCode(newInstance(true), newInstance(true)));
+ assertFalse("testCommonMethods5 error !!! ", checkHashCode(newInstance(true), newInstance(false)));
+ assertTrue("testCommonMethods6 error !!!", newInstance(true).compareTo(newInstance(false)) > 0 );
+ assertTrue("testCommonMethods7 error !!!", newInstance(false).compareTo(newInstance(true)) < 0 );
+ assertTrue("testCommonMethods8 error !!!", newInstance(false).compareTo(newInstance(false)) == 0 );
+ assertEquals("testCommonMethods9 error !!!", "true", newInstance(true).toString());
+ }
+
+ private boolean checkHashCode(BooleanWritable f, BooleanWritable s) {
+ return f.hashCode() == s.hashCode();
+ }
+
+ private static BooleanWritable newInstance(boolean flag) {
+ return new BooleanWritable(flag);
+ }
}
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBytesWritable.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBytesWritable.java
index 014b8682d8..698ae32e4c 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBytesWritable.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestBytesWritable.java
@@ -133,5 +133,24 @@ public class TestBytesWritable {
assertTrue("buffer created with (array, len) has bad length",
zeroBuf.getLength() == copyBuf.getLength());
}
+
+ /**
+ * test {@link ByteWritable}
+ * methods compareTo(), toString(), equals()
+ */
+ @Test
+ public void testObjectCommonMethods() {
+ byte b = 0x9;
+ ByteWritable bw = new ByteWritable();
+ bw.set(b);
+ assertTrue("testSetByteWritable error", bw.get() == b);
+ assertTrue("testSetByteWritable error < 0", bw.compareTo(new ByteWritable((byte)0xA)) < 0);
+ assertTrue("testSetByteWritable error > 0", bw.compareTo(new ByteWritable((byte)0x8)) > 0);
+ assertTrue("testSetByteWritable error == 0", bw.compareTo(new ByteWritable((byte)0x9)) == 0);
+ assertTrue("testSetByteWritable equals error !!!", bw.equals(new ByteWritable((byte)0x9)));
+ assertTrue("testSetByteWritable equals error !!!", ! bw.equals(new ByteWritable((byte)0xA)));
+ assertTrue("testSetByteWritable equals error !!!", ! bw.equals(new IntWritable(1)));
+ assertEquals("testSetByteWritable error ", "9", bw.toString());
+ }
+
}
-
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestEnumSetWritable.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestEnumSetWritable.java
index 077c0b065d..f48d308965 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestEnumSetWritable.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestEnumSetWritable.java
@@ -1,4 +1,4 @@
-/**
+/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
@@ -20,6 +20,7 @@ package org.apache.hadoop.io;
import java.io.IOException;
import java.util.EnumSet;
+import java.util.Iterator;
import java.lang.reflect.Type;
import junit.framework.TestCase;
@@ -32,8 +33,8 @@ public class TestEnumSetWritable extends TestCase {
}
  EnumSet<TestEnumSet> nonEmptyFlag = EnumSet.of(TestEnumSet.APPEND);
-  EnumSetWritable<TestEnumSet> nonEmptyFlagWritable = new EnumSetWritable<TestEnumSet>(
-      nonEmptyFlag);
+  EnumSetWritable<TestEnumSet> nonEmptyFlagWritable =
+      new EnumSetWritable<TestEnumSet>(nonEmptyFlag);
@SuppressWarnings("unchecked")
public void testSerializeAndDeserializeNonEmpty() throws IOException {
@@ -60,11 +61,12 @@ public class TestEnumSetWritable extends TestCase {
}
assertTrue(
- "Instantiate empty EnumSetWritable with no element type class providesd should throw exception.",
+ "Instantiation of empty EnumSetWritable with no element type class "
+ + "provided should throw exception.",
gotException);
-  EnumSetWritable<TestEnumSet> emptyFlagWritable = new EnumSetWritable<TestEnumSet>(
-      emptyFlag, TestEnumSet.class);
+  EnumSetWritable<TestEnumSet> emptyFlagWritable =
+      new EnumSetWritable<TestEnumSet>(emptyFlag, TestEnumSet.class);
DataOutputBuffer out = new DataOutputBuffer();
ObjectWritable.writeObject(out, emptyFlagWritable, emptyFlagWritable
.getClass(), null);
@@ -86,11 +88,12 @@ public class TestEnumSetWritable extends TestCase {
}
assertTrue(
- "Instantiate empty EnumSetWritable with no element type class providesd should throw exception.",
+ "Instantiation of empty EnumSetWritable with no element type class "
+ + "provided should throw exception",
gotException);
-  EnumSetWritable<TestEnumSet> nullFlagWritable = new EnumSetWritable<TestEnumSet>(
-      null, TestEnumSet.class);
+  EnumSetWritable<TestEnumSet> nullFlagWritable =
+      new EnumSetWritable<TestEnumSet>(null, TestEnumSet.class);
DataOutputBuffer out = new DataOutputBuffer();
ObjectWritable.writeObject(out, nullFlagWritable, nullFlagWritable
@@ -105,10 +108,54 @@ public class TestEnumSetWritable extends TestCase {
  public EnumSetWritable<TestEnumSet> testField;
public void testAvroReflect() throws Exception {
- String schema = "{\"type\":\"array\",\"items\":{\"type\":\"enum\",\"name\":\"TestEnumSet\",\"namespace\":\"org.apache.hadoop.io.TestEnumSetWritable$\",\"symbols\":[\"CREATE\",\"OVERWRITE\",\"APPEND\"]},\"java-class\":\"org.apache.hadoop.io.EnumSetWritable\"}";
+ String schema = "{\"type\":\"array\",\"items\":{\"type\":\"enum\","
+ + "\"name\":\"TestEnumSet\","
+ + "\"namespace\":\"org.apache.hadoop.io.TestEnumSetWritable$\","
+ + "\"symbols\":[\"CREATE\",\"OVERWRITE\",\"APPEND\"]},"
+ + "\"java-class\":\"org.apache.hadoop.io.EnumSetWritable\"}";
Type type =
TestEnumSetWritable.class.getField("testField").getGenericType();
AvroTestUtil.testReflect(nonEmptyFlagWritable, type, schema);
+ }
+
+ /**
+ * test {@link EnumSetWritable} equals() method
+ */
+ public void testEnumSetWritableEquals() {
+    EnumSetWritable<TestEnumSet> eset1 = new EnumSetWritable<TestEnumSet>(
+        EnumSet.of(TestEnumSet.APPEND, TestEnumSet.CREATE), TestEnumSet.class);
+    EnumSetWritable<TestEnumSet> eset2 = new EnumSetWritable<TestEnumSet>(
+        EnumSet.of(TestEnumSet.APPEND, TestEnumSet.CREATE), TestEnumSet.class);
+ assertTrue("testEnumSetWritableEquals error !!!", eset1.equals(eset2));
+ assertFalse("testEnumSetWritableEquals error !!!",
+        eset1.equals(new EnumSetWritable<TestEnumSet>(EnumSet.of(
+ TestEnumSet.APPEND, TestEnumSet.CREATE, TestEnumSet.OVERWRITE),
+ TestEnumSet.class)));
+ assertTrue("testEnumSetWritableEquals getElementType error !!!", eset1
+ .getElementType().equals(TestEnumSet.class));
}
+
+ /**
+ * test {@code EnumSetWritable.write(DataOutputBuffer out)}
+ * and iteration by TestEnumSet through iterator().
+ */
+ public void testEnumSetWritableWriteRead() throws Exception {
+    EnumSetWritable<TestEnumSet> srcSet = new EnumSetWritable<TestEnumSet>(
+ EnumSet.of(TestEnumSet.APPEND, TestEnumSet.CREATE), TestEnumSet.class);
+ DataOutputBuffer out = new DataOutputBuffer();
+ srcSet.write(out);
+    EnumSetWritable<TestEnumSet> dstSet = new EnumSetWritable<TestEnumSet>();
+ DataInputBuffer in = new DataInputBuffer();
+ in.reset(out.getData(), out.getLength());
+ dstSet.readFields(in);
+
+    EnumSet<TestEnumSet> result = dstSet.get();
+    Iterator<TestEnumSet> dstIter = result.iterator();
+    Iterator<TestEnumSet> srcIter = srcSet.iterator();
+ while (dstIter.hasNext() && srcIter.hasNext()) {
+ assertEquals("testEnumSetWritableWriteRead error !!!", dstIter.next(),
+ srcIter.next());
+ }
+ }
}
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestMapFile.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestMapFile.java
index f006d4f401..f6eff37fd6 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestMapFile.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestMapFile.java
@@ -17,29 +17,592 @@
*/
package org.apache.hadoop.io;
+import java.io.File;
+import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
+import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.io.SequenceFile.CompressionType;
+import org.apache.hadoop.io.compress.CompressionCodec;
+import org.apache.hadoop.io.compress.CompressionInputStream;
+import org.apache.hadoop.io.compress.CompressionOutputStream;
+import org.apache.hadoop.io.compress.Compressor;
+import org.apache.hadoop.io.compress.Decompressor;
+import org.apache.hadoop.util.Progressable;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
-import junit.framework.TestCase;
+import static org.junit.Assert.*;
-public class TestMapFile extends TestCase {
+import static org.mockito.Mockito.*;
+
+public class TestMapFile {
+
+ private static final Path TEST_DIR = new Path(
+ System.getProperty("test.build.data", "/tmp"),
+ TestMapFile.class.getSimpleName());
+
private static Configuration conf = new Configuration();
+ @Before
+ public void setup() throws Exception {
+ LocalFileSystem fs = FileSystem.getLocal(conf);
+ if (fs.exists(TEST_DIR) && !fs.delete(TEST_DIR, true)) {
+ Assert.fail("Can't clean up test root dir");
+ }
+ fs.mkdirs(TEST_DIR);
+ }
+
+ private static final Progressable defaultProgressable = new Progressable() {
+ @Override
+ public void progress() {
+ }
+ };
+
+ private static final CompressionCodec defaultCodec = new CompressionCodec() {
+ @Override
+ public CompressionOutputStream createOutputStream(OutputStream out)
+ throws IOException {
+ return null;
+ }
+
+ @Override
+ public CompressionOutputStream createOutputStream(OutputStream out,
+ Compressor compressor) throws IOException {
+ return null;
+ }
+
+ @Override
+    public Class<? extends Compressor> getCompressorType() {
+ return null;
+ }
+
+ @Override
+ public Compressor createCompressor() {
+ return null;
+ }
+
+ @Override
+ public CompressionInputStream createInputStream(InputStream in)
+ throws IOException {
+ return null;
+ }
+
+ @Override
+ public CompressionInputStream createInputStream(InputStream in,
+ Decompressor decompressor) throws IOException {
+ return null;
+ }
+
+ @Override
+    public Class<? extends Decompressor> getDecompressorType() {
+ return null;
+ }
+
+ @Override
+ public Decompressor createDecompressor() {
+ return null;
+ }
+
+ @Override
+ public String getDefaultExtension() {
+ return null;
+ }
+ };
+
+ private MapFile.Writer createWriter(String fileName,
+      Class<? extends WritableComparable<?>> keyClass,
+      Class<? extends Writable> valueClass) throws IOException {
+ Path dirName = new Path(TEST_DIR, fileName);
+ MapFile.Writer.setIndexInterval(conf, 4);
+ return new MapFile.Writer(conf, dirName, MapFile.Writer.keyClass(keyClass),
+ MapFile.Writer.valueClass(valueClass));
+ }
+
+ private MapFile.Reader createReader(String fileName,
+      Class<? extends WritableComparable<?>> keyClass) throws IOException {
+ Path dirName = new Path(TEST_DIR, fileName);
+ return new MapFile.Reader(dirName, conf,
+ MapFile.Reader.comparator(new WritableComparator(keyClass)));
+ }
+
+ /**
+ * test {@code MapFile.Reader.getClosest()} method
+ *
+ */
+ @Test
+ public void testGetClosestOnCurrentApi() throws Exception {
+ final String TEST_PREFIX = "testGetClosestOnCurrentApi.mapfile";
+ MapFile.Writer writer = createWriter(TEST_PREFIX, Text.class, Text.class);
+ int FIRST_KEY = 1;
+    // Test keys: 1,11,21,...,91 (FIRST_KEY = 1, step 10)
+ for (int i = FIRST_KEY; i < 100; i += 10) {
+ Text t = new Text(Integer.toString(i));
+ writer.append(t, t);
+ }
+ writer.close();
+
+ MapFile.Reader reader = createReader(TEST_PREFIX, Text.class);
+ Text key = new Text("55");
+ Text value = new Text();
+
+ // Test get closest with step forward
+ Text closest = (Text) reader.getClosest(key, value);
+ assertEquals(new Text("61"), closest);
+
+ // Test get closest with step back
+ closest = (Text) reader.getClosest(key, value, true);
+ assertEquals(new Text("51"), closest);
+
+ // Test get closest when we pass explicit key
+ final Text explicitKey = new Text("21");
+ closest = (Text) reader.getClosest(explicitKey, value);
+ assertEquals(new Text("21"), explicitKey);
+
+ // Test what happens at boundaries. Assert if searching a key that is
+ // less than first key in the mapfile, that the first key is returned.
+ key = new Text("00");
+ closest = (Text) reader.getClosest(key, value);
+ assertEquals(FIRST_KEY, Integer.parseInt(closest.toString()));
+
+ // Assert that null is returned if key is > last entry in mapfile.
+ key = new Text("92");
+ closest = (Text) reader.getClosest(key, value);
+ assertNull("Not null key in testGetClosestWithNewCode", closest);
+
+ // If we were looking for the key before, we should get the last key
+ closest = (Text) reader.getClosest(key, value, true);
+ assertEquals(new Text("91"), closest);
+ }
+
+ /**
+ * test {@code MapFile.Reader.midKey() } method
+ */
+ @Test
+ public void testMidKeyOnCurrentApi() throws Exception {
+ // Write a mapfile of simple data: keys are
+ final String TEST_PREFIX = "testMidKeyOnCurrentApi.mapfile";
+ MapFile.Writer writer = createWriter(TEST_PREFIX, IntWritable.class,
+ IntWritable.class);
+ // 0,1,....9
+ int SIZE = 10;
+ for (int i = 0; i < SIZE; i++)
+ writer.append(new IntWritable(i), new IntWritable(i));
+ writer.close();
+
+ MapFile.Reader reader = createReader(TEST_PREFIX, IntWritable.class);
+ assertEquals(new IntWritable((SIZE - 1) / 2), reader.midKey());
+ }
+
+ /**
+ * test {@code MapFile.Writer.rename()} method
+ */
+ @Test
+ public void testRename() {
+ final String NEW_FILE_NAME = "test-new.mapfile";
+ final String OLD_FILE_NAME = "test-old.mapfile";
+ try {
+ FileSystem fs = FileSystem.getLocal(conf);
+ MapFile.Writer writer = createWriter(OLD_FILE_NAME, IntWritable.class,
+ IntWritable.class);
+ writer.close();
+ MapFile.rename(fs, new Path(TEST_DIR, OLD_FILE_NAME).toString(),
+ new Path(TEST_DIR, NEW_FILE_NAME).toString());
+ MapFile.delete(fs, new Path(TEST_DIR, NEW_FILE_NAME).toString());
+ } catch (IOException ex) {
+ fail("testRename error " + ex);
+ }
+ }
+
+ /**
+ * test {@code MapFile.rename()}
+ * method with throwing {@code IOException}
+ */
+ @Test
+ public void testRenameWithException() {
+ final String ERROR_MESSAGE = "Can't rename file";
+ final String NEW_FILE_NAME = "test-new.mapfile";
+ final String OLD_FILE_NAME = "test-old.mapfile";
+ try {
+ FileSystem fs = FileSystem.getLocal(conf);
+ FileSystem spyFs = spy(fs);
+
+ MapFile.Writer writer = createWriter(OLD_FILE_NAME, IntWritable.class,
+ IntWritable.class);
+ writer.close();
+
+ Path oldDir = new Path(TEST_DIR, OLD_FILE_NAME);
+ Path newDir = new Path(TEST_DIR, NEW_FILE_NAME);
+ when(spyFs.rename(oldDir, newDir)).thenThrow(
+ new IOException(ERROR_MESSAGE));
+
+ MapFile.rename(spyFs, oldDir.toString(), newDir.toString());
+ fail("testRenameWithException no exception error !!!");
+ } catch (IOException ex) {
+ assertEquals("testRenameWithException invalid IOExceptionMessage !!!",
+ ex.getMessage(), ERROR_MESSAGE);
+ }
+ }
+
+ @Test
+ public void testRenameWithFalse() {
+ final String ERROR_MESSAGE = "Could not rename";
+ final String NEW_FILE_NAME = "test-new.mapfile";
+ final String OLD_FILE_NAME = "test-old.mapfile";
+ try {
+ FileSystem fs = FileSystem.getLocal(conf);
+ FileSystem spyFs = spy(fs);
+
+ MapFile.Writer writer = createWriter(OLD_FILE_NAME, IntWritable.class,
+ IntWritable.class);
+ writer.close();
+
+ Path oldDir = new Path(TEST_DIR, OLD_FILE_NAME);
+ Path newDir = new Path(TEST_DIR, NEW_FILE_NAME);
+ when(spyFs.rename(oldDir, newDir)).thenReturn(false);
+
+ MapFile.rename(spyFs, oldDir.toString(), newDir.toString());
+ fail("testRenameWithException no exception error !!!");
+ } catch (IOException ex) {
+ assertTrue("testRenameWithFalse invalid IOExceptionMessage error !!!", ex
+ .getMessage().startsWith(ERROR_MESSAGE));
+ }
+ }
+
+ /**
+ * test throwing {@code IOException} in {@code MapFile.Writer} constructor
+ */
+ @Test
+ public void testWriteWithFailDirCreation() {
+ String ERROR_MESSAGE = "Mkdirs failed to create directory";
+ Path dirName = new Path(TEST_DIR, "fail.mapfile");
+ MapFile.Writer writer = null;
+ try {
+ FileSystem fs = FileSystem.getLocal(conf);
+ FileSystem spyFs = spy(fs);
+ Path pathSpy = spy(dirName);
+ when(pathSpy.getFileSystem(conf)).thenReturn(spyFs);
+ when(spyFs.mkdirs(dirName)).thenReturn(false);
+
+ writer = new MapFile.Writer(conf, pathSpy,
+ MapFile.Writer.keyClass(IntWritable.class),
+ MapFile.Writer.valueClass(Text.class));
+ fail("testWriteWithFailDirCreation error !!!");
+ } catch (IOException ex) {
+ assertTrue("testWriteWithFailDirCreation ex error !!!", ex.getMessage()
+ .startsWith(ERROR_MESSAGE));
+ } finally {
+ if (writer != null)
+ try {
+ writer.close();
+ } catch (IOException e) {
+ }
+ }
+ }
+
+ /**
+ * test {@code MapFile.Reader.finalKey()} method
+ */
+ @Test
+ public void testOnFinalKey() {
+ final String TEST_METHOD_KEY = "testOnFinalKey.mapfile";
+ int SIZE = 10;
+ try {
+ MapFile.Writer writer = createWriter(TEST_METHOD_KEY, IntWritable.class,
+ IntWritable.class);
+ for (int i = 0; i < SIZE; i++)
+ writer.append(new IntWritable(i), new IntWritable(i));
+ writer.close();
+
+ MapFile.Reader reader = createReader(TEST_METHOD_KEY, IntWritable.class);
+ IntWritable expectedKey = new IntWritable(0);
+ reader.finalKey(expectedKey);
+ assertEquals("testOnFinalKey not same !!!", expectedKey, new IntWritable(
+ 9));
+ } catch (IOException ex) {
+ fail("testOnFinalKey error !!!");
+ }
+ }
+
+ /**
+ * test {@code MapFile.Writer} constructor with key, value
+ * and validate it with {@code keyClass(), valueClass()} methods
+ */
+ @Test
+ public void testKeyValueClasses() {
+    Class<? extends WritableComparable<?>> keyClass = IntWritable.class;
+    Class<?> valueClass = Text.class;
+ try {
+ createWriter("testKeyValueClasses.mapfile", IntWritable.class, Text.class);
+ assertNotNull("writer key class null error !!!",
+ MapFile.Writer.keyClass(keyClass));
+ assertNotNull("writer value class null error !!!",
+ MapFile.Writer.valueClass(valueClass));
+ } catch (IOException ex) {
+ fail(ex.getMessage());
+ }
+ }
+
+ /**
+ * test {@code MapFile.Reader.getClosest() } with wrong class key
+ */
+ @Test
+ public void testReaderGetClosest() throws Exception {
+ final String TEST_METHOD_KEY = "testReaderWithWrongKeyClass.mapfile";
+ try {
+ MapFile.Writer writer = createWriter(TEST_METHOD_KEY, IntWritable.class,
+ Text.class);
+
+ for (int i = 0; i < 10; i++)
+ writer.append(new IntWritable(i), new Text("value" + i));
+ writer.close();
+
+ MapFile.Reader reader = createReader(TEST_METHOD_KEY, Text.class);
+ reader.getClosest(new Text("2"), new Text(""));
+ fail("no excepted exception in testReaderWithWrongKeyClass !!!");
+ } catch (IOException ex) {
+ /* Should be thrown to pass the test */
+ }
+ }
+
+ /**
+ * test {@code MapFile.Writer.append() } with wrong key class
+ */
+ @Test
+ public void testReaderWithWrongValueClass() {
+ final String TEST_METHOD_KEY = "testReaderWithWrongValueClass.mapfile";
+ try {
+ MapFile.Writer writer = createWriter(TEST_METHOD_KEY, IntWritable.class,
+ Text.class);
+ writer.append(new IntWritable(0), new IntWritable(0));
+ fail("no excepted exception in testReaderWithWrongKeyClass !!!");
+ } catch (IOException ex) {
+ /* Should be thrown to pass the test */
+ }
+ }
+
+ /**
+ * test {@code MapFile.Reader.next(key, value)} for iteration.
+ */
+ @Test
+ public void testReaderKeyIteration() {
+ final String TEST_METHOD_KEY = "testReaderKeyIteration.mapfile";
+ int SIZE = 10;
+ int ITERATIONS = 5;
+ try {
+ MapFile.Writer writer = createWriter(TEST_METHOD_KEY, IntWritable.class,
+ Text.class);
+ int start = 0;
+ for (int i = 0; i < SIZE; i++)
+ writer.append(new IntWritable(i), new Text("Value:" + i));
+ writer.close();
+
+ MapFile.Reader reader = createReader(TEST_METHOD_KEY, IntWritable.class);
+ // test iteration
+ Writable startValue = new Text("Value:" + start);
+ int i = 0;
+ while (i++ < ITERATIONS) {
+ IntWritable key = new IntWritable(start);
+ Writable value = startValue;
+ while (reader.next(key, value)) {
+ assertNotNull(key);
+ assertNotNull(value);
+ }
+ reader.reset();
+ }
+ assertTrue("reader seek error !!!",
+ reader.seek(new IntWritable(SIZE / 2)));
+ assertFalse("reader seek error !!!",
+ reader.seek(new IntWritable(SIZE * 2)));
+ } catch (IOException ex) {
+ fail("reader seek error !!!");
+ }
+ }
+
+ /**
+ * test {@code MapFile.Writer.testFix} method
+ */
+ @Test
+ public void testFix() {
+ final String INDEX_LESS_MAP_FILE = "testFix.mapfile";
+ int PAIR_SIZE = 20;
+ try {
+ FileSystem fs = FileSystem.getLocal(conf);
+ Path dir = new Path(TEST_DIR, INDEX_LESS_MAP_FILE);
+ MapFile.Writer writer = createWriter(INDEX_LESS_MAP_FILE,
+ IntWritable.class, Text.class);
+ for (int i = 0; i < PAIR_SIZE; i++)
+ writer.append(new IntWritable(0), new Text("value"));
+ writer.close();
+
+ File indexFile = new File(".", "." + INDEX_LESS_MAP_FILE + "/index");
+ boolean isDeleted = false;
+ if (indexFile.exists())
+ isDeleted = indexFile.delete();
+
+ if (isDeleted)
+ assertTrue("testFix error !!!",
+ MapFile.fix(fs, dir, IntWritable.class, Text.class, true, conf) == PAIR_SIZE);
+ } catch (Exception ex) {
+ fail("testFix error !!!");
+ }
+ }
+ /**
+ * test all available constructor for {@code MapFile.Writer}
+ */
+ @Test
+ @SuppressWarnings("deprecation")
+ public void testDeprecatedConstructors() {
+ String path = new Path(TEST_DIR, "writes.mapfile").toString();
+ try {
+ FileSystem fs = FileSystem.getLocal(conf);
+ MapFile.Writer writer = new MapFile.Writer(conf, fs, path,
+ IntWritable.class, Text.class, CompressionType.RECORD);
+ assertNotNull(writer);
+ writer = new MapFile.Writer(conf, fs, path, IntWritable.class,
+ Text.class, CompressionType.RECORD, defaultProgressable);
+ assertNotNull(writer);
+ writer = new MapFile.Writer(conf, fs, path, IntWritable.class,
+ Text.class, CompressionType.RECORD, defaultCodec, defaultProgressable);
+ assertNotNull(writer);
+ writer = new MapFile.Writer(conf, fs, path,
+ WritableComparator.get(Text.class), Text.class);
+ assertNotNull(writer);
+ writer = new MapFile.Writer(conf, fs, path,
+ WritableComparator.get(Text.class), Text.class,
+ SequenceFile.CompressionType.RECORD);
+ assertNotNull(writer);
+ writer = new MapFile.Writer(conf, fs, path,
+ WritableComparator.get(Text.class), Text.class,
+ CompressionType.RECORD, defaultProgressable);
+ assertNotNull(writer);
+ writer.close();
+
+ MapFile.Reader reader = new MapFile.Reader(fs, path,
+ WritableComparator.get(IntWritable.class), conf);
+ assertNotNull(reader);
+ assertNotNull("reader key is null !!!", reader.getKeyClass());
+ assertNotNull("reader value in null", reader.getValueClass());
+
+ } catch (IOException e) {
+ fail(e.getMessage());
+ }
+ }
+
+ /**
+ * test {@code MapFile.Writer} constructor
+ * with IllegalArgumentException
+ *
+ */
+ @Test
+ public void testKeyLessWriterCreation() {
+ MapFile.Writer writer = null;
+ try {
+ writer = new MapFile.Writer(conf, TEST_DIR);
+ fail("fail in testKeyLessWriterCreation !!!");
+ } catch (IllegalArgumentException ex) {
+ } catch (Exception e) {
+ fail("fail in testKeyLessWriterCreation. Other ex !!!");
+ } finally {
+ if (writer != null)
+ try {
+ writer.close();
+ } catch (IOException e) {
+ }
+ }
+ }
+ /**
+ * test {@code MapFile.Writer} constructor with IOException
+ */
+ @Test
+ public void testPathExplosionWriterCreation() {
+ Path path = new Path(TEST_DIR, "testPathExplosionWriterCreation.mapfile");
+ String TEST_ERROR_MESSAGE = "Mkdirs failed to create directory "
+ + path.getName();
+ MapFile.Writer writer = null;
+ try {
+ FileSystem fsSpy = spy(FileSystem.get(conf));
+ Path pathSpy = spy(path);
+ when(fsSpy.mkdirs(path)).thenThrow(new IOException(TEST_ERROR_MESSAGE));
+
+ when(pathSpy.getFileSystem(conf)).thenReturn(fsSpy);
+
+ writer = new MapFile.Writer(conf, pathSpy,
+ MapFile.Writer.keyClass(IntWritable.class),
+ MapFile.Writer.valueClass(IntWritable.class));
+ fail("fail in testPathExplosionWriterCreation !!!");
+ } catch (IOException ex) {
+ assertEquals("testPathExplosionWriterCreation ex message error !!!",
+ ex.getMessage(), TEST_ERROR_MESSAGE);
+ } catch (Exception e) {
+ fail("fail in testPathExplosionWriterCreation. Other ex !!!");
+ } finally {
+ if (writer != null)
+ try {
+ writer.close();
+ } catch (IOException e) {
+ }
+ }
+ }
+
+ /**
+ * test {@code MapFile.Writer.append} method with desc order
+ */
+ @Test
+ public void testDescOrderWithThrowExceptionWriterAppend() {
+ try {
+ MapFile.Writer writer = createWriter(".mapfile", IntWritable.class,
+ Text.class);
+ writer.append(new IntWritable(2), new Text("value: " + 1));
+ writer.append(new IntWritable(2), new Text("value: " + 2));
+ writer.append(new IntWritable(2), new Text("value: " + 4));
+ writer.append(new IntWritable(1), new Text("value: " + 3));
+ fail("testDescOrderWithThrowExceptionWriterAppend not expected exception error !!!");
+ } catch (IOException ex) {
+ } catch (Exception e) {
+ fail("testDescOrderWithThrowExceptionWriterAppend other ex throw !!!");
+ }
+ }
+
+ @Test
+ public void testMainMethodMapFile() {
+ String path = new Path(TEST_DIR, "mainMethodMapFile.mapfile").toString();
+ String inFile = "mainMethodMapFile.mapfile";
+ String outFile = "mainMethodMapFile.mapfile";
+ String[] args = { path, outFile };
+ try {
+ MapFile.Writer writer = createWriter(inFile, IntWritable.class,
+ Text.class);
+ writer.append(new IntWritable(1), new Text("test_text1"));
+ writer.append(new IntWritable(2), new Text("test_text2"));
+ writer.close();
+ MapFile.main(args);
+ } catch (Exception ex) {
+ fail("testMainMethodMapFile error !!!");
+ }
+ }
+
/**
* Test getClosest feature.
+ *
* @throws Exception
*/
+ @Test
+ @SuppressWarnings("deprecation")
public void testGetClosest() throws Exception {
- // Write a mapfile of simple data: keys are
- Path dirName = new Path(System.getProperty("test.build.data",".") +
- getName() + ".mapfile");
+ // Write a mapfile of simple data: keys are
+ Path dirName = new Path(TEST_DIR, "testGetClosest.mapfile");
FileSystem fs = FileSystem.getLocal(conf);
Path qualifiedDirName = fs.makeQualified(dirName);
// Make an index entry for every third insertion.
MapFile.Writer.setIndexInterval(conf, 3);
MapFile.Writer writer = new MapFile.Writer(conf, fs,
- qualifiedDirName.toString(), Text.class, Text.class);
+ qualifiedDirName.toString(), Text.class, Text.class);
// Assert that the index interval is 1
assertEquals(3, writer.getIndexInterval());
// Add entries up to 100 in intervals of ten.
@@ -51,74 +614,84 @@ public class TestMapFile extends TestCase {
}
writer.close();
// Now do getClosest on created mapfile.
- MapFile.Reader reader = new MapFile.Reader(fs, qualifiedDirName.toString(),
- conf);
+ MapFile.Reader reader = new MapFile.Reader(qualifiedDirName, conf);
+ try {
Text key = new Text("55");
Text value = new Text();
- Text closest = (Text)reader.getClosest(key, value);
+ Text closest = (Text) reader.getClosest(key, value);
// Assert that closest after 55 is 60
assertEquals(new Text("60"), closest);
// Get closest that falls before the passed key: 50
- closest = (Text)reader.getClosest(key, value, true);
+ closest = (Text) reader.getClosest(key, value, true);
assertEquals(new Text("50"), closest);
// Test get closest when we pass explicit key
final Text TWENTY = new Text("20");
- closest = (Text)reader.getClosest(TWENTY, value);
+ closest = (Text) reader.getClosest(TWENTY, value);
assertEquals(TWENTY, closest);
- closest = (Text)reader.getClosest(TWENTY, value, true);
+ closest = (Text) reader.getClosest(TWENTY, value, true);
assertEquals(TWENTY, closest);
- // Test what happens at boundaries. Assert if searching a key that is
+ // Test what happens at boundaries. Assert if searching a key that is
// less than first key in the mapfile, that the first key is returned.
key = new Text("00");
- closest = (Text)reader.getClosest(key, value);
+ closest = (Text) reader.getClosest(key, value);
assertEquals(FIRST_KEY, Integer.parseInt(closest.toString()));
-
- // If we're looking for the first key before, and we pass in a key before
+
+ // If we're looking for the first key before, and we pass in a key before
// the first key in the file, we should get null
- closest = (Text)reader.getClosest(key, value, true);
+ closest = (Text) reader.getClosest(key, value, true);
assertNull(closest);
-
+
// Assert that null is returned if key is > last entry in mapfile.
key = new Text("99");
- closest = (Text)reader.getClosest(key, value);
+ closest = (Text) reader.getClosest(key, value);
assertNull(closest);
// If we were looking for the key before, we should get the last key
- closest = (Text)reader.getClosest(key, value, true);
+ closest = (Text) reader.getClosest(key, value, true);
assertEquals(new Text("90"), closest);
+ } finally {
+ reader.close();
+ }
}
+ @Test
+ @SuppressWarnings("deprecation")
public void testMidKey() throws Exception {
- // Write a mapfile of simple data: keys are
- Path dirName = new Path(System.getProperty("test.build.data",".") +
- getName() + ".mapfile");
+ // Write a mapfile of simple data: keys are
+ Path dirName = new Path(TEST_DIR, "testMidKey.mapfile");
FileSystem fs = FileSystem.getLocal(conf);
Path qualifiedDirName = fs.makeQualified(dirName);
-
+
MapFile.Writer writer = new MapFile.Writer(conf, fs,
- qualifiedDirName.toString(), IntWritable.class, IntWritable.class);
+ qualifiedDirName.toString(), IntWritable.class, IntWritable.class);
writer.append(new IntWritable(1), new IntWritable(1));
writer.close();
// Now do getClosest on created mapfile.
- MapFile.Reader reader = new MapFile.Reader(fs, qualifiedDirName.toString(),
- conf);
- assertEquals(new IntWritable(1), reader.midKey());
+ MapFile.Reader reader = new MapFile.Reader(qualifiedDirName, conf);
+ try {
+ assertEquals(new IntWritable(1), reader.midKey());
+ } finally {
+ reader.close();
+ }
}
-
+ @Test
+ @SuppressWarnings("deprecation")
public void testMidKeyEmpty() throws Exception {
- // Write a mapfile of simple data: keys are
- Path dirName = new Path(System.getProperty("test.build.data",".") +
- getName() + ".mapfile");
+ // Write a mapfile of simple data: keys are
+ Path dirName = new Path(TEST_DIR, "testMidKeyEmpty.mapfile");
FileSystem fs = FileSystem.getLocal(conf);
Path qualifiedDirName = fs.makeQualified(dirName);
-
+
MapFile.Writer writer = new MapFile.Writer(conf, fs,
- qualifiedDirName.toString(), IntWritable.class, IntWritable.class);
+ qualifiedDirName.toString(), IntWritable.class, IntWritable.class);
writer.close();
// Now do getClosest on created mapfile.
- MapFile.Reader reader = new MapFile.Reader(fs, qualifiedDirName.toString(),
- conf);
- assertEquals(null, reader.midKey());
+ MapFile.Reader reader = new MapFile.Reader(qualifiedDirName, conf);
+ try {
+ assertEquals(null, reader.midKey());
+ } finally {
+ reader.close();
+ }
}
}
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSetFile.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSetFile.java
index 70d02e013f..a248171c37 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSetFile.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSetFile.java
@@ -20,6 +20,8 @@ package org.apache.hadoop.io;
import java.io.*;
import java.util.*;
+import java.util.concurrent.atomic.AtomicReference;
+
import junit.framework.TestCase;
import org.apache.commons.logging.*;
@@ -51,6 +53,39 @@ public class TestSetFile extends TestCase {
fs.close();
}
}
+
+ /**
+ * test {@code SetFile.Reader} methods
+ * next(), get() in combination
+ */
+ public void testSetFileAccessMethods() {
+ try {
+ FileSystem fs = FileSystem.getLocal(conf);
+ int size = 10;
+ writeData(fs, size);
+ SetFile.Reader reader = createReader(fs);
+ assertTrue("testSetFileWithConstruction1 error !!!", reader.next(new IntWritable(0)));
+ // don't know why reader.get(i) return i+1
+ assertEquals("testSetFileWithConstruction2 error !!!", new IntWritable(size/2 + 1), reader.get(new IntWritable(size/2)));
+ assertNull("testSetFileWithConstruction3 error !!!", reader.get(new IntWritable(size*2)));
+ } catch (Exception ex) {
+ fail("testSetFileWithConstruction error !!!");
+ }
+ }
+
+ private SetFile.Reader createReader(FileSystem fs) throws IOException {
+ return new SetFile.Reader(fs, FILE,
+ WritableComparator.get(IntWritable.class), conf);
+ }
+
+ @SuppressWarnings("deprecation")
+ private void writeData(FileSystem fs, int elementSize) throws IOException {
+ MapFile.delete(fs, FILE);
+ SetFile.Writer writer = new SetFile.Writer(fs, FILE, IntWritable.class);
+ for (int i = 0; i < elementSize; i++)
+ writer.append(new IntWritable(i));
+ writer.close();
+ }
private static RandomDatum[] generate(int count) {
LOG.info("generating " + count + " records in memory");
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestText.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestText.java
index df9fb54032..0fbe46a5b3 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestText.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestText.java
@@ -19,11 +19,12 @@
package org.apache.hadoop.io;
import junit.framework.TestCase;
-
import java.io.IOException;
+import java.nio.BufferUnderflowException;
import java.nio.ByteBuffer;
import java.nio.charset.CharacterCodingException;
import java.util.Random;
+import com.google.common.primitives.Bytes;
/** Unit tests for LargeUTF8. */
public class TestText extends TestCase {
@@ -321,7 +322,81 @@ public class TestText extends TestCase {
(new Text("foo"),
"{\"type\":\"string\",\"java-class\":\"org.apache.hadoop.io.Text\"}");
}
-
+
+ /**
+ *
+ */
+ public void testCharAt() {
+ String line = "adsawseeeeegqewgasddga";
+ Text text = new Text(line);
+ for (int i = 0; i < line.length(); i++) {
+ assertTrue("testCharAt error1 !!!", text.charAt(i) == line.charAt(i));
+ }
+ assertEquals("testCharAt error2 !!!", -1, text.charAt(-1));
+ assertEquals("testCharAt error3 !!!", -1, text.charAt(100));
+ }
+
+ /**
+ * test {@code Text} readFields/write operations
+ */
+ public void testReadWriteOperations() {
+ String line = "adsawseeeeegqewgasddga";
+ byte[] inputBytes = line.getBytes();
+ inputBytes = Bytes.concat(new byte[] {(byte)22}, inputBytes);
+
+ DataInputBuffer in = new DataInputBuffer();
+ DataOutputBuffer out = new DataOutputBuffer();
+ Text text = new Text(line);
+ try {
+ in.reset(inputBytes, inputBytes.length);
+ text.readFields(in);
+ } catch(Exception ex) {
+ fail("testReadFields error !!!");
+ }
+ try {
+ text.write(out);
+ } catch(IOException ex) {
+ } catch(Exception ex) {
+ fail("testReadWriteOperations error !!!");
+ }
+ }
+
+ /**
+ * test {@code Text.bytesToCodePoint(bytes) }
+ * with {@code BufferUnderflowException}
+ *
+ */
+ public void testBytesToCodePoint() {
+ try {
+ ByteBuffer bytes = ByteBuffer.wrap(new byte[] {-2, 45, 23, 12, 76, 89});
+ Text.bytesToCodePoint(bytes);
+ assertTrue("testBytesToCodePoint error !!!", bytes.position() == 6 );
+ } catch (BufferUnderflowException ex) {
+ fail("testBytesToCodePoint unexp exception");
+ } catch (Exception e) {
+ fail("testBytesToCodePoint unexp exception");
+ }
+ }
+
+ public void testbytesToCodePointWithInvalidUTF() {
+ try {
+ Text.bytesToCodePoint(ByteBuffer.wrap(new byte[] {-2}));
+ fail("testbytesToCodePointWithInvalidUTF error unexp exception !!!");
+ } catch (BufferUnderflowException ex) {
+ } catch(Exception e) {
+ fail("testbytesToCodePointWithInvalidUTF error unexp exception !!!");
+ }
+ }
+
+ public void testUtf8Length() {
+ assertEquals("testUtf8Length1 error !!!", 1, Text.utf8Length(new String(new char[]{(char)1})));
+ assertEquals("testUtf8Length127 error !!!", 1, Text.utf8Length(new String(new char[]{(char)127})));
+ assertEquals("testUtf8Length128 error !!!", 2, Text.utf8Length(new String(new char[]{(char)128})));
+ assertEquals("testUtf8Length193 error !!!", 2, Text.utf8Length(new String(new char[]{(char)193})));
+ assertEquals("testUtf8Length225 error !!!", 2, Text.utf8Length(new String(new char[]{(char)225})));
+ assertEquals("testUtf8Length254 error !!!", 2, Text.utf8Length(new String(new char[]{(char)254})));
+ }
+
public static void main(String[] args) throws Exception
{
TestText test = new TestText("main");