diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index 91e2f9f61d..bb34392b66 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -364,6 +364,8 @@ Release 2.0.4-beta - UNRELEASED
HADOOP-9246 Execution phase for hadoop-maven-plugin should be
process-resources (Karthik Kambatla and Chris Nauroth via jlowe)
+ HADOOP-9297. remove old record IO generation and tests. (tucu)
+
Release 2.0.3-alpha - 2013-02-06
INCOMPATIBLE CHANGES
diff --git a/hadoop-common-project/hadoop-common/pom.xml b/hadoop-common-project/hadoop-common/pom.xml
index 9c03aa3dd2..1acd08b82c 100644
--- a/hadoop-common-project/hadoop-common/pom.xml
+++ b/hadoop-common-project/hadoop-common/pom.xml
@@ -376,38 +376,6 @@
org.apache.maven.plugins
maven-antrun-plugin
-
- create-recordcc-generated-sources-directory
- initialize
-
- run
-
-
-
-
-
-
-
-
- generate-test-sources
- generate-test-sources
-
- run
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
create-log-dir
process-test-resources
@@ -466,7 +434,7 @@
build-helper-maven-plugin
- add-recordcc-test-source
+ add-avro-test-source
generate-sources
add-test-source
diff --git a/hadoop-common-project/hadoop-common/src/test/ddl/buffer.jr b/hadoop-common-project/hadoop-common/src/test/ddl/buffer.jr
deleted file mode 100644
index a6094641e9..0000000000
--- a/hadoop-common-project/hadoop-common/src/test/ddl/buffer.jr
+++ /dev/null
@@ -1,23 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-module org.apache.hadoop.record {
- class RecBuffer {
- buffer data;
- }
-}
-
diff --git a/hadoop-common-project/hadoop-common/src/test/ddl/int.jr b/hadoop-common-project/hadoop-common/src/test/ddl/int.jr
deleted file mode 100644
index 8068bf3269..0000000000
--- a/hadoop-common-project/hadoop-common/src/test/ddl/int.jr
+++ /dev/null
@@ -1,23 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-module org.apache.hadoop.record {
- class RecInt {
- int data;
- }
-}
-
diff --git a/hadoop-common-project/hadoop-common/src/test/ddl/string.jr b/hadoop-common-project/hadoop-common/src/test/ddl/string.jr
deleted file mode 100644
index 94abdf5854..0000000000
--- a/hadoop-common-project/hadoop-common/src/test/ddl/string.jr
+++ /dev/null
@@ -1,23 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-module org.apache.hadoop.record {
- class RecString {
- ustring data;
- }
-}
-
diff --git a/hadoop-common-project/hadoop-common/src/test/ddl/test.jr b/hadoop-common-project/hadoop-common/src/test/ddl/test.jr
deleted file mode 100644
index b7f44d8bb1..0000000000
--- a/hadoop-common-project/hadoop-common/src/test/ddl/test.jr
+++ /dev/null
@@ -1,63 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-module org.apache.hadoop.record {
- class RecRecord0 {
- ustring stringVal;
- }
-
- class RecRecord1 {
- boolean boolVal;
- byte byteVal;
- int intVal;
- long longVal;
- float floatVal; // testing inline comment
- double doubleVal; /* testing comment */
- ustring stringVal; /* testing multi-line
- * comment */
- buffer bufferVal; // testing another // inline comment
- vector<ustring> vectorVal;
- map<ustring, ustring> mapVal;
- RecRecord0 recordVal;
- }
-
- class RecRecordOld {
- ustring name;
- vector<long> ivec;
- vector<vector<RecRecord0>> svec;
- RecRecord0 inner;
- vector<vector<vector<ustring>>> strvec;
- float i1;
- map<byte,ustring> map1;
- vector<map<int,long>> mvec1;
- vector<map<int,long>> mvec2;
- }
-
- /* RecRecordNew is a lot like RecRecordOld. Helps test for versioning. */
- class RecRecordNew {
- ustring name2;
- RecRecord0 inner;
- vector<int> ivec;
- vector<vector<RecRecord1>> svec;
- vector<vector<vector<int>>> strvec;
- int i1;
- map<long,ustring> map1;
- vector<map<int,long>> mvec2;
- }
-
-}
-
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/record/FromCpp.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/record/FromCpp.java
deleted file mode 100644
index aeb68ea1de..0000000000
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/record/FromCpp.java
+++ /dev/null
@@ -1,122 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.record;
-
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.TreeMap;
-import junit.framework.*;
-
-/**
- */
-public class FromCpp extends TestCase {
-
- public FromCpp(String testName) {
- super(testName);
- }
-
- @Override
- protected void setUp() throws Exception {
- }
-
- @Override
- protected void tearDown() throws Exception {
- }
-
- public void testBinary() {
- File tmpfile;
- try {
- tmpfile = new File("/temp/hadooptmp.dat");
- RecRecord1 r1 = new RecRecord1();
- r1.setBoolVal(true);
- r1.setByteVal((byte)0x66);
- r1.setFloatVal(3.145F);
- r1.setDoubleVal(1.5234);
- r1.setIntVal(4567);
- r1.setLongVal(0x5a5a5a5a5a5aL);
- r1.setStringVal("random text");
- r1.setBufferVal(new Buffer());
- r1.setVectorVal(new ArrayList());
- r1.setMapVal(new TreeMap());
- FileInputStream istream = new FileInputStream(tmpfile);
- BinaryRecordInput in = new BinaryRecordInput(istream);
- RecRecord1 r2 = new RecRecord1();
- r2.deserialize(in, "");
- istream.close();
- assertTrue(r1.equals(r2));
- } catch (IOException ex) {
- ex.printStackTrace();
- }
- }
-
- public void testCsv() {
- File tmpfile;
- try {
- tmpfile = new File("/temp/hadooptmp.txt");
- RecRecord1 r1 = new RecRecord1();
- r1.setBoolVal(true);
- r1.setByteVal((byte)0x66);
- r1.setFloatVal(3.145F);
- r1.setDoubleVal(1.5234);
- r1.setIntVal(4567);
- r1.setLongVal(0x5a5a5a5a5a5aL);
- r1.setStringVal("random text");
- r1.setBufferVal(new Buffer());
- r1.setVectorVal(new ArrayList());
- r1.setMapVal(new TreeMap());
- FileInputStream istream = new FileInputStream(tmpfile);
- CsvRecordInput in = new CsvRecordInput(istream);
- RecRecord1 r2 = new RecRecord1();
- r2.deserialize(in, "");
- istream.close();
- assertTrue(r1.equals(r2));
- } catch (IOException ex) {
- ex.printStackTrace();
- }
- }
-
- public void testXml() {
- File tmpfile;
- try {
- tmpfile = new File("/temp/hadooptmp.xml");
- RecRecord1 r1 = new RecRecord1();
- r1.setBoolVal(true);
- r1.setByteVal((byte)0x66);
- r1.setFloatVal(3.145F);
- r1.setDoubleVal(1.5234);
- r1.setIntVal(4567);
- r1.setLongVal(0x5a5a5a5a5a5aL);
- r1.setStringVal("random text");
- r1.setBufferVal(new Buffer());
- r1.setVectorVal(new ArrayList());
- r1.setMapVal(new TreeMap());
- FileInputStream istream = new FileInputStream(tmpfile);
- XmlRecordInput in = new XmlRecordInput(istream);
- RecRecord1 r2 = new RecRecord1();
- r2.deserialize(in, "");
- istream.close();
- assertTrue(r1.equals(r2));
- } catch (IOException ex) {
- ex.printStackTrace();
- }
- }
-
-}
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/record/RecordBench.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/record/RecordBench.java
deleted file mode 100644
index 816d69ee26..0000000000
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/record/RecordBench.java
+++ /dev/null
@@ -1,311 +0,0 @@
-/*
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.record;
-
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.IOException;
-import java.lang.reflect.Method;
-import java.util.Random;
-
-/**
- * Benchmark for various types of serializations
- */
-public class RecordBench {
-
- private static class Times {
- long init;
- long serialize;
- long deserialize;
- long write;
- long readFields;
- };
-
- private static final long SEED = 0xDEADBEEFL;
- private static final Random rand = new Random();
-
- /** Do not allow to create a new instance of RecordBench */
- private RecordBench() {}
-
- private static void initBuffers(Record[] buffers) {
- final int BUFLEN = 32;
- for (int idx = 0; idx < buffers.length; idx++) {
- buffers[idx] = new RecBuffer();
- int buflen = rand.nextInt(BUFLEN);
- byte[] bytes = new byte[buflen];
- rand.nextBytes(bytes);
- ((RecBuffer)buffers[idx]).setData(new Buffer(bytes));
- }
- }
-
- private static void initStrings(Record[] strings) {
- final int STRLEN = 32;
- for (int idx = 0; idx < strings.length; idx++) {
- strings[idx] = new RecString();
- int strlen = rand.nextInt(STRLEN);
- StringBuilder sb = new StringBuilder(strlen);
- for (int ich = 0; ich < strlen; ich++) {
- int cpt = 0;
- while (true) {
- cpt = rand.nextInt(0x10FFFF+1);
- if (Utils.isValidCodePoint(cpt)) {
- break;
- }
- }
- sb.appendCodePoint(cpt);
- }
- ((RecString)strings[idx]).setData(sb.toString());
- }
- }
-
- private static void initInts(Record[] ints) {
- for (int idx = 0; idx < ints.length; idx++) {
- ints[idx] = new RecInt();
- ((RecInt)ints[idx]).setData(rand.nextInt());
- }
- }
-
- private static Record[] makeArray(String type, int numRecords, Times times) {
- Method init = null;
- try {
- init = RecordBench.class.getDeclaredMethod("init"+
- toCamelCase(type) + "s",
- new Class[] {Record[].class});
- } catch (NoSuchMethodException ex) {
- throw new RuntimeException(ex);
- }
-
- Record[] records = new Record[numRecords];
- times.init = System.nanoTime();
- try {
- init.invoke(null, new Object[]{records});
- } catch (Exception ex) {
- throw new RuntimeException(ex);
- }
- times.init = System.nanoTime() - times.init;
- return records;
- }
-
- private static void runBinaryBench(String type, int numRecords, Times times)
- throws IOException {
- Record[] records = makeArray(type, numRecords, times);
- ByteArrayOutputStream bout = new ByteArrayOutputStream();
- BinaryRecordOutput rout = new BinaryRecordOutput(bout);
- DataOutputStream dout = new DataOutputStream(bout);
-
- for(int idx = 0; idx < numRecords; idx++) {
- records[idx].serialize(rout);
- }
- bout.reset();
-
- times.serialize = System.nanoTime();
- for(int idx = 0; idx < numRecords; idx++) {
- records[idx].serialize(rout);
- }
- times.serialize = System.nanoTime() - times.serialize;
-
- byte[] serialized = bout.toByteArray();
- ByteArrayInputStream bin = new ByteArrayInputStream(serialized);
- BinaryRecordInput rin = new BinaryRecordInput(bin);
-
- times.deserialize = System.nanoTime();
- for(int idx = 0; idx < numRecords; idx++) {
- records[idx].deserialize(rin);
- }
- times.deserialize = System.nanoTime() - times.deserialize;
-
- bout.reset();
-
- times.write = System.nanoTime();
- for(int idx = 0; idx < numRecords; idx++) {
- records[idx].write(dout);
- }
- times.write = System.nanoTime() - times.write;
-
- bin.reset();
- DataInputStream din = new DataInputStream(bin);
-
- times.readFields = System.nanoTime();
- for(int idx = 0; idx < numRecords; idx++) {
- records[idx].readFields(din);
- }
- times.readFields = System.nanoTime() - times.readFields;
- }
-
- private static void runCsvBench(String type, int numRecords, Times times)
- throws IOException {
- Record[] records = makeArray(type, numRecords, times);
- ByteArrayOutputStream bout = new ByteArrayOutputStream();
- CsvRecordOutput rout = new CsvRecordOutput(bout);
-
- for(int idx = 0; idx < numRecords; idx++) {
- records[idx].serialize(rout);
- }
- bout.reset();
-
- times.serialize = System.nanoTime();
- for(int idx = 0; idx < numRecords; idx++) {
- records[idx].serialize(rout);
- }
- times.serialize = System.nanoTime() - times.serialize;
-
- byte[] serialized = bout.toByteArray();
- ByteArrayInputStream bin = new ByteArrayInputStream(serialized);
- CsvRecordInput rin = new CsvRecordInput(bin);
-
- times.deserialize = System.nanoTime();
- for(int idx = 0; idx < numRecords; idx++) {
- records[idx].deserialize(rin);
- }
- times.deserialize = System.nanoTime() - times.deserialize;
- }
-
- private static void runXmlBench(String type, int numRecords, Times times)
- throws IOException {
- Record[] records = makeArray(type, numRecords, times);
- ByteArrayOutputStream bout = new ByteArrayOutputStream();
- XmlRecordOutput rout = new XmlRecordOutput(bout);
-
- for(int idx = 0; idx < numRecords; idx++) {
- records[idx].serialize(rout);
- }
- bout.reset();
-
- bout.write("<records>\n".getBytes());
-
- times.serialize = System.nanoTime();
- for(int idx = 0; idx < numRecords; idx++) {
- records[idx].serialize(rout);
- }
- times.serialize = System.nanoTime() - times.serialize;
-
- bout.write("</records>\n".getBytes());
-
- byte[] serialized = bout.toByteArray();
- ByteArrayInputStream bin = new ByteArrayInputStream(serialized);
-
- times.deserialize = System.nanoTime();
- XmlRecordInput rin = new XmlRecordInput(bin);
- for(int idx = 0; idx < numRecords; idx++) {
- records[idx].deserialize(rin);
- }
- times.deserialize = System.nanoTime() - times.deserialize;
- }
-
- private static void printTimes(String type,
- String format,
- int numRecords,
- Times times) {
- System.out.println("Type: " + type + " Format: " + format +
- " #Records: "+numRecords);
- if (times.init != 0) {
- System.out.println("Initialization Time (Per record) : "+
- times.init/numRecords + " Nanoseconds");
- }
-
- if (times.serialize != 0) {
- System.out.println("Serialization Time (Per Record) : "+
- times.serialize/numRecords + " Nanoseconds");
- }
-
- if (times.deserialize != 0) {
- System.out.println("Deserialization Time (Per Record) : "+
- times.deserialize/numRecords + " Nanoseconds");
- }
-
- if (times.write != 0) {
- System.out.println("Write Time (Per Record) : "+
- times.write/numRecords + " Nanoseconds");
- }
-
- if (times.readFields != 0) {
- System.out.println("ReadFields Time (Per Record) : "+
- times.readFields/numRecords + " Nanoseconds");
- }
-
- System.out.println();
- }
-
- private static String toCamelCase(String inp) {
- char firstChar = inp.charAt(0);
- if (Character.isLowerCase(firstChar)) {
- return ""+Character.toUpperCase(firstChar) + inp.substring(1);
- }
- return inp;
- }
-
- private static void exitOnError() {
- String usage = "RecordBench {buffer|string|int}"+
- " {binary|csv|xml} <numRecords>";
- System.out.println(usage);
- System.exit(1);
- }
-
- /**
- * @param args the command line arguments
- */
- public static void main(String[] args) throws IOException {
- String version = "RecordBench v0.1";
- System.out.println(version+"\n");
-
- if (args.length != 3) {
- exitOnError();
- }
-
- String typeName = args[0];
- String format = args[1];
- int numRecords = Integer.decode(args[2]).intValue();
-
- Method bench = null;
- try {
- bench = RecordBench.class.getDeclaredMethod("run"+
- toCamelCase(format) + "Bench",
- new Class[] {String.class, Integer.TYPE, Times.class});
- } catch (NoSuchMethodException ex) {
- ex.printStackTrace();
- exitOnError();
- }
-
- if (numRecords < 0) {
- exitOnError();
- }
-
- // dry run
- rand.setSeed(SEED);
- Times times = new Times();
- try {
- bench.invoke(null, new Object[] {typeName, numRecords, times});
- } catch (Exception ex) {
- ex.printStackTrace();
- System.exit(1);
- }
-
- // timed run
- rand.setSeed(SEED);
- try {
- bench.invoke(null, new Object[] {typeName, numRecords, times});
- } catch (Exception ex) {
- ex.printStackTrace();
- System.exit(1);
- }
- printTimes(typeName, format, numRecords, times);
- }
-}
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/record/TestBuffer.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/record/TestBuffer.java
deleted file mode 100644
index 3012fa6ff4..0000000000
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/record/TestBuffer.java
+++ /dev/null
@@ -1,124 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.record;
-
-import junit.framework.*;
-
-/**
- * A Unit test for Record I/O Buffer class
- */
-public class TestBuffer extends TestCase {
-
- public TestBuffer(String testName) {
- super(testName);
- }
-
- /**
- * Test of set method, of class org.apache.hadoop.record.Buffer.
- */
- public void testSet() {
- final byte[] bytes = new byte[10];
- final Buffer instance = new Buffer();
-
- instance.set(bytes);
-
- assertEquals("set failed", bytes, instance.get());
- }
-
- /**
- * Test of copy method, of class org.apache.hadoop.record.Buffer.
- */
- public void testCopy() {
- final byte[] bytes = new byte[10];
- final int offset = 6;
- final int length = 3;
- for (int idx = 0; idx < 10; idx ++) {
- bytes[idx] = (byte) idx;
- }
- final Buffer instance = new Buffer();
-
- instance.copy(bytes, offset, length);
-
- assertEquals("copy failed", 3, instance.getCapacity());
- assertEquals("copy failed", 3, instance.get().length);
- for (int idx = 0; idx < 3; idx++) {
- assertEquals("Buffer content corrupted", idx+6, instance.get()[idx]);
- }
- }
-
- /**
- * Test of getCount method, of class org.apache.hadoop.record.Buffer.
- */
- public void testGetCount() {
- final Buffer instance = new Buffer();
-
- final int expResult = 0;
- final int result = instance.getCount();
- assertEquals("getSize failed", expResult, result);
- }
-
- /**
- * Test of getCapacity method, of class org.apache.hadoop.record.Buffer.
- */
- public void testGetCapacity() {
- final Buffer instance = new Buffer();
-
- final int expResult = 0;
- final int result = instance.getCapacity();
- assertEquals("getCapacity failed", expResult, result);
-
- instance.setCapacity(100);
- assertEquals("setCapacity failed", 100, instance.getCapacity());
- }
-
- /**
- * Test of truncate method, of class org.apache.hadoop.record.Buffer.
- */
- public void testTruncate() {
- final Buffer instance = new Buffer();
- instance.setCapacity(100);
- assertEquals("setCapacity failed", 100, instance.getCapacity());
-
- instance.truncate();
- assertEquals("truncate failed", 0, instance.getCapacity());
- }
-
- /**
- * Test of append method, of class org.apache.hadoop.record.Buffer.
- */
- public void testAppend() {
- final byte[] bytes = new byte[100];
- final int offset = 0;
- final int length = 100;
- for (int idx = 0; idx < 100; idx++) {
- bytes[idx] = (byte) (100-idx);
- }
-
- final Buffer instance = new Buffer();
-
- instance.append(bytes, offset, length);
-
- assertEquals("Buffer size mismatch", 100, instance.getCount());
-
- for (int idx = 0; idx < 100; idx++) {
- assertEquals("Buffer contents corrupted", 100-idx, instance.get()[idx]);
- }
-
- }
-}
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/record/TestRecordIO.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/record/TestRecordIO.java
deleted file mode 100644
index 38eb9a0761..0000000000
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/record/TestRecordIO.java
+++ /dev/null
@@ -1,201 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.record;
-
-import java.io.IOException;
-import junit.framework.*;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
-import java.util.ArrayList;
-import java.util.TreeMap;
-
-/**
- */
-public class TestRecordIO extends TestCase {
-
- public TestRecordIO(String testName) {
- super(testName);
- }
-
- @Override
- protected void setUp() throws Exception {
- }
-
- @Override
- protected void tearDown() throws Exception {
- }
-
- public void testBinary() {
- File tmpfile;
- try {
- tmpfile = File.createTempFile("hadooprec", ".dat");
- FileOutputStream ostream = new FileOutputStream(tmpfile);
- BinaryRecordOutput out = new BinaryRecordOutput(ostream);
- RecRecord1 r1 = new RecRecord1();
- r1.setBoolVal(true);
- r1.setByteVal((byte)0x66);
- r1.setFloatVal(3.145F);
- r1.setDoubleVal(1.5234);
- r1.setIntVal(-4567);
- r1.setLongVal(-2367L);
- r1.setStringVal("random text");
- r1.setBufferVal(new Buffer());
- r1.setVectorVal(new ArrayList());
- r1.setMapVal(new TreeMap());
- RecRecord0 r0 = new RecRecord0();
- r0.setStringVal("other random text");
- r1.setRecordVal(r0);
- r1.serialize(out, "");
- ostream.close();
- FileInputStream istream = new FileInputStream(tmpfile);
- BinaryRecordInput in = new BinaryRecordInput(istream);
- RecRecord1 r2 = new RecRecord1();
- r2.deserialize(in, "");
- istream.close();
- tmpfile.delete();
- assertTrue("Serialized and deserialized records do not match.", r1.equals(r2));
- } catch (IOException ex) {
- ex.printStackTrace();
- }
- }
-
- public void testCsv() {
- File tmpfile;
- try {
- tmpfile = File.createTempFile("hadooprec", ".txt");
- FileOutputStream ostream = new FileOutputStream(tmpfile);
- CsvRecordOutput out = new CsvRecordOutput(ostream);
- RecRecord1 r1 = new RecRecord1();
- r1.setBoolVal(true);
- r1.setByteVal((byte)0x66);
- r1.setFloatVal(3.145F);
- r1.setDoubleVal(1.5234);
- r1.setIntVal(4567);
- r1.setLongVal(0x5a5a5a5a5a5aL);
- r1.setStringVal("random text");
- r1.setBufferVal(new Buffer());
- r1.setVectorVal(new ArrayList());
- r1.setMapVal(new TreeMap());
- RecRecord0 r0 = new RecRecord0();
- r0.setStringVal("other random text");
- r1.setRecordVal(r0);
- r1.serialize(out, "");
- ostream.close();
- FileInputStream istream = new FileInputStream(tmpfile);
- CsvRecordInput in = new CsvRecordInput(istream);
- RecRecord1 r2 = new RecRecord1();
- r2.deserialize(in, "");
- istream.close();
- tmpfile.delete();
- assertTrue("Serialized and deserialized records do not match.", r1.equals(r2));
-
- } catch (IOException ex) {
- ex.printStackTrace();
- }
- }
-
- public void testToString() {
- try {
- RecRecord1 r1 = new RecRecord1();
- r1.setBoolVal(true);
- r1.setByteVal((byte)0x66);
- r1.setFloatVal(3.145F);
- r1.setDoubleVal(1.5234);
- r1.setIntVal(4567);
- r1.setLongVal(0x5a5a5a5a5a5aL);
- r1.setStringVal("random text");
- byte[] barr = new byte[256];
- for (int idx = 0; idx < 256; idx++) {
- barr[idx] = (byte) idx;
- }
- r1.setBufferVal(new Buffer(barr));
- r1.setVectorVal(new ArrayList());
- r1.setMapVal(new TreeMap());
- RecRecord0 r0 = new RecRecord0();
- r0.setStringVal("other random text");
- r1.setRecordVal(r0);
- System.err.println("Illustrating toString bug"+r1.toString());
- System.err.println("Illustrating toString bug"+r1.toString());
- } catch (Throwable ex) {
- assertTrue("Record.toString cannot be invoked twice in succession."+
- "This bug has been fixed in the latest version.", false);
- }
- }
-
- public void testXml() {
- File tmpfile;
- try {
- tmpfile = File.createTempFile("hadooprec", ".xml");
- FileOutputStream ostream = new FileOutputStream(tmpfile);
- XmlRecordOutput out = new XmlRecordOutput(ostream);
- RecRecord1 r1 = new RecRecord1();
- r1.setBoolVal(true);
- r1.setByteVal((byte)0x66);
- r1.setFloatVal(3.145F);
- r1.setDoubleVal(1.5234);
- r1.setIntVal(4567);
- r1.setLongVal(0x5a5a5a5a5a5aL);
- r1.setStringVal("ran\002dom < %text<&more\uffff");
- r1.setBufferVal(new Buffer());
- r1.setVectorVal(new ArrayList());
- r1.setMapVal(new TreeMap());
- RecRecord0 r0 = new RecRecord0();
- r0.setStringVal("other %rando\007m & >&more text");
- r1.setRecordVal(r0);
- r1.serialize(out, "");
- ostream.close();
- FileInputStream istream = new FileInputStream(tmpfile);
- XmlRecordInput in = new XmlRecordInput(istream);
- RecRecord1 r2 = new RecRecord1();
- r2.deserialize(in, "");
- istream.close();
- tmpfile.delete();
- assertTrue("Serialized and deserialized records do not match.", r1.equals(r2));
- } catch (IOException ex) {
- ex.printStackTrace();
- }
- }
-
- public void testCloneable() {
- RecRecord1 r1 = new RecRecord1();
- r1.setBoolVal(true);
- r1.setByteVal((byte)0x66);
- r1.setFloatVal(3.145F);
- r1.setDoubleVal(1.5234);
- r1.setIntVal(-4567);
- r1.setLongVal(-2367L);
- r1.setStringVal("random text");
- r1.setBufferVal(new Buffer());
- r1.setVectorVal(new ArrayList());
- r1.setMapVal(new TreeMap());
- RecRecord0 r0 = new RecRecord0();
- r0.setStringVal("other random text");
- r1.setRecordVal(r0);
- try {
- RecRecord1 r2 = (RecRecord1) r1.clone();
- assertTrue("Cloneable semantics violated. r1==r2", r1 != r2);
- assertTrue("Cloneable semantics violated. r1.getClass() != r2.getClass()",
- r1.getClass() == r2.getClass());
- assertTrue("Cloneable semantics violated. !r2.equals(r1)", r2.equals(r1));
- } catch (final CloneNotSupportedException ex) {
- ex.printStackTrace();
- }
- }
-}
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/record/TestRecordVersioning.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/record/TestRecordVersioning.java
deleted file mode 100644
index 5977f03f85..0000000000
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/record/TestRecordVersioning.java
+++ /dev/null
@@ -1,241 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.record;
-
-import java.io.IOException;
-import junit.framework.*;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
-import java.util.ArrayList;
-import java.util.TreeMap;
-import org.apache.hadoop.record.meta.RecordTypeInfo;
-
-/**
- */
-public class TestRecordVersioning extends TestCase {
-
- public TestRecordVersioning(String testName) {
- super(testName);
- }
-
- @Override
- protected void setUp() throws Exception {
- }
-
- @Override
- protected void tearDown() throws Exception {
- }
-
- /*
- * basic versioning
- * write out a record and its type info, read it back using its typeinfo
- */
- public void testBasic() {
- File tmpfile, tmpRTIfile;
- try {
- tmpfile = File.createTempFile("hadooprec", ".dat");
- tmpRTIfile = File.createTempFile("hadooprti", ".dat");
- FileOutputStream ostream = new FileOutputStream(tmpfile);
- BinaryRecordOutput out = new BinaryRecordOutput(ostream);
- FileOutputStream oRTIstream = new FileOutputStream(tmpRTIfile);
- BinaryRecordOutput outRTI = new BinaryRecordOutput(oRTIstream);
- RecRecord1 r1 = new RecRecord1();
- r1.setBoolVal(true);
- r1.setByteVal((byte)0x66);
- r1.setFloatVal(3.145F);
- r1.setDoubleVal(1.5234);
- r1.setIntVal(-4567);
- r1.setLongVal(-2367L);
- r1.setStringVal("random text");
- r1.setBufferVal(new Buffer());
- r1.setVectorVal(new ArrayList());
- r1.setMapVal(new TreeMap());
- RecRecord0 r0 = new RecRecord0();
- r0.setStringVal("other random text");
- r1.setRecordVal(r0);
- r1.serialize(out, "");
- ostream.close();
- // write out the type info
- RecRecord1.getTypeInfo().serialize(outRTI);
- oRTIstream.close();
-
- // read
- FileInputStream istream = new FileInputStream(tmpfile);
- BinaryRecordInput in = new BinaryRecordInput(istream);
- FileInputStream iRTIstream = new FileInputStream(tmpRTIfile);
- BinaryRecordInput inRTI = new BinaryRecordInput(iRTIstream);
- RecordTypeInfo rti = new RecordTypeInfo();
- rti.deserialize(inRTI);
- iRTIstream.close();
- RecRecord1.setTypeFilter(rti);
- RecRecord1 r2 = new RecRecord1();
- r2.deserialize(in, "");
- istream.close();
- tmpfile.delete();
- tmpRTIfile.delete();
- assertTrue("Serialized and deserialized versioned records do not match.", r1.equals(r2));
- } catch (IOException ex) {
- ex.printStackTrace();
- }
- }
-
- /*
- * versioning
- * write out a record and its type info, read back a similar record using the written record's typeinfo
- */
- public void testVersioning() {
- File tmpfile, tmpRTIfile;
- try {
- tmpfile = File.createTempFile("hadooprec", ".dat");
- tmpRTIfile = File.createTempFile("hadooprti", ".dat");
- FileOutputStream ostream = new FileOutputStream(tmpfile);
- BinaryRecordOutput out = new BinaryRecordOutput(ostream);
- FileOutputStream oRTIstream = new FileOutputStream(tmpRTIfile);
- BinaryRecordOutput outRTI = new BinaryRecordOutput(oRTIstream);
-
- // we create an array of records to write
- ArrayList recsWrite = new ArrayList();
- int i, j, k, l;
- for (i=0; i<5; i++) {
- RecRecordOld s1Rec = new RecRecordOld();
-
- s1Rec.setName("This is record s1: " + i);
-
- ArrayList iA = new ArrayList();
- for (j=0; j<3; j++) {
- iA.add(new Long(i+j));
- }
- s1Rec.setIvec(iA);
-
- ArrayList> ssVec = new ArrayList>();
- for (j=0; j<2; j++) {
- ArrayList sVec = new ArrayList();
- for (k=0; k<3; k++) {
- RecRecord0 sRec = new RecRecord0("This is record s: ("+j+": "+k+")");
- sVec.add(sRec);
- }
- ssVec.add(sVec);
- }
- s1Rec.setSvec(ssVec);
-
- s1Rec.setInner(new RecRecord0("This is record s: " + i));
-
- ArrayList>> aaaVec = new ArrayList>>();
- for (l=0; l<2; l++) {
- ArrayList> aaVec = new ArrayList>();
- for (j=0; j<2; j++) {
- ArrayList aVec = new ArrayList();
- for (k=0; k<3; k++) {
- aVec.add(new String("THis is a nested string: (" + l + ": " + j + ": " + k + ")"));
- }
- aaVec.add(aVec);
- }
- aaaVec.add(aaVec);
- }
- s1Rec.setStrvec(aaaVec);
-
- s1Rec.setI1(100+i);
-
- java.util.TreeMap map1 = new java.util.TreeMap();
- map1.put(new Byte("23"), "23");
- map1.put(new Byte("11"), "11");
- s1Rec.setMap1(map1);
-
- java.util.TreeMap m1 = new java.util.TreeMap();
- java.util.TreeMap m2 = new java.util.TreeMap();
- m1.put(new Integer(5), 5L);
- m1.put(new Integer(10), 10L);
- m2.put(new Integer(15), 15L);
- m2.put(new Integer(20), 20L);
- java.util.ArrayList> vm1 = new java.util.ArrayList>();
- vm1.add(m1);
- vm1.add(m2);
- s1Rec.setMvec1(vm1);
- java.util.ArrayList> vm2 = new java.util.ArrayList>();
- vm2.add(m1);
- s1Rec.setMvec2(vm2);
-
- // add to our list
- recsWrite.add(s1Rec);
- }
-
- // write out to file
- for (RecRecordOld rec: recsWrite) {
- rec.serialize(out);
- }
- ostream.close();
- // write out the type info
- RecRecordOld.getTypeInfo().serialize(outRTI);
- oRTIstream.close();
-
- // read
- FileInputStream istream = new FileInputStream(tmpfile);
- BinaryRecordInput in = new BinaryRecordInput(istream);
- FileInputStream iRTIstream = new FileInputStream(tmpRTIfile);
- BinaryRecordInput inRTI = new BinaryRecordInput(iRTIstream);
- RecordTypeInfo rti = new RecordTypeInfo();
-
- // read type info
- rti.deserialize(inRTI);
- iRTIstream.close();
- RecRecordNew.setTypeFilter(rti);
-
- // read records
- ArrayList recsRead = new ArrayList();
- for (i=0; i> ss2Vec = s2In.getStrvec().get(j);
- ArrayList> ss1Vec = s1Out.getStrvec().get(j);
- for (k=0; k s2Vec = ss2Vec.get(k);
- ArrayList s1Vec = ss1Vec.get(k);
- for (l=0; l());
- r1.setMapVal(new TreeMap());
- r1.serialize(out, "");
- ostream.close();
- } catch (IOException ex) {
- ex.printStackTrace();
- }
- }
-
- public void testCsv() {
- File tmpfile;
- try {
- tmpfile = new File("/tmp/hadooptemp.txt");
- FileOutputStream ostream = new FileOutputStream(tmpfile);
- CsvRecordOutput out = new CsvRecordOutput(ostream);
- RecRecord1 r1 = new RecRecord1();
- r1.setBoolVal(true);
- r1.setByteVal((byte)0x66);
- r1.setFloatVal(3.145F);
- r1.setDoubleVal(1.5234);
- r1.setIntVal(4567);
- r1.setLongVal(0x5a5a5a5a5a5aL);
- r1.setStringVal("random text");
- r1.setBufferVal(new Buffer());
- r1.setVectorVal(new ArrayList());
- r1.setMapVal(new TreeMap());
- r1.serialize(out, "");
- ostream.close();
- } catch (IOException ex) {
- ex.printStackTrace();
- }
- }
-
- public void testXml() {
- File tmpfile;
- try {
- tmpfile = new File("/tmp/hadooptemp.xml");
- FileOutputStream ostream = new FileOutputStream(tmpfile);
- XmlRecordOutput out = new XmlRecordOutput(ostream);
- RecRecord1 r1 = new RecRecord1();
- r1.setBoolVal(true);
- r1.setByteVal((byte)0x66);
- r1.setFloatVal(3.145F);
- r1.setDoubleVal(1.5234);
- r1.setIntVal(4567);
- r1.setLongVal(0x5a5a5a5a5a5aL);
- r1.setStringVal("random text");
- r1.setBufferVal(new Buffer());
- r1.setVectorVal(new ArrayList());
- r1.setMapVal(new TreeMap());
- r1.serialize(out, "");
- ostream.close();
- } catch (IOException ex) {
- ex.printStackTrace();
- }
- }
-}
diff --git a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/typedbytes/TestIO.java b/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/typedbytes/TestIO.java
deleted file mode 100644
index a7ca587dc2..0000000000
--- a/hadoop-tools/hadoop-streaming/src/test/java/org/apache/hadoop/typedbytes/TestIO.java
+++ /dev/null
@@ -1,279 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- * http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-package org.apache.hadoop.typedbytes;
-
-import java.io.ByteArrayInputStream;
-import java.io.ByteArrayOutputStream;
-import java.io.DataInputStream;
-import java.io.DataOutputStream;
-import java.io.File;
-import java.io.FileInputStream;
-import java.io.FileOutputStream;
-import java.io.IOException;
-import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.HashMap;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.TreeMap;
-
-import org.apache.hadoop.io.ArrayWritable;
-import org.apache.hadoop.io.BooleanWritable;
-import org.apache.hadoop.io.ByteWritable;
-import org.apache.hadoop.io.BytesWritable;
-import org.apache.hadoop.io.DoubleWritable;
-import org.apache.hadoop.io.FloatWritable;
-import org.apache.hadoop.io.MapWritable;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.io.VIntWritable;
-import org.apache.hadoop.io.VLongWritable;
-import org.apache.hadoop.io.Writable;
-import org.apache.hadoop.record.Buffer;
-import org.apache.hadoop.record.RecRecord0;
-import org.apache.hadoop.record.RecRecord1;
-
-import org.junit.After;
-import org.junit.Before;
-import org.junit.Test;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertTrue;
-
-public class TestIO {
-
- private File tmpfile;
- private File tmpdir;
-
- @Before
- public void setUp() throws Exception {
- this.tmpdir = new File(System.getProperty("test.build.data", "/tmp"));
- if(this.tmpdir.exists() || this.tmpdir.mkdirs()) {
- this.tmpfile = new File(this.tmpdir,
- "typedbytes.bin");
- } else {
- throw new IOException("Failed to create directory " + tmpdir.getAbsolutePath());
- }
- }
-
- @After
- public void tearDown() throws Exception {
- tmpfile.delete();
- }
-
- @Test
- public void testIO() throws IOException {
- ArrayList vector = new ArrayList();
- vector.add("test");
- vector.add(false);
- vector.add(12345);
- List list = new LinkedList();
- list.add("another test");
- list.add(true);
- list.add(123456789L);
- Map map = new HashMap();
- map.put("one", 1);
- map.put("vector", vector);
- Buffer buffer = new Buffer(new byte[] { 1, 2, 3, 4 });
- buffer.setCapacity(10);
- Object[] objects = new Object[] {
- buffer,
- (byte) 123, true, 12345, 123456789L, (float) 1.2, 1.234,
- "random string", vector, list, map
- };
-
- FileOutputStream ostream = new FileOutputStream(tmpfile);
- DataOutputStream dostream = new DataOutputStream(ostream);
- TypedBytesOutput out = new TypedBytesOutput(dostream);
- for (Object obj : objects) {
- out.write(obj);
- }
- dostream.close();
- ostream.close();
-
- FileInputStream istream = new FileInputStream(tmpfile);
- DataInputStream distream = new DataInputStream(istream);
- TypedBytesInput in = new TypedBytesInput(distream);
- for (Object obj : objects) {
- assertEquals(obj, in.read());
- }
- distream.close();
- istream.close();
-
- istream = new FileInputStream(tmpfile);
- distream = new DataInputStream(istream);
- in = new TypedBytesInput(distream);
- for (Object obj : objects) {
- byte[] bytes = in.readRaw();
- ByteArrayInputStream bais = new ByteArrayInputStream(bytes);
- DataInputStream dis = new DataInputStream(bais);
- assertEquals(obj, (new TypedBytesInput(dis)).read());
- ByteArrayOutputStream baos = new ByteArrayOutputStream();
- TypedBytesOutput tbout = new TypedBytesOutput(new DataOutputStream(baos));
- tbout.writeRaw(bytes);
- bais = new ByteArrayInputStream(bytes);
- dis = new DataInputStream(bais);
- assertEquals(obj, (new TypedBytesInput(dis)).read());
- }
- distream.close();
- istream.close();
- }
-
- @Test
- public void testCustomTypesIO() throws IOException {
- byte[] rawBytes = new byte[] { 100, 0, 0, 0, 3, 1, 2, 3 };
-
- FileOutputStream ostream = new FileOutputStream(tmpfile);
- DataOutputStream dostream = new DataOutputStream(ostream);
- TypedBytesOutput out = new TypedBytesOutput(dostream);
- out.writeRaw(rawBytes);
- dostream.close();
- ostream.close();
-
- FileInputStream istream = new FileInputStream(tmpfile);
- DataInputStream distream = new DataInputStream(istream);
- TypedBytesInput in = new TypedBytesInput(distream);
- assertTrue(Arrays.equals(rawBytes, in.readRaw()));
- distream.close();
- istream.close();
- }
-
- @Test
- public void testRecordIO() throws IOException {
- RecRecord1 r1 = new RecRecord1();
- r1.setBoolVal(true);
- r1.setByteVal((byte) 0x66);
- r1.setFloatVal(3.145F);
- r1.setDoubleVal(1.5234);
- r1.setIntVal(-4567);
- r1.setLongVal(-2367L);
- r1.setStringVal("random text");
- r1.setBufferVal(new Buffer());
- r1.setVectorVal(new ArrayList());
- r1.setMapVal(new TreeMap());
- RecRecord0 r0 = new RecRecord0();
- r0.setStringVal("other random text");
- r1.setRecordVal(r0);
-
- FileOutputStream ostream = new FileOutputStream(tmpfile);
- DataOutputStream dostream = new DataOutputStream(ostream);
- TypedBytesRecordOutput out = TypedBytesRecordOutput.get(dostream);
- r1.serialize(out, "");
- dostream.close();
- ostream.close();
-
- FileInputStream istream = new FileInputStream(tmpfile);
- DataInputStream distream = new DataInputStream(istream);
- TypedBytesRecordInput in = TypedBytesRecordInput.get(distream);
- RecRecord1 r2 = new RecRecord1();
- r2.deserialize(in, "");
- distream.close();
- istream.close();
- assertEquals(r1, r2);
- }
-
- @Test
- public void testWritableIO() throws IOException {
- Writable[] vectorValues = new Writable[] {
- new Text("test1"), new Text("test2"), new Text("test3")
- };
- ArrayWritable vector = new ArrayWritable(Text.class, vectorValues);
- MapWritable map = new MapWritable();
- map.put(new Text("one"), new VIntWritable(1));
- map.put(new Text("two"), new VLongWritable(2));
- Writable[] writables = new Writable[] {
- new BytesWritable(new byte[] { 1, 2, 3, 4 }),
- new ByteWritable((byte) 123), new BooleanWritable(true),
- new VIntWritable(12345), new VLongWritable(123456789L),
- new FloatWritable((float) 1.2), new DoubleWritable(1.234),
- new Text("random string")
- };
- TypedBytesWritable tbw = new TypedBytesWritable();
- tbw.setValue("typed bytes text");
- RecRecord1 r1 = new RecRecord1();
- r1.setBoolVal(true);
- r1.setByteVal((byte) 0x66);
- r1.setFloatVal(3.145F);
- r1.setDoubleVal(1.5234);
- r1.setIntVal(-4567);
- r1.setLongVal(-2367L);
- r1.setStringVal("random text");
- r1.setBufferVal(new Buffer());
- r1.setVectorVal(new ArrayList());
- r1.setMapVal(new TreeMap());
- RecRecord0 r0 = new RecRecord0();
- r0.setStringVal("other random text");
- r1.setRecordVal(r0);
-
- FileOutputStream ostream = new FileOutputStream(tmpfile);
- DataOutputStream dostream = new DataOutputStream(ostream);
- TypedBytesWritableOutput out = new TypedBytesWritableOutput(dostream);
- for (Writable w : writables) {
- out.write(w);
- }
- out.write(tbw);
- out.write(vector);
- out.write(map);
- out.write(r1);
- dostream.close();
- ostream.close();
-
- FileInputStream istream = new FileInputStream(tmpfile);
- DataInputStream distream = new DataInputStream(istream);
-
- TypedBytesWritableInput in = new TypedBytesWritableInput(distream);
- for (Writable w : writables) {
- assertEquals(w, in.read());
- }
-
- assertEquals(tbw.getValue().toString(), in.read().toString());
-
- assertEquals(ArrayWritable.class, in.readType());
- ArrayWritable aw = in.readArray();
- Writable[] writables1 = vector.get(), writables2 = aw.get();
- assertEquals(writables1.length, writables2.length);
- for (int i = 0; i < writables1.length; i++) {
- assertEquals(((Text) writables1[i]).toString(),
- ((TypedBytesWritable) writables2[i]).getValue());
- }
- assertEquals(MapWritable.class, in.readType());
-
- MapWritable mw = in.readMap();
- assertEquals(map.entrySet(), mw.entrySet());
-
- assertEquals(Type.LIST, TypedBytesInput.get(distream).readType());
- assertEquals(r1.getBoolVal(), TypedBytesInput.get(distream).read());
- assertEquals(r1.getByteVal(), TypedBytesInput.get(distream).read());
- assertEquals(r1.getIntVal(), TypedBytesInput.get(distream).read());
- assertEquals(r1.getLongVal(), TypedBytesInput.get(distream).read());
- assertEquals(r1.getFloatVal(), TypedBytesInput.get(distream).read());
- assertEquals(r1.getDoubleVal(), TypedBytesInput.get(distream).read());
- assertEquals(r1.getStringVal(), TypedBytesInput.get(distream).read());
- Object prevObj = null, obj = TypedBytesInput.get(distream).read();
- while (obj != null) {
- prevObj = obj;
- obj = TypedBytesInput.get(distream).read();
- }
- List recList = (List) prevObj;
- assertEquals(r0.getStringVal(), recList.get(0));
-
- distream.close();
- istream.close();
- }
-
-}