HADOOP-8815. RandomDatum needs to override hashCode(). Contributed by Brandon Li.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1389661 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Suresh Srinivas 2012-09-25 00:11:56 +00:00
parent 5308a0ecd5
commit 44ae25a4e8
3 changed files with 20 additions and 0 deletions

View File

@@ -228,6 +228,9 @@ Trunk (Unreleased)
HADOOP-8813. Add InterfaceAudience and InterfaceStability annotations
to RPC Server and Client classes. (Brandon Li via suresh)
HADOOP-8815. RandomDatum needs to override hashCode().
(Brandon Li via suresh)
OPTIMIZATIONS
HADOOP-7761. Improve the performance of raw comparisons. (todd)

View File

@@ -21,6 +21,7 @@
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.Arrays;
import java.util.Random;
@@ -65,6 +66,11 @@ public boolean equals(Object o) {
return compareTo((RandomDatum)o) == 0;
}
/**
 * Computes the hash from the contents of the backing {@code data} array
 * so that hashCode() stays consistent with equals() (which delegates to
 * compareTo), as required for use as a key in hash-based collections.
 */
@Override
public int hashCode() {
  return Arrays.hashCode(data);
}
private static final char[] HEX_DIGITS =
{'0','1','2','3','4','5','6','7','8','9','a','b','c','d','e','f'};

View File

@@ -34,6 +34,8 @@
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.Random;
import java.util.zip.GZIPInputStream;
import java.util.zip.GZIPOutputStream;
@@ -202,6 +204,15 @@ private static void codecTest(Configuration conf, int seed, int count,
v2.readFields(inflateIn);
assertTrue("original and compressed-then-decompressed-output not equal",
k1.equals(k2) && v1.equals(v2));
// original and compressed-then-decompressed-output have the same hashCode
Map<RandomDatum, String> m = new HashMap<RandomDatum, String>();
m.put(k1, k1.toString());
m.put(v1, v1.toString());
String result = m.get(k2);
assertEquals("k1 and k2 hashcode not equal", result, k1.toString());
result = m.get(v2);
assertEquals("v1 and v2 hashcode not equal", result, v1.toString());
}
// De-compress data byte-at-a-time