HADOOP-6169. Removing deprecated method calls in TFile. (hong tang via mahadev)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@800199 13f79535-47bb-0310-9956-ffa450edef68
Committed by: Mahadev Konar, 2009-08-03 05:06:59 +00:00
parent e5ea0ce4a6
commit 7be6fc4e5d
10 changed files with 24 additions and 22 deletions

View File

@ -488,6 +488,8 @@ Trunk (unreleased changes)
HADOOP-6160. Fix releaseaudit target to run on specific directories.
(gkesavan)
HADOOP-6169. Removing deprecated method calls in TFile. (hong tang via mahadev)
OPTIMIZATIONS
HADOOP-5595. NameNode does not need to run a replicator to choose a

View File

@ -125,7 +125,7 @@ public WBlockState(Algorithm compressionAlgo, FSDataOutputStream fsOut,
fsOutputBuffer.setCapacity(TFile.getFSOutputBufferSize(conf));
this.fsBufferedOutput =
new SimpleBufferedOutputStream(this.fsOut, fsOutputBuffer.get());
new SimpleBufferedOutputStream(this.fsOut, fsOutputBuffer.getBytes());
this.compressor = compressAlgo.getCompressor();
try {

View File

@ -116,12 +116,12 @@ public long skip(long n) throws IOException {
}
@Override
public void mark(int readlimit) {
public synchronized void mark(int readlimit) {
mark = pos;
}
@Override
public void reset() throws IOException {
public synchronized void reset() throws IOException {
if (mark < 0) throw new IOException("Resetting to invalid mark");
pos = mark;
}

View File

@ -34,7 +34,7 @@ public final class ByteArray implements RawComparable {
* @param other
*/
public ByteArray(BytesWritable other) {
this(other.get(), 0, other.getSize());
this(other.getBytes(), 0, other.getLength());
}
/**

View File

@ -16,6 +16,7 @@
*/
package org.apache.hadoop.io.file.tfile;
import java.io.Serializable;
import java.util.Comparator;
import org.apache.hadoop.io.RawComparator;
@ -72,7 +73,7 @@ public long magnitude() {
}
}
public static final class ScalarComparator implements Comparator<Scalar> {
public static final class ScalarComparator implements Comparator<Scalar>, Serializable {
@Override
public int compare(Scalar o1, Scalar o2) {
long diff = o1.magnitude() - o2.magnitude();
@ -83,7 +84,7 @@ public int compare(Scalar o1, Scalar o2) {
}
public static final class MemcmpRawComparator implements
RawComparator<Object> {
RawComparator<Object>, Serializable {
@Override
public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
return WritableComparator.compareBytes(b1, s1, l1, b2, s2, l2);

View File

@ -325,10 +325,7 @@ public void close() throws IOException {
outIndex.close();
}
if (writerBCF != null) {
writerBCF.close();
writerBCF = null;
}
writerBCF.close();
}
} finally {
IOUtils.cleanup(LOG, blkAppender, writerBCF);
@ -1583,8 +1580,8 @@ public void get(BytesWritable key, BytesWritable value)
*/
public int getKey(BytesWritable key) throws IOException {
key.setSize(getKeyLength());
getKey(key.get());
return key.getSize();
getKey(key.getBytes());
return key.getLength();
}
/**
@ -1603,10 +1600,10 @@ public long getValue(BytesWritable value) throws IOException {
int remain;
while ((remain = valueBufferInputStream.getRemain()) > 0) {
value.setSize(size + remain);
dis.readFully(value.get(), size, remain);
dis.readFully(value.getBytes(), size, remain);
size += remain;
}
return value.getSize();
return value.getLength();
} finally {
dis.close();
}
@ -1645,8 +1642,8 @@ public long writeValue(OutputStream out) throws IOException {
while ((chunkSize = valueBufferInputStream.getRemain()) > 0) {
chunkSize = Math.min(chunkSize, MAX_VAL_TRANSFER_BUF_SIZE);
valTransferBuffer.setSize(chunkSize);
dis.readFully(valTransferBuffer.get(), 0, chunkSize);
out.write(valTransferBuffer.get(), 0, chunkSize);
dis.readFully(valTransferBuffer.getBytes(), 0, chunkSize);
out.write(valTransferBuffer.getBytes(), 0, chunkSize);
size += chunkSize;
}
return size;

View File

@ -353,6 +353,7 @@ public static int size() {
/**
* Return a string representation of the version.
*/
@Override
public String toString() {
return new StringBuilder("v").append(major).append(".").append(minor)
.toString();

View File

@ -42,9 +42,9 @@ public class TestTFile extends TestCase {
System.getProperty("test.build.data", "/tmp/tfile-test");
private FileSystem fs;
private Configuration conf;
private final int minBlockSize = 512;
private final int largeVal = 3 * 1024 * 1024;
private static String localFormatter = "%010d";
private static final int minBlockSize = 512;
private static final int largeVal = 3 * 1024 * 1024;
private static final String localFormatter = "%010d";
@Override
public void setUp() throws IOException {

View File

@ -18,6 +18,7 @@
package org.apache.hadoop.io.file.tfile;
import java.io.IOException;
import java.io.Serializable;
import org.apache.hadoop.io.RawComparator;
import org.apache.hadoop.io.WritableComparator;
@ -42,7 +43,7 @@ public void setUp() throws IOException {
}
}
class MyComparator implements RawComparator<byte[]> {
class MyComparator implements RawComparator<byte[]>, Serializable {
@Override
public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {

View File

@ -140,7 +140,7 @@ public void testVLongRandom() throws IOException {
for (int i = 0; i < data.length; ++i) {
int shift = rng.nextInt(Long.SIZE) + 1;
long mask = (1L << shift) - 1;
long a = rng.nextInt() << 32;
long a = ((long) rng.nextInt()) << 32;
long b = ((long) rng.nextInt()) & 0xffffffff;
data[i] = (a + b) & mask;
}