HADOOP-14081. S3A: Consider avoiding array copy in S3ABlockOutputStream (ByteArrayBlock). Contributed by Rajesh Balamohan

commit 8035749c26
parent 172b23af33
Author: Steve Loughran
Date:   2017-02-20 16:21:00 +00:00


@@ -298,6 +298,25 @@ DataBlock create(int limit) throws IOException {
 
   }
 
+  static class S3AByteArrayOutputStream extends ByteArrayOutputStream {
+
+    S3AByteArrayOutputStream(int size) {
+      super(size);
+    }
+
+    /**
+     * InputStream backed by the internal byte array.
+     *
+     * @return an input stream over the buffer's contents
+     */
+    ByteArrayInputStream getInputStream() {
+      ByteArrayInputStream bin = new ByteArrayInputStream(this.buf, 0, count);
+      this.reset();
+      this.buf = null;
+      return bin;
+    }
+  }
+
   /**
    * Stream to memory via a {@code ByteArrayOutputStream}.
    *
@@ -310,14 +329,14 @@ DataBlock create(int limit) throws IOException {
    */
   static class ByteArrayBlock extends DataBlock {
-    private ByteArrayOutputStream buffer;
+    private S3AByteArrayOutputStream buffer;
     private final int limit;
     // cache data size so that it is consistent after the buffer is reset.
     private Integer dataSize;
 
     ByteArrayBlock(int limit) {
       this.limit = limit;
-      buffer = new ByteArrayOutputStream();
+      buffer = new S3AByteArrayOutputStream(limit);
     }
 
     /**
@@ -333,8 +352,7 @@ int dataSize() {
     InputStream startUpload() throws IOException {
       super.startUpload();
       dataSize = buffer.size();
-      ByteArrayInputStream bufferData = new ByteArrayInputStream(
-          buffer.toByteArray());
+      ByteArrayInputStream bufferData = buffer.getInputStream();
       buffer = null;
       return bufferData;
     }
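
What the change buys: ByteArrayOutputStream.toByteArray() allocates a fresh array and copies the internal buffer into it, so the old startUpload() briefly held two copies of up to limit bytes per block. The new S3AByteArrayOutputStream instead wraps the internal buf/count fields (both protected in java.io.ByteArrayOutputStream, so visible to subclasses) directly in a ByteArrayInputStream and then drops its own reference. A minimal, self-contained sketch of the same trick outside Hadoop; the class and method names below are illustrative, not taken from the patch:

  import java.io.ByteArrayInputStream;
  import java.io.ByteArrayOutputStream;
  import java.io.InputStream;
  import java.nio.charset.StandardCharsets;

  public class ZeroCopyDemo {

    /**
     * A subclass can hand the protected buf/count fields straight to a
     * ByteArrayInputStream instead of paying for the defensive copy
     * made by toByteArray().
     */
    static class NoCopyByteArrayOutputStream extends ByteArrayOutputStream {
      NoCopyByteArrayOutputStream(int size) {
        super(size);
      }

      /** Wrap the live buffer; the stream must not be written afterwards. */
      InputStream getInputStream() {
        InputStream in = new ByteArrayInputStream(buf, 0, count);
        reset();      // mark the output stream empty
        buf = null;   // drop the reference so only the reader keeps the array alive
        return in;
      }
    }

    public static void main(String[] args) throws Exception {
      NoCopyByteArrayOutputStream out = new NoCopyByteArrayOutputStream(64);
      out.write("hello, s3a".getBytes(StandardCharsets.UTF_8));

      // One allocation, no copy: the input stream reads the very array
      // the output stream filled.
      InputStream in = out.getInputStream();
      byte[] readBack = new byte[16];
      int n = in.read(readBack);
      System.out.println(new String(readBack, 0, n, StandardCharsets.UTF_8));
    }
  }

The caveat is that the returned input stream aliases the live buffer: any later write through the output stream would be visible to, or corrupt, the reader. That is why both the patch and this sketch reset the stream and null out buf before handing the array over, and why ByteArrayBlock also sets buffer = null once the upload starts.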