From 08d6213083891eb7e1661d1b5f56121161868a9e Mon Sep 17 00:00:00 2001
From: Colin McCabe
Date: Mon, 2 Dec 2013 17:28:53 +0000
Subject: [PATCH] HADOOP-10130. RawLocalFS pread does not track FileSystem
 Statistics (Binglin Chang via Colin Patrick McCabe)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1547117 13f79535-47bb-0310-9956-ffa450edef68
---
 .../hadoop-common/CHANGES.txt                 |  3 ++
 .../apache/hadoop/fs/RawLocalFileSystem.java  | 43 ++++---------------
 .../hadoop/fs/FCStatisticsBaseTest.java       |  1 +
 .../hadoop/fs/TestLocalFsFCStatistics.java    |  3 +-
 4 files changed, 14 insertions(+), 36 deletions(-)

diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index dbb5b72553..d3186bfc37 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -516,6 +516,9 @@ Release 2.2.1 - UNRELEASED
     HADOOP-9114. After defined the dfs.checksum.type as the NULL, write file and hflush will
     through java.lang.ArrayIndexOutOfBoundsException (Sathish via umamahesh)
 
+    HADOOP-10130. RawLocalFS::LocalFSFileInputStream.pread does not track
+    FS::Statistics (Binglin Chang via Colin Patrick McCabe)
+
 Release 2.2.0 - 2013-10-13
 
   INCOMPATIBLE CHANGES
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java
index c2e2458fe0..7d70ada73b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java
@@ -83,39 +83,6 @@ public void initialize(URI uri, Configuration conf) throws IOException {
     setConf(conf);
   }
 
-  class TrackingFileInputStream extends FileInputStream {
-    public TrackingFileInputStream(File f) throws IOException {
-      super(f);
-    }
-
-    @Override
-    public int read() throws IOException {
-      int result = super.read();
-      if (result != -1) {
-        statistics.incrementBytesRead(1);
-      }
-      return result;
-    }
-
-    @Override
-    public int read(byte[] data) throws IOException {
-      int result = super.read(data);
-      if (result != -1) {
-        statistics.incrementBytesRead(result);
-      }
-      return result;
-    }
-
-    @Override
-    public int read(byte[] data, int offset, int length) throws IOException {
-      int result = super.read(data, offset, length);
-      if (result != -1) {
-        statistics.incrementBytesRead(result);
-      }
-      return result;
-    }
-  }
-
   /*******************************************************
    * For open()'s FSInputStream.
    *******************************************************/
@@ -124,7 +91,7 @@ class LocalFSFileInputStream extends FSInputStream implements HasFileDescriptor
     private long position;
 
     public LocalFSFileInputStream(Path f) throws IOException {
-      this.fis = new TrackingFileInputStream(pathToFile(f));
+      fis = new FileInputStream(pathToFile(f));
     }
 
     @Override
@@ -159,6 +126,7 @@ public int read() throws IOException {
         int value = fis.read();
         if (value >= 0) {
           this.position++;
+          statistics.incrementBytesRead(1);
         }
         return value;
       } catch (IOException e) {                 // unexpected exception
@@ -172,6 +140,7 @@ public int read(byte[] b, int off, int len) throws IOException {
         int value = fis.read(b, off, len);
         if (value > 0) {
           this.position += value;
+          statistics.incrementBytesRead(value);
         }
         return value;
       } catch (IOException e) {                 // unexpected exception
@@ -184,7 +153,11 @@ public int read(long position, byte[] b, int off, int len)
       throws IOException {
       ByteBuffer bb = ByteBuffer.wrap(b, off, len);
       try {
-        return fis.getChannel().read(bb, position);
+        int value = fis.getChannel().read(bb, position);
+        if (value > 0) {
+          statistics.incrementBytesRead(value);
+        }
+        return value;
       } catch (IOException e) {
         throw new FSError(e);
       }
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FCStatisticsBaseTest.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FCStatisticsBaseTest.java
index cc80e7ced8..90337a6433 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FCStatisticsBaseTest.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/FCStatisticsBaseTest.java
@@ -91,6 +91,7 @@ public void testStatistics() throws IOException, URISyntaxException {
     FSDataInputStream fstr = fc.open(filePath);
     byte[] buf = new byte[blockSize];
     int bytesRead = fstr.read(buf, 0, blockSize);
+    fstr.read(0, buf, 0, blockSize);
     Assert.assertEquals(blockSize, bytesRead);
     verifyReadBytes(stats);
     verifyWrittenBytes(stats);
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFsFCStatistics.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFsFCStatistics.java
index 8a4552299b..fe26f73a2e 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFsFCStatistics.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/TestLocalFsFCStatistics.java
@@ -47,7 +47,8 @@ public void tearDown() throws Exception {
 
   @Override
   protected void verifyReadBytes(Statistics stats) {
-    Assert.assertEquals(blockSize, stats.getBytesRead());
+    // one blockSize for read, one for pread
+    Assert.assertEquals(2*blockSize, stats.getBytesRead());
   }
 
   @Override