From 59830ca772dfb5dcc8b3e5281ca482dea5a5fa3e Mon Sep 17 00:00:00 2001
From: Andrew Wang
Date: Tue, 19 Sep 2017 13:44:42 -0700
Subject: [PATCH] HDFS-12444. Reduce runtime of TestWriteReadStripedFile.
 Contributed by Huafeng Wang and Andrew Wang.

---
 .../hadoop/hdfs/StripedFileTestUtil.java       | 13 ++++++----
 .../hadoop/hdfs/TestWriteReadStripedFile.java  | 24 +++++++++++--------
 .../hdfs/TestWriteStripedFileWithFailure.java  |  3 ++-
 3 files changed, 25 insertions(+), 15 deletions(-)

diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/StripedFileTestUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/StripedFileTestUtil.java
index 1489e48195..c771d21ea1 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/StripedFileTestUtil.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/StripedFileTestUtil.java
@@ -79,10 +79,15 @@ static void verifyLength(FileSystem fs, Path srcPath, int fileLength)
     assertEquals("File length should be the same", fileLength, status.getLen());
   }
 
-  static void verifyPread(FileSystem fs, Path srcPath, int fileLength,
-      byte[] expected, byte[] buf) throws IOException {
-    final ErasureCodingPolicy ecPolicy =
-        ((DistributedFileSystem)fs).getErasureCodingPolicy(srcPath);
+  static void verifyPread(DistributedFileSystem fs, Path srcPath,
+      int fileLength, byte[] expected, byte[] buf) throws IOException {
+    final ErasureCodingPolicy ecPolicy = fs.getErasureCodingPolicy(srcPath);
+    verifyPread(fs, srcPath, fileLength, expected, buf, ecPolicy);
+  }
+
+  static void verifyPread(FileSystem fs, Path srcPath, int fileLength,
+      byte[] expected, byte[] buf, ErasureCodingPolicy ecPolicy)
+      throws IOException {
     try (FSDataInputStream in = fs.open(srcPath)) {
       int[] startOffsets = {0, 1, ecPolicy.getCellSize() - 102,
           ecPolicy.getCellSize(), ecPolicy.getCellSize() + 102,
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestWriteReadStripedFile.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestWriteReadStripedFile.java
index f27c9786db..805bcea85b 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestWriteReadStripedFile.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestWriteReadStripedFile.java
@@ -25,6 +25,7 @@
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
+import org.apache.hadoop.hdfs.protocol.SystemErasureCodingPolicies;
 import org.apache.hadoop.hdfs.server.blockmanagement.BlockPlacementPolicy;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.hdfs.web.WebHdfsConstants;
@@ -47,12 +48,13 @@ public class TestWriteReadStripedFile {
   public static final Log LOG =
       LogFactory.getLog(TestWriteReadStripedFile.class);
   private final ErasureCodingPolicy ecPolicy =
-      StripedFileTestUtil.getDefaultECPolicy();
+      SystemErasureCodingPolicies.getByID(
+          SystemErasureCodingPolicies.RS_3_2_POLICY_ID);
   private final int cellSize = ecPolicy.getCellSize();
   private final short dataBlocks = (short) ecPolicy.getNumDataUnits();
   private final short parityBlocks = (short) ecPolicy.getNumParityUnits();
   private final int numDNs = dataBlocks + parityBlocks;
-  private final int stripesPerBlock = 4;
+  private final int stripesPerBlock = 2;
   private final int blockSize = stripesPerBlock * cellSize;
   private final int blockGroupSize = blockSize * dataBlocks;
 
@@ -78,11 +80,10 @@ public void setup() throws IOException {
         false);
     cluster = new MiniDFSCluster.Builder(conf).numDataNodes(numDNs).build();
     fs = cluster.getFileSystem();
-    fs.enableErasureCodingPolicy(
-        StripedFileTestUtil.getDefaultECPolicy().getName());
+    fs.enableErasureCodingPolicy(ecPolicy.getName());
     fs.mkdirs(new Path("/ec"));
     cluster.getFileSystem().getClient().setErasureCodingPolicy("/ec",
-        StripedFileTestUtil.getDefaultECPolicy().getName());
+        ecPolicy.getName());
   }
 
   @After
@@ -225,7 +226,8 @@ private void testOneFileUsingDFSStripedInputStream(String src, int fileLength,
 
     byte[] smallBuf = new byte[1024];
     byte[] largeBuf = new byte[fileLength + 100];
-    StripedFileTestUtil.verifyPread(fs, srcPath, fileLength, expected, largeBuf);
+    StripedFileTestUtil.verifyPread(fs, srcPath, fileLength, expected,
+        largeBuf);
 
     StripedFileTestUtil.verifyStatefulRead(fs, srcPath, fileLength, expected,
         largeBuf);
@@ -268,13 +270,15 @@ public void testWriteReadUsingWebHdfs() throws Exception {
 
     byte[] smallBuf = new byte[1024];
     byte[] largeBuf = new byte[fileLength + 100];
-    // TODO: HDFS-8797
-    //StripedFileTestUtil.verifyPread(fs, srcPath, fileLength, expected, largeBuf);
+    StripedFileTestUtil
+        .verifyPread(fs, srcPath, fileLength, expected, largeBuf, ecPolicy);
 
-    StripedFileTestUtil.verifyStatefulRead(fs, srcPath, fileLength, expected, largeBuf);
+    StripedFileTestUtil
+        .verifyStatefulRead(fs, srcPath, fileLength, expected, largeBuf);
     StripedFileTestUtil.verifySeek(fs, srcPath, fileLength, ecPolicy,
         blockGroupSize);
-    StripedFileTestUtil.verifyStatefulRead(fs, srcPath, fileLength, expected, smallBuf);
+    StripedFileTestUtil
+        .verifyStatefulRead(fs, srcPath, fileLength, expected, smallBuf);
     // webhdfs doesn't support bytebuffer read
   }
 
diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestWriteStripedFileWithFailure.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestWriteStripedFileWithFailure.java
index 03e9e10bf1..c859b71097 100644
--- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestWriteStripedFileWithFailure.java
+++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestWriteStripedFileWithFailure.java
@@ -160,7 +160,8 @@ private void writeFileWithDNFailure(int fileLength,
         blockSize * dataBlocks);
     StripedFileTestUtil.verifyStatefulRead(fs, srcPath, fileLength, expected,
         smallBuf);
-    StripedFileTestUtil.verifyPread(fs, srcPath, fileLength, expected, largeBuf);
+    StripedFileTestUtil.verifyPread((DistributedFileSystem)fs, srcPath,
+        fileLength, expected, largeBuf);
 
     // delete the file
     fs.delete(srcPath, true);
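
Note (not part of the patch): a minimal sketch of how the runtime reduction falls out of the constants changed in TestWriteReadStripedFile. It reuses only APIs that already appear in the diff (SystemErasureCodingPolicies.getByID, RS_3_2_POLICY_ID, and the ErasureCodingPolicy getters); the class name and the assumption that the built-in policies use a 1 MB cell size are illustrative, not taken from the patch.

    // Hypothetical standalone sketch; not part of the patch.
    import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
    import org.apache.hadoop.hdfs.protocol.SystemErasureCodingPolicies;

    public class StripedTestSizing {
      public static void main(String[] args) {
        // The patch switches the test from the default policy to RS(3,2).
        ErasureCodingPolicy ecPolicy = SystemErasureCodingPolicies.getByID(
            SystemErasureCodingPolicies.RS_3_2_POLICY_ID);
        int cellSize = ecPolicy.getCellSize();                     // assumed 1 MB for built-in policies
        short dataBlocks = (short) ecPolicy.getNumDataUnits();     // 3
        short parityBlocks = (short) ecPolicy.getNumParityUnits(); // 2
        int numDNs = dataBlocks + parityBlocks;                    // 5 DataNodes in the MiniDFSCluster
        int stripesPerBlock = 2;                                   // patched value (was 4)
        int blockSize = stripesPerBlock * cellSize;
        int blockGroupSize = blockSize * dataBlocks;
        System.out.println("numDNs=" + numDNs + ", blockSize=" + blockSize
            + ", blockGroupSize=" + blockGroupSize);
      }
    }

With fewer data/parity units, fewer DataNodes are started and each full block group written by the test is smaller, which is what shortens the test runtime; the explicit-ecPolicy verifyPread overload added in StripedFileTestUtil is what lets the WebHDFS test (where the FileSystem is not a DistributedFileSystem) re-enable the pread check.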