From 31ebccc96238136560f4210bdf6766fe18e0650c Mon Sep 17 00:00:00 2001 From: Lei Xu Date: Mon, 16 Oct 2017 19:44:30 -0700 Subject: [PATCH] HDFS-12613. Native EC coder should implement release() as idempotent function. (Lei (Eddy) Xu) --- .../erasurecode/coder/ErasureCodingStep.java | 5 +- .../coder/ErasureDecodingStep.java | 5 +- .../coder/ErasureEncodingStep.java | 5 +- .../coder/HHXORErasureDecodingStep.java | 12 ++-- .../coder/HHXORErasureEncodingStep.java | 10 ++- .../io/erasurecode/coder/util/HHUtil.java | 4 +- .../rawcoder/AbstractNativeRawDecoder.java | 14 ++++- .../rawcoder/AbstractNativeRawEncoder.java | 14 ++++- .../rawcoder/NativeRSRawDecoder.java | 11 ++-- .../rawcoder/NativeRSRawEncoder.java | 11 ++-- .../rawcoder/NativeXORRawDecoder.java | 14 +++-- .../rawcoder/NativeXORRawEncoder.java | 9 +-- .../rawcoder/RSLegacyRawDecoder.java | 6 +- .../rawcoder/RawErasureDecoder.java | 17 ++++-- .../rawcoder/RawErasureEncoder.java | 16 +++-- .../apache/hadoop/io/erasurecode/jni_common.c | 5 +- .../hadoop/io/erasurecode/jni_rs_decoder.c | 9 ++- .../hadoop/io/erasurecode/jni_rs_encoder.c | 9 ++- .../hadoop/io/erasurecode/jni_xor_decoder.c | 9 ++- .../hadoop/io/erasurecode/jni_xor_encoder.c | 9 ++- .../coder/TestErasureCoderBase.java | 18 +++++- .../coder/TestHHErasureCoderBase.java | 10 ++- .../rawcoder/RawErasureCoderBenchmark.java | 9 +-- .../rawcoder/TestDummyRawCoder.java | 15 ++++- .../rawcoder/TestNativeRSRawCoder.java | 6 ++ .../rawcoder/TestNativeXORRawCoder.java | 7 +++ .../rawcoder/TestRawCoderBase.java | 61 +++++++++++++++++-- .../hadoop/hdfs/DFSStripedOutputStream.java | 2 +- .../hadoop/hdfs/PositionStripeReader.java | 3 +- .../hadoop/hdfs/StatefulStripeReader.java | 3 +- .../org/apache/hadoop/hdfs/StripeReader.java | 7 ++- .../StripedBlockChecksumReconstructor.java | 2 +- .../StripedBlockReconstructor.java | 2 +- .../hadoop/hdfs/StripedFileTestUtil.java | 6 +- 34 files changed, 269 insertions(+), 76 deletions(-) diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureCodingStep.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureCodingStep.java index 9dd0aedb84..fb89d99a05 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureCodingStep.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureCodingStep.java @@ -21,6 +21,8 @@ import org.apache.hadoop.io.erasurecode.ECBlock; import org.apache.hadoop.io.erasurecode.ECChunk; +import java.io.IOException; + /** * Erasure coding step that's involved in encoding/decoding of a block group. 
*/ @@ -47,7 +49,8 @@ public interface ErasureCodingStep { * @param inputChunks * @param outputChunks */ - void performCoding(ECChunk[] inputChunks, ECChunk[] outputChunks); + void performCoding(ECChunk[] inputChunks, ECChunk[] outputChunks) + throws IOException; /** * Notify erasure coder that all the chunks of input blocks are processed so diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureDecodingStep.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureDecodingStep.java index ae396a2909..24f55470e1 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureDecodingStep.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureDecodingStep.java @@ -22,6 +22,8 @@ import org.apache.hadoop.io.erasurecode.ECChunk; import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureDecoder; +import java.io.IOException; + /** * Erasure decoding step, a wrapper of all the necessary information to perform * a decoding step involved in the whole process of decoding a block group. @@ -50,7 +52,8 @@ public ErasureDecodingStep(ECBlock[] inputBlocks, int[] erasedIndexes, } @Override - public void performCoding(ECChunk[] inputChunks, ECChunk[] outputChunks) { + public void performCoding(ECChunk[] inputChunks, ECChunk[] outputChunks) + throws IOException { rawDecoder.decode(inputChunks, erasedIndexes, outputChunks); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureEncodingStep.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureEncodingStep.java index df4ed4b406..5fc5c7a099 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureEncodingStep.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureEncodingStep.java @@ -22,6 +22,8 @@ import org.apache.hadoop.io.erasurecode.ECChunk; import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureEncoder; +import java.io.IOException; + /** * Erasure encoding step, a wrapper of all the necessary information to perform * an encoding step involved in the whole process of encoding a block group. 
@@ -46,7 +48,8 @@ public ErasureEncodingStep(ECBlock[] inputBlocks, ECBlock[] outputBlocks, } @Override - public void performCoding(ECChunk[] inputChunks, ECChunk[] outputChunks) { + public void performCoding(ECChunk[] inputChunks, ECChunk[] outputChunks) + throws IOException { rawEncoder.encode(inputChunks, outputChunks); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/HHXORErasureDecodingStep.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/HHXORErasureDecodingStep.java index 98b85037d5..16a3c0fa61 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/HHXORErasureDecodingStep.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/HHXORErasureDecodingStep.java @@ -17,6 +17,7 @@ */ package org.apache.hadoop.io.erasurecode.coder; +import java.io.IOException; import java.nio.ByteBuffer; import org.apache.hadoop.classification.InterfaceAudience; @@ -64,7 +65,8 @@ public HHXORErasureDecodingStep(ECBlock[] inputBlocks, int[] erasedIndexes, } @Override - public void performCoding(ECChunk[] inputChunks, ECChunk[] outputChunks) { + public void performCoding(ECChunk[] inputChunks, ECChunk[] outputChunks) + throws IOException { if (erasedIndexes.length == 0) { return; } @@ -74,7 +76,8 @@ public void performCoding(ECChunk[] inputChunks, ECChunk[] outputChunks) { performCoding(inputBuffers, outputBuffers); } - private void performCoding(ByteBuffer[] inputs, ByteBuffer[] outputs) { + private void performCoding(ByteBuffer[] inputs, ByteBuffer[] outputs) + throws IOException { final int numDataUnits = rsRawDecoder.getNumDataUnits(); final int numParityUnits = rsRawDecoder.getNumParityUnits(); final int numTotalUnits = numDataUnits + numParityUnits; @@ -119,7 +122,7 @@ private void performCoding(ByteBuffer[] inputs, ByteBuffer[] outputs) { private void doDecodeSingle(ByteBuffer[][] inputs, ByteBuffer[][] outputs, int erasedLocationToFix, int bufSize, - boolean isDirect) { + boolean isDirect) throws IOException { final int numDataUnits = rsRawDecoder.getNumDataUnits(); final int numParityUnits = rsRawDecoder.getNumParityUnits(); final int subPacketSize = getSubPacketSize(); @@ -261,7 +264,8 @@ private void doDecodeByPiggyBack(byte[][] inputs, int[] inputOffsets, private void doDecodeMultiAndParity(ByteBuffer[][] inputs, ByteBuffer[][] outputs, - int[] erasedLocationToFix, int bufSize) { + int[] erasedLocationToFix, int bufSize) + throws IOException { final int numDataUnits = rsRawDecoder.getNumDataUnits(); final int numParityUnits = rsRawDecoder.getNumParityUnits(); final int numTotalUnits = numDataUnits + numParityUnits; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/HHXORErasureEncodingStep.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/HHXORErasureEncodingStep.java index 11b1bf1b35..6a56442701 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/HHXORErasureEncodingStep.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/HHXORErasureEncodingStep.java @@ -17,6 +17,7 @@ */ package org.apache.hadoop.io.erasurecode.coder; +import java.io.IOException; import java.nio.ByteBuffer; import org.apache.hadoop.classification.InterfaceAudience; @@ -56,13 +57,15 @@ public HHXORErasureEncodingStep(ECBlock[] inputBlocks, 
ECBlock[] outputBlocks, } @Override - public void performCoding(ECChunk[] inputChunks, ECChunk[] outputChunks) { + public void performCoding(ECChunk[] inputChunks, ECChunk[] outputChunks) + throws IOException { ByteBuffer[] inputBuffers = ECChunk.toBuffers(inputChunks); ByteBuffer[] outputBuffers = ECChunk.toBuffers(outputChunks); performCoding(inputBuffers, outputBuffers); } - private void performCoding(ByteBuffer[] inputs, ByteBuffer[] outputs) { + private void performCoding(ByteBuffer[] inputs, ByteBuffer[] outputs) + throws IOException { final int numDataUnits = this.rsRawEncoder.getNumDataUnits(); final int numParityUnits = this.rsRawEncoder.getNumParityUnits(); final int subSPacketSize = getSubPacketSize(); @@ -95,7 +98,8 @@ private void performCoding(ByteBuffer[] inputs, ByteBuffer[] outputs) { doEncode(hhInputs, hhOutputs); } - private void doEncode(ByteBuffer[][] inputs, ByteBuffer[][] outputs) { + private void doEncode(ByteBuffer[][] inputs, ByteBuffer[][] outputs) + throws IOException { final int numParityUnits = this.rsRawEncoder.getNumParityUnits(); // calc piggyBacks using first sub-packet diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/util/HHUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/util/HHUtil.java index cfb567e34c..91d02415bf 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/util/HHUtil.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/util/HHUtil.java @@ -17,6 +17,7 @@ */ package org.apache.hadoop.io.erasurecode.coder.util; +import java.io.IOException; import java.nio.ByteBuffer; import org.apache.hadoop.HadoopIllegalArgumentException; import org.apache.hadoop.classification.InterfaceAudience; @@ -64,7 +65,8 @@ public static ByteBuffer[] getPiggyBacksFromInput(ByteBuffer[] inputs, int[] piggyBackIndex, int numParityUnits, int pgIndex, - RawErasureEncoder encoder) { + RawErasureEncoder encoder) + throws IOException { ByteBuffer[] emptyInput = new ByteBuffer[inputs.length]; ByteBuffer[] tempInput = new ByteBuffer[inputs.length]; int[] inputPositions = new int[inputs.length]; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/AbstractNativeRawDecoder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/AbstractNativeRawDecoder.java index d6aa8bdc39..e84574709f 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/AbstractNativeRawDecoder.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/AbstractNativeRawDecoder.java @@ -23,6 +23,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.io.IOException; import java.nio.ByteBuffer; /** @@ -38,7 +39,12 @@ public AbstractNativeRawDecoder(ErasureCoderOptions coderOptions) { } @Override - protected void doDecode(ByteBufferDecodingState decodingState) { + protected synchronized void doDecode(ByteBufferDecodingState decodingState) + throws IOException { + if (nativeCoder == 0) { + throw new IOException(String.format("%s closed", + getClass().getSimpleName())); + } int[] inputOffsets = new int[decodingState.inputs.length]; int[] outputOffsets = new int[decodingState.outputs.length]; @@ -63,10 +69,12 @@ protected void doDecode(ByteBufferDecodingState decodingState) { protected abstract void 
performDecodeImpl(ByteBuffer[] inputs, int[] inputOffsets, int dataLen, int[] erased, ByteBuffer[] outputs, - int[] outputOffsets); + int[] outputOffsets) + throws IOException; @Override - protected void doDecode(ByteArrayDecodingState decodingState) { + protected void doDecode(ByteArrayDecodingState decodingState) + throws IOException { PerformanceAdvisory.LOG.debug("convertToByteBufferState is invoked, " + "not efficiently. Please use direct ByteBuffer inputs/outputs"); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/AbstractNativeRawEncoder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/AbstractNativeRawEncoder.java index 21805fe921..cab53839b3 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/AbstractNativeRawEncoder.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/AbstractNativeRawEncoder.java @@ -23,6 +23,7 @@ import org.slf4j.Logger; import org.slf4j.LoggerFactory; +import java.io.IOException; import java.nio.ByteBuffer; /** @@ -38,7 +39,12 @@ public AbstractNativeRawEncoder(ErasureCoderOptions coderOptions) { } @Override - protected void doEncode(ByteBufferEncodingState encodingState) { + protected synchronized void doEncode(ByteBufferEncodingState encodingState) + throws IOException { + if (nativeCoder == 0) { + throw new IOException(String.format("%s closed", + getClass().getSimpleName())); + } int[] inputOffsets = new int[encodingState.inputs.length]; int[] outputOffsets = new int[encodingState.outputs.length]; int dataLen = encodingState.inputs[0].remaining(); @@ -60,10 +66,12 @@ protected void doEncode(ByteBufferEncodingState encodingState) { protected abstract void performEncodeImpl( ByteBuffer[] inputs, int[] inputOffsets, - int dataLen, ByteBuffer[] outputs, int[] outputOffsets); + int dataLen, ByteBuffer[] outputs, int[] outputOffsets) + throws IOException; @Override - protected void doEncode(ByteArrayEncodingState encodingState) { + protected void doEncode(ByteArrayEncodingState encodingState) + throws IOException { PerformanceAdvisory.LOG.debug("convertToByteBufferState is invoked, " + "not efficiently. 
Please use direct ByteBuffer inputs/outputs"); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/NativeRSRawDecoder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/NativeRSRawDecoder.java index ce4ec2b1c7..8572222303 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/NativeRSRawDecoder.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/NativeRSRawDecoder.java @@ -21,6 +21,7 @@ import org.apache.hadoop.io.erasurecode.ErasureCodeNative; import org.apache.hadoop.io.erasurecode.ErasureCoderOptions; +import java.io.IOException; import java.nio.ByteBuffer; /** @@ -39,14 +40,14 @@ public NativeRSRawDecoder(ErasureCoderOptions coderOptions) { } @Override - protected void performDecodeImpl(ByteBuffer[] inputs, int[] inputOffsets, - int dataLen, int[] erased, - ByteBuffer[] outputs, int[] outputOffsets) { + protected synchronized void performDecodeImpl( + ByteBuffer[] inputs, int[] inputOffsets, int dataLen, int[] erased, + ByteBuffer[] outputs, int[] outputOffsets) throws IOException { decodeImpl(inputs, inputOffsets, dataLen, erased, outputs, outputOffsets); } @Override - public void release() { + public synchronized void release() { destroyImpl(); } @@ -59,7 +60,7 @@ public boolean preferDirectBuffer() { private native void decodeImpl( ByteBuffer[] inputs, int[] inputOffsets, int dataLen, int[] erased, - ByteBuffer[] outputs, int[] outputOffsets); + ByteBuffer[] outputs, int[] outputOffsets) throws IOException; private native void destroyImpl(); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/NativeRSRawEncoder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/NativeRSRawEncoder.java index 70b5a465ef..754ec88410 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/NativeRSRawEncoder.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/NativeRSRawEncoder.java @@ -21,6 +21,7 @@ import org.apache.hadoop.io.erasurecode.ErasureCodeNative; import org.apache.hadoop.io.erasurecode.ErasureCoderOptions; +import java.io.IOException; import java.nio.ByteBuffer; /** @@ -39,14 +40,14 @@ public NativeRSRawEncoder(ErasureCoderOptions coderOptions) { } @Override - protected void performEncodeImpl( + protected synchronized void performEncodeImpl( ByteBuffer[] inputs, int[] inputOffsets, int dataLen, - ByteBuffer[] outputs, int[] outputOffsets) { + ByteBuffer[] outputs, int[] outputOffsets) throws IOException { encodeImpl(inputs, inputOffsets, dataLen, outputs, outputOffsets); } @Override - public void release() { + public synchronized void release() { destroyImpl(); } @@ -58,8 +59,8 @@ public boolean preferDirectBuffer() { private native void initImpl(int numDataUnits, int numParityUnits); private native void encodeImpl(ByteBuffer[] inputs, int[] inputOffsets, - int dataLen, ByteBuffer[] outputs, - int[] outputOffsets); + int dataLen, ByteBuffer[] outputs, + int[] outputOffsets) throws IOException; private native void destroyImpl(); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/NativeXORRawDecoder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/NativeXORRawDecoder.java index 
b6b1673312..1763042498 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/NativeXORRawDecoder.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/NativeXORRawDecoder.java @@ -21,6 +21,7 @@ import org.apache.hadoop.io.erasurecode.ErasureCodeNative; import org.apache.hadoop.io.erasurecode.ErasureCoderOptions; +import java.io.IOException; import java.nio.ByteBuffer; /** @@ -39,21 +40,26 @@ public NativeXORRawDecoder(ErasureCoderOptions coderOptions) { } @Override - protected void performDecodeImpl(ByteBuffer[] inputs, int[] inputOffsets, - int dataLen, int[] erased, ByteBuffer[] outputs, int[] outputOffsets) { + protected synchronized void performDecodeImpl( + ByteBuffer[] inputs, int[] inputOffsets, int dataLen, int[] erased, + ByteBuffer[] outputs, int[] outputOffsets) throws IOException { decodeImpl(inputs, inputOffsets, dataLen, erased, outputs, outputOffsets); } @Override - public void release() { + public synchronized void release() { destroyImpl(); } private native void initImpl(int numDataUnits, int numParityUnits); + /** + * Native implementation of decoding. + * @throws IOException if the decoder is closed. + */ private native void decodeImpl( ByteBuffer[] inputs, int[] inputOffsets, int dataLen, int[] erased, - ByteBuffer[] outputs, int[] outputOffsets); + ByteBuffer[] outputs, int[] outputOffsets) throws IOException; private native void destroyImpl(); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/NativeXORRawEncoder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/NativeXORRawEncoder.java index 9b4b44912c..7f4265b2fa 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/NativeXORRawEncoder.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/NativeXORRawEncoder.java @@ -21,6 +21,7 @@ import org.apache.hadoop.io.erasurecode.ErasureCodeNative; import org.apache.hadoop.io.erasurecode.ErasureCoderOptions; +import java.io.IOException; import java.nio.ByteBuffer; /** @@ -39,14 +40,14 @@ public NativeXORRawEncoder(ErasureCoderOptions coderOptions) { } @Override - protected void performEncodeImpl( + protected synchronized void performEncodeImpl( ByteBuffer[] inputs, int[] inputOffsets, int dataLen, - ByteBuffer[] outputs, int[] outputOffsets) { + ByteBuffer[] outputs, int[] outputOffsets) throws IOException { encodeImpl(inputs, inputOffsets, dataLen, outputs, outputOffsets); } @Override - public void release() { + public synchronized void release() { destroyImpl(); } @@ -54,7 +55,7 @@ public void release() { private native void encodeImpl(ByteBuffer[] inputs, int[] inputOffsets, int dataLen, ByteBuffer[] outputs, - int[] outputOffsets); + int[] outputOffsets) throws IOException; private native void destroyImpl(); } diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSLegacyRawDecoder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSLegacyRawDecoder.java index cfd7d29eb8..f4257aef3f 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSLegacyRawDecoder.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RSLegacyRawDecoder.java @@ -22,6 +22,7 @@ import 
org.apache.hadoop.io.erasurecode.ErasureCoderOptions; import org.apache.hadoop.io.erasurecode.rawcoder.util.RSUtil; +import java.io.IOException; import java.nio.ByteBuffer; /** @@ -53,7 +54,7 @@ public RSLegacyRawDecoder(ErasureCoderOptions coderOptions) { @Override public void decode(ByteBuffer[] inputs, int[] erasedIndexes, - ByteBuffer[] outputs) { + ByteBuffer[] outputs) throws IOException { // Make copies to avoid affecting the original ones; ByteBuffer[] newInputs = new ByteBuffer[inputs.length]; int[] newErasedIndexes = new int[erasedIndexes.length]; @@ -67,7 +68,8 @@ public void decode(ByteBuffer[] inputs, int[] erasedIndexes, } @Override - public void decode(byte[][] inputs, int[] erasedIndexes, byte[][] outputs) { + public void decode(byte[][] inputs, int[] erasedIndexes, byte[][] outputs) + throws IOException { // Make copies to avoid affecting the original ones; byte[][] newInputs = new byte[inputs.length][]; int[] newErasedIndexes = new int[erasedIndexes.length]; diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureDecoder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureDecoder.java index a29b47286f..249930ebe3 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureDecoder.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureDecoder.java @@ -21,6 +21,7 @@ import org.apache.hadoop.io.erasurecode.ECChunk; import org.apache.hadoop.io.erasurecode.ErasureCoderOptions; +import java.io.IOException; import java.nio.ByteBuffer; /** @@ -81,7 +82,7 @@ public RawErasureDecoder(ErasureCoderOptions coderOptions) { * erasedIndexes, ready for read after the call */ public void decode(ByteBuffer[] inputs, int[] erasedIndexes, - ByteBuffer[] outputs) { + ByteBuffer[] outputs) throws IOException { ByteBufferDecodingState decodingState = new ByteBufferDecodingState(this, inputs, erasedIndexes, outputs); @@ -117,7 +118,8 @@ public void decode(ByteBuffer[] inputs, int[] erasedIndexes, * Perform the real decoding using Direct ByteBuffer. * @param decodingState the decoding state */ - protected abstract void doDecode(ByteBufferDecodingState decodingState); + protected abstract void doDecode(ByteBufferDecodingState decodingState) + throws IOException; /** * Decode with inputs and erasedIndexes, generates outputs. More see above. @@ -126,8 +128,10 @@ public void decode(ByteBuffer[] inputs, int[] erasedIndexes, * @param erasedIndexes indexes of erased units in the inputs array * @param outputs output buffers to put decoded data into according to * erasedIndexes, ready for read after the call + * @throws IOException if the decoder is closed. */ - public void decode(byte[][] inputs, int[] erasedIndexes, byte[][] outputs) { + public void decode(byte[][] inputs, int[] erasedIndexes, byte[][] outputs) + throws IOException { ByteArrayDecodingState decodingState = new ByteArrayDecodingState(this, inputs, erasedIndexes, outputs); @@ -142,8 +146,10 @@ public void decode(byte[][] inputs, int[] erasedIndexes, byte[][] outputs) { * Perform the real decoding using bytes array, supporting offsets and * lengths. * @param decodingState the decoding state + * @throws IOException if the decoder is closed. 
*/ - protected abstract void doDecode(ByteArrayDecodingState decodingState); + protected abstract void doDecode(ByteArrayDecodingState decodingState) + throws IOException; /** * Decode with inputs and erasedIndexes, generates outputs. More see above. @@ -155,9 +161,10 @@ public void decode(byte[][] inputs, int[] erasedIndexes, byte[][] outputs) { * @param erasedIndexes indexes of erased units in the inputs array * @param outputs output buffers to put decoded data into according to * erasedIndexes, ready for read after the call + * @throws IOException if the decoder is closed */ public void decode(ECChunk[] inputs, int[] erasedIndexes, - ECChunk[] outputs) { + ECChunk[] outputs) throws IOException { ByteBuffer[] newInputs = CoderUtil.toBuffers(inputs); ByteBuffer[] newOutputs = CoderUtil.toBuffers(outputs); decode(newInputs, erasedIndexes, newOutputs); diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureEncoder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureEncoder.java index 36d68f48fb..6d2ecd2052 100644 --- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureEncoder.java +++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureEncoder.java @@ -21,6 +21,7 @@ import org.apache.hadoop.io.erasurecode.ECChunk; import org.apache.hadoop.io.erasurecode.ErasureCoderOptions; +import java.io.IOException; import java.nio.ByteBuffer; /** @@ -62,8 +63,10 @@ public RawErasureEncoder(ErasureCoderOptions coderOptions) { * be 0 after encoding * @param outputs output buffers to put the encoded data into, ready to read * after the call + * @throws IOException if the encoder is closed. */ - public void encode(ByteBuffer[] inputs, ByteBuffer[] outputs) { + public void encode(ByteBuffer[] inputs, ByteBuffer[] outputs) + throws IOException { ByteBufferEncodingState bbeState = new ByteBufferEncodingState( this, inputs, outputs); @@ -99,7 +102,8 @@ public void encode(ByteBuffer[] inputs, ByteBuffer[] outputs) { * Perform the real encoding work using direct ByteBuffer. * @param encodingState the encoding state */ - protected abstract void doEncode(ByteBufferEncodingState encodingState); + protected abstract void doEncode(ByteBufferEncodingState encodingState) + throws IOException; /** * Encode with inputs and generates outputs. More see above. @@ -108,7 +112,7 @@ public void encode(ByteBuffer[] inputs, ByteBuffer[] outputs) { * @param outputs output buffers to put the encoded data into, ready to read * after the call */ - public void encode(byte[][] inputs, byte[][] outputs) { + public void encode(byte[][] inputs, byte[][] outputs) throws IOException { ByteArrayEncodingState baeState = new ByteArrayEncodingState( this, inputs, outputs); @@ -125,7 +129,8 @@ public void encode(byte[][] inputs, byte[][] outputs) { * and lengths. * @param encodingState the encoding state */ - protected abstract void doEncode(ByteArrayEncodingState encodingState); + protected abstract void doEncode(ByteArrayEncodingState encodingState) + throws IOException; /** * Encode with inputs and generates outputs. More see above. @@ -133,8 +138,9 @@ public void encode(byte[][] inputs, byte[][] outputs) { * * @param inputs input buffers to read data from * @param outputs output buffers to put the encoded data into, ready to read * after the call + * @throws IOException if the encoder is closed. 
*/ - public void encode(ECChunk[] inputs, ECChunk[] outputs) { + public void encode(ECChunk[] inputs, ECChunk[] outputs) throws IOException { ByteBuffer[] newInputs = ECChunk.toBuffers(inputs); ByteBuffer[] newOutputs = ECChunk.toBuffers(outputs); encode(newInputs, newOutputs); diff --git a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/erasurecode/jni_common.c b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/erasurecode/jni_common.c index 2b1d9eac0c..9cca6dd754 100644 --- a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/erasurecode/jni_common.c +++ b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/erasurecode/jni_common.c @@ -63,8 +63,9 @@ IsalCoder* getCoder(JNIEnv* env, jobject thiz) { "Field nativeCoder not found"); } pCoder = (IsalCoder*)(*env)->GetLongField(env, thiz, fid); - pCoder->verbose = (verbose == JNI_TRUE) ? 1 : 0; - + if (pCoder != NULL) { + pCoder->verbose = (verbose == JNI_TRUE) ? 1 : 0; + } return pCoder; } diff --git a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/erasurecode/jni_rs_decoder.c b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/erasurecode/jni_rs_decoder.c index eb4b903da4..52d255afd5 100644 --- a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/erasurecode/jni_rs_decoder.c +++ b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/erasurecode/jni_rs_decoder.c @@ -48,6 +48,10 @@ JNIEnv *env, jobject thiz, jobjectArray inputs, jintArray inputOffsets, jint dataLen, jintArray erasedIndexes, jobjectArray outputs, jintArray outputOffsets) { RSDecoder* rsDecoder = (RSDecoder*)getCoder(env, thiz); + if (!rsDecoder) { + THROW(env, "java/io/IOException", "NativeRSRawDecoder closed"); + return; + } int numDataUnits = rsDecoder->decoder.coder.numDataUnits; int numParityUnits = rsDecoder->decoder.coder.numParityUnits; @@ -68,5 +72,8 @@ JNIEXPORT void JNICALL Java_org_apache_hadoop_io_erasurecode_rawcoder_NativeRSRawDecoder_destroyImpl( JNIEnv *env, jobject thiz) { RSDecoder* rsDecoder = (RSDecoder*)getCoder(env, thiz); - free(rsDecoder); + if (rsDecoder) { + free(rsDecoder); + setCoder(env, thiz, NULL); + } } diff --git a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/erasurecode/jni_rs_encoder.c b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/erasurecode/jni_rs_encoder.c index 6c477ed456..a427b21f7e 100644 --- a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/erasurecode/jni_rs_encoder.c +++ b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/erasurecode/jni_rs_encoder.c @@ -47,6 +47,10 @@ Java_org_apache_hadoop_io_erasurecode_rawcoder_NativeRSRawEncoder_encodeImpl( JNIEnv *env, jobject thiz, jobjectArray inputs, jintArray inputOffsets, jint dataLen, jobjectArray outputs, jintArray outputOffsets) { RSEncoder* rsEncoder = (RSEncoder*)getCoder(env, thiz); + if (!rsEncoder) { + THROW(env, "java/io/IOException", "NativeRSRawEncoder closed"); + return; + } int numDataUnits = rsEncoder->encoder.coder.numDataUnits; int numParityUnits = rsEncoder->encoder.coder.numParityUnits; @@ -62,5 +66,8 @@ JNIEXPORT void JNICALL Java_org_apache_hadoop_io_erasurecode_rawcoder_NativeRSRawEncoder_destroyImpl( JNIEnv *env, jobject thiz) { RSEncoder* rsEncoder = (RSEncoder*)getCoder(env, thiz); - free(rsEncoder); + if (rsEncoder) { + free(rsEncoder); 
+ setCoder(env, thiz, NULL); + } } diff --git a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/erasurecode/jni_xor_decoder.c b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/erasurecode/jni_xor_decoder.c index e8abd8be9e..d2de0c67cf 100644 --- a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/erasurecode/jni_xor_decoder.c +++ b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/erasurecode/jni_xor_decoder.c @@ -54,6 +54,10 @@ Java_org_apache_hadoop_io_erasurecode_rawcoder_NativeXORRawDecoder_decodeImpl( XORDecoder* xorDecoder; xorDecoder = (XORDecoder*)getCoder(env, thiz); + if (!xorDecoder) { + THROW(env, "java/io/IOException", "NativeXORRawDecoder closed"); + return; + } numDataUnits = ((IsalCoder*)xorDecoder)->numDataUnits; numParityUnits = ((IsalCoder*)xorDecoder)->numParityUnits; chunkSize = (int)dataLen; @@ -76,5 +80,8 @@ JNIEXPORT void JNICALL Java_org_apache_hadoop_io_erasurecode_rawcoder_NativeXORRawDecoder_destroyImpl (JNIEnv *env, jobject thiz){ XORDecoder* xorDecoder = (XORDecoder*)getCoder(env, thiz); - free(xorDecoder); + if (xorDecoder) { + free(xorDecoder); + setCoder(env, thiz, NULL); + } } diff --git a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/erasurecode/jni_xor_encoder.c b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/erasurecode/jni_xor_encoder.c index 6efb5fb951..ff3bf50e71 100644 --- a/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/erasurecode/jni_xor_encoder.c +++ b/hadoop-common-project/hadoop-common/src/main/native/src/org/apache/hadoop/io/erasurecode/jni_xor_encoder.c @@ -54,6 +54,10 @@ Java_org_apache_hadoop_io_erasurecode_rawcoder_NativeXORRawEncoder_encodeImpl( XOREncoder* xorEncoder; xorEncoder = (XOREncoder*)getCoder(env, thiz); + if (!xorEncoder) { + THROW(env, "java/io/IOException", "NativeXORRawEncoder closed"); + return; + } numDataUnits = ((IsalCoder*)xorEncoder)->numDataUnits; numParityUnits = ((IsalCoder*)xorEncoder)->numParityUnits; chunkSize = (int)dataLen; @@ -78,5 +82,8 @@ JNIEXPORT void JNICALL Java_org_apache_hadoop_io_erasurecode_rawcoder_NativeXORRawEncoder_destroyImpl (JNIEnv *env, jobject thiz) { XOREncoder* xorEncoder = (XOREncoder*)getCoder(env, thiz); - free(xorEncoder); + if (xorEncoder) { + free(xorEncoder); + setCoder(env, thiz, NULL); + } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestErasureCoderBase.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestErasureCoderBase.java index acbc136999..753c16a4b5 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestErasureCoderBase.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestErasureCoderBase.java @@ -23,8 +23,11 @@ import org.apache.hadoop.io.erasurecode.ErasureCoderOptions; import org.apache.hadoop.io.erasurecode.TestCoderBase; +import java.io.IOException; import java.lang.reflect.Constructor; +import static org.junit.Assert.fail; + /** * Erasure coder test base with utilities. 
*/ @@ -85,14 +88,22 @@ private void performTestCoding(int chunkSize, boolean usingSlicedBuffer) { ErasureCodingStep codingStep; codingStep = encoder.calculateCoding(blockGroup); - performCodingStep(codingStep); + try { + performCodingStep(codingStep); + } catch (IOException e) { + fail("Should not expect IOException: " + e.getMessage()); + } // Erase specified sources but return copies of them for later comparing TestBlock[] backupBlocks = backupAndEraseBlocks(clonedDataBlocks, parityBlocks); // Decode blockGroup = new ECBlockGroup(clonedDataBlocks, blockGroup.getParityBlocks()); codingStep = decoder.calculateCoding(blockGroup); - performCodingStep(codingStep); + try { + performCodingStep(codingStep); + } catch (IOException e) { + fail("Should not expect IOException: " + e.getMessage()); + } // Compare compareAndVerify(backupBlocks, codingStep.getOutputBlocks()); @@ -102,7 +113,8 @@ private void performTestCoding(int chunkSize, boolean usingSlicedBuffer) { * This is typically how a coding step should be performed. * @param codingStep */ - protected void performCodingStep(ErasureCodingStep codingStep) { + protected void performCodingStep(ErasureCodingStep codingStep) + throws IOException { // Pretend that we're opening these input blocks and output blocks. ECBlock[] inputBlocks = codingStep.getInputBlocks(); ECBlock[] outputBlocks = codingStep.getOutputBlocks(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestHHErasureCoderBase.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestHHErasureCoderBase.java index 54711536ac..c27672a07a 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestHHErasureCoderBase.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/coder/TestHHErasureCoderBase.java @@ -20,6 +20,10 @@ import org.apache.hadoop.io.erasurecode.ECBlock; import org.apache.hadoop.io.erasurecode.ECChunk; +import java.io.IOException; + +import static org.junit.Assert.fail; + /** * Erasure coder test base with utilities for hitchhiker. @@ -53,7 +57,11 @@ protected void performCodingStep(ErasureCodingStep codingStep) { } // Given the input chunks and output chunk buffers, just call it ! 
- codingStep.performCoding(inputChunks, outputChunks); + try { + codingStep.performCoding(inputChunks, outputChunks); + } catch (IOException e) { + fail("Unexpected IOException: " + e.getMessage()); + } } codingStep.finish(); diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureCoderBenchmark.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureCoderBenchmark.java index bb4a1f0647..c005e77cb3 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureCoderBenchmark.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureCoderBenchmark.java @@ -22,6 +22,7 @@ import org.apache.hadoop.io.erasurecode.ErasureCoderOptions; import org.apache.hadoop.util.StopWatch; +import java.io.IOException; import java.nio.ByteBuffer; import java.text.DecimalFormat; import java.util.ArrayList; @@ -232,7 +233,7 @@ public static void performBench(String opType, CODER coder, } } - private static RawErasureEncoder getRawEncoder(int index) { + private static RawErasureEncoder getRawEncoder(int index) throws IOException { RawErasureEncoder encoder = CODER_MAKERS.get(index).createEncoder(BenchData.OPTIONS); final boolean isDirect = encoder.preferDirectBuffer(); @@ -242,7 +243,7 @@ private static RawErasureEncoder getRawEncoder(int index) { return encoder; } - private static RawErasureDecoder getRawDecoder(int index) { + private static RawErasureDecoder getRawDecoder(int index) throws IOException { RawErasureDecoder decoder = CODER_MAKERS.get(index).createDecoder(BenchData.OPTIONS); final boolean isDirect = decoder.preferDirectBuffer(); @@ -341,11 +342,11 @@ public void prepareDecInput() { System.arraycopy(inputs, 0, decodeInputs, 0, NUM_DATA_UNITS); } - public void encode(RawErasureEncoder encoder) { + public void encode(RawErasureEncoder encoder) throws IOException { encoder.encode(inputs, outputs); } - public void decode(RawErasureDecoder decoder) { + public void decode(RawErasureDecoder decoder) throws IOException { decoder.decode(decodeInputs, ERASED_INDEXES, outputs); } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestDummyRawCoder.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestDummyRawCoder.java index 6f5871ccdf..b936ff8b5d 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestDummyRawCoder.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestDummyRawCoder.java @@ -18,9 +18,11 @@ package org.apache.hadoop.io.erasurecode.rawcoder; import org.apache.hadoop.io.erasurecode.ECChunk; +import org.junit.Assert; import org.junit.Before; import org.junit.Test; +import java.io.IOException; import java.nio.ByteBuffer; /** @@ -59,7 +61,11 @@ protected void testCoding(boolean usingDirectBuffer) { ECChunk[] dataChunks = prepareDataChunksForEncoding(); markChunks(dataChunks); ECChunk[] parityChunks = prepareParityChunksForEncoding(); - encoder.encode(dataChunks, parityChunks); + try { + encoder.encode(dataChunks, parityChunks); + } catch (IOException e) { + Assert.fail("Unexpected IOException: " + e.getMessage()); + } compareAndVerify(parityChunks, getEmptyChunks(parityChunks.length)); // Decode @@ -69,7 +75,12 @@ protected void testCoding(boolean usingDirectBuffer) { 
dataChunks, parityChunks); ensureOnlyLeastRequiredChunks(inputChunks); ECChunk[] recoveredChunks = prepareOutputChunksForDecoding(); - decoder.decode(inputChunks, getErasedIndexesForDecoding(), recoveredChunks); + try { + decoder.decode(inputChunks, getErasedIndexesForDecoding(), + recoveredChunks); + } catch (IOException e) { + Assert.fail("Unexpected IOException: " + e.getMessage()); + } compareAndVerify(recoveredChunks, getEmptyChunks(recoveredChunks.length)); } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestNativeRSRawCoder.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestNativeRSRawCoder.java index e3536d8d0c..d56045e78e 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestNativeRSRawCoder.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestNativeRSRawCoder.java @@ -118,4 +118,10 @@ public void testCoding_10x4_erasing_d0_p0() { prepare(null, 10, 4, new int[] {0}, new int[] {0}); testCodingDoMixAndTwice(); } + + @Test + public void testAfterRelease63() throws Exception { + prepare(6, 3, null, null); + testAfterRelease(); + } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestNativeXORRawCoder.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestNativeXORRawCoder.java index 5adefbe6d5..90e94107c4 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestNativeXORRawCoder.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestNativeXORRawCoder.java @@ -20,6 +20,7 @@ import org.apache.hadoop.io.erasurecode.ErasureCodeNative; import org.junit.Assume; import org.junit.Before; +import org.junit.Test; /** * Test NativeXOR encoding and decoding. @@ -33,4 +34,10 @@ public void setup() { this.decoderFactoryClass = NativeXORRawErasureCoderFactory.class; setAllowDump(true); } + + @Test + public void testAfterRelease63() throws Exception { + prepare(6, 3, null, null); + testAfterRelease(); + } } diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRawCoderBase.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRawCoderBase.java index 01c743a240..4519e357bd 100644 --- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRawCoderBase.java +++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/erasurecode/rawcoder/TestRawCoderBase.java @@ -20,9 +20,12 @@ import org.apache.hadoop.io.erasurecode.ECChunk; import org.apache.hadoop.io.erasurecode.ErasureCoderOptions; import org.apache.hadoop.io.erasurecode.TestCoderBase; +import org.apache.hadoop.test.LambdaTestUtils; import org.junit.Assert; import org.junit.Test; +import java.io.IOException; + /** * Raw coder test base with utilities. */ @@ -104,6 +107,28 @@ protected void testCodingWithBadOutput(boolean usingDirectBuffer) { } } + /** + * Test encode / decode after release(). It should raise IOException. 
+ * + * @throws Exception + */ + void testAfterRelease() throws Exception { + prepareCoders(true); + prepareBufferAllocator(true); + + encoder.release(); + final ECChunk[] data = prepareDataChunksForEncoding(); + final ECChunk[] parity = prepareParityChunksForEncoding(); + LambdaTestUtils.intercept(IOException.class, "closed", + () -> encoder.encode(data, parity)); + + decoder.release(); + final ECChunk[] in = prepareInputChunksForDecoding(data, parity); + final ECChunk[] out = prepareOutputChunksForDecoding(); + LambdaTestUtils.intercept(IOException.class, "closed", + () -> decoder.decode(in, getErasedIndexesForDecoding(), out)); + } + @Test public void testCodingWithErasingTooMany() { try { @@ -121,6 +146,16 @@ public void testCodingWithErasingTooMany() { } } + @Test + public void testIdempotentReleases() { + prepareCoders(true); + + for (int i = 0; i < 3; i++) { + encoder.release(); + decoder.release(); + } + } + private void performTestCoding(int chunkSize, boolean usingSlicedBuffer, boolean useBadInput, boolean useBadOutput, boolean allowChangeInputs) { @@ -144,7 +179,11 @@ private void performTestCoding(int chunkSize, boolean usingSlicedBuffer, ECChunk[] clonedDataChunks = cloneChunksWithData(dataChunks); markChunks(dataChunks); - encoder.encode(dataChunks, parityChunks); + try { + encoder.encode(dataChunks, parityChunks); + } catch (IOException e) { + Assert.fail("Should not get IOException: " + e.getMessage()); + } dumpChunks("Encoded parity chunks", parityChunks); if (!allowChangeInputs) { @@ -174,7 +213,12 @@ private void performTestCoding(int chunkSize, boolean usingSlicedBuffer, } dumpChunks("Decoding input chunks", inputChunks); - decoder.decode(inputChunks, getErasedIndexesForDecoding(), recoveredChunks); + try { + decoder.decode(inputChunks, getErasedIndexesForDecoding(), + recoveredChunks); + } catch (IOException e) { + Assert.fail("Should not get IOException: " + e.getMessage()); + } dumpChunks("Decoded/recovered chunks", recoveredChunks); if (!allowChangeInputs) { @@ -268,7 +312,11 @@ protected void testInputPosition(boolean usingDirectBuffer) { ECChunk[] dataChunks = prepareDataChunksForEncoding(); ECChunk[] parityChunks = prepareParityChunksForEncoding(); ECChunk[] clonedDataChunks = cloneChunksWithData(dataChunks); - encoder.encode(dataChunks, parityChunks); + try { + encoder.encode(dataChunks, parityChunks); + } catch (IOException e) { + Assert.fail("Should not get IOException: " + e.getMessage()); + } verifyBufferPositionAtEnd(dataChunks); // verify decode @@ -277,7 +325,12 @@ protected void testInputPosition(boolean usingDirectBuffer) { clonedDataChunks, parityChunks); ensureOnlyLeastRequiredChunks(inputChunks); ECChunk[] recoveredChunks = prepareOutputChunksForDecoding(); - decoder.decode(inputChunks, getErasedIndexesForDecoding(), recoveredChunks); + try { + decoder.decode(inputChunks, getErasedIndexesForDecoding(), + recoveredChunks); + } catch (IOException e) { + Assert.fail("Should not get IOException: " + e.getMessage()); + } verifyBufferPositionAtEnd(inputChunks); } diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSStripedOutputStream.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSStripedOutputStream.java index 3eb7e678f9..1b8395908f 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSStripedOutputStream.java +++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/DFSStripedOutputStream.java @@ -355,7 +355,7 @@ 
private synchronized StripedDataStreamer setCurrentStreamer(int newIdx) { * @param buffers data buffers + parity buffers */ private static void encode(RawErasureEncoder encoder, int numData, - ByteBuffer[] buffers) { + ByteBuffer[] buffers) throws IOException { final ByteBuffer[] dataBuffers = new ByteBuffer[numData]; final ByteBuffer[] parityBuffers = new ByteBuffer[buffers.length - numData]; System.arraycopy(buffers, 0, dataBuffers, 0, dataBuffers.length); diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/PositionStripeReader.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/PositionStripeReader.java index b01b74cfa7..65d2c906a9 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/PositionStripeReader.java +++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/PositionStripeReader.java @@ -27,6 +27,7 @@ import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureDecoder; import org.apache.hadoop.hdfs.DFSUtilClient.CorruptedBlocks; +import java.io.IOException; import java.nio.ByteBuffer; /** @@ -68,7 +69,7 @@ boolean prepareParityChunk(int index) { } @Override - void decode() { + void decode() throws IOException { finalizeDecodeInputs(); decodeAndFillBuffer(true); } diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/StatefulStripeReader.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/StatefulStripeReader.java index 88795144ef..b37501d2e2 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/StatefulStripeReader.java +++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/StatefulStripeReader.java @@ -27,6 +27,7 @@ import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureDecoder; import org.apache.hadoop.hdfs.DFSUtilClient.CorruptedBlocks; +import java.io.IOException; import java.nio.ByteBuffer; /** @@ -88,7 +89,7 @@ boolean prepareParityChunk(int index) { } @Override - void decode() { + void decode() throws IOException { finalizeDecodeInputs(); decodeAndFillBuffer(false); } diff --git a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/StripeReader.java b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/StripeReader.java index 5518752079..9a204230ab 100644 --- a/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/StripeReader.java +++ b/hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/StripeReader.java @@ -149,10 +149,11 @@ void skip() { */ abstract boolean prepareParityChunk(int index); - /* + /** * Decode to get the missing data. + * @throws IOException if the decoder is closed. */ - abstract void decode(); + abstract void decode() throws IOException; /* * Default close do nothing. @@ -408,7 +409,7 @@ void finalizeDecodeInputs() { /** * Decode based on the given input buffers and erasure coding policy. 
*/ - void decodeAndFillBuffer(boolean fillBuffer) { + void decodeAndFillBuffer(boolean fillBuffer) throws IOException { // Step 1: prepare indices and output buffers for missing data units int[] decodeIndices = prepareErasedIndices(); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/erasurecode/StripedBlockChecksumReconstructor.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/erasurecode/StripedBlockChecksumReconstructor.java index 69173173fe..6e063c154c 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/erasurecode/StripedBlockChecksumReconstructor.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/erasurecode/StripedBlockChecksumReconstructor.java @@ -155,7 +155,7 @@ private long checksumWithTargetOutput(byte[] outputData, int toReconstructLen, return checksumBuf.length; } - private void reconstructTargets(int toReconstructLen) { + private void reconstructTargets(int toReconstructLen) throws IOException { ByteBuffer[] inputs = getStripedReader().getInputBuffers(toReconstructLen); ByteBuffer[] outputs = new ByteBuffer[1]; diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/erasurecode/StripedBlockReconstructor.java b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/erasurecode/StripedBlockReconstructor.java index 34e58ae47b..17b54e8ea7 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/erasurecode/StripedBlockReconstructor.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/datanode/erasurecode/StripedBlockReconstructor.java @@ -113,7 +113,7 @@ void reconstruct() throws IOException { } } - private void reconstructTargets(int toReconstructLen) { + private void reconstructTargets(int toReconstructLen) throws IOException { ByteBuffer[] inputs = getStripedReader().getInputBuffers(toReconstructLen); int[] erasedIndices = stripedWriter.getRealTargetIndices(); diff --git a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/StripedFileTestUtil.java b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/StripedFileTestUtil.java index c771d21ea1..08bf20a3cc 100644 --- a/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/StripedFileTestUtil.java +++ b/hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/StripedFileTestUtil.java @@ -502,7 +502,11 @@ static void verifyParityBlocks(Configuration conf, final long size, dataBytes.length, parityBytes.length); final RawErasureEncoder encoder = CodecUtil.createRawEncoder(conf, codecName, coderOptions); - encoder.encode(dataBytes, expectedParityBytes); + try { + encoder.encode(dataBytes, expectedParityBytes); + } catch (IOException e) { + Assert.fail("Unexpected IOException: " + e.getMessage()); + } for (int i = 0; i < parityBytes.length; i++) { if (checkSet.contains(i + dataBytes.length)){ Assert.assertArrayEquals("i=" + i, expectedParityBytes[i],
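The pattern this patch applies across all four native coders can be summarized independently of Hadoop: each coding call takes the object lock and checks the nativeCoder pointer before touching native memory, and release() frees the native struct at most once, zeroing the pointer so later calls become no-ops (on the JNI side, destroyImpl() nulls the field via setCoder()). The following sketch is a minimal, self-contained illustration of that idea; SketchNativeCoder, its field value, and the simplified byte[][] signature are hypothetical stand-ins for this example only, not code from the patch.

import java.io.IOException;

public class SketchNativeCoder {
  // Stand-in for the nativeCoder field that holds the native struct's
  // address; 0 means the coder has been released.
  private long nativeCoder = 0x1L;

  public synchronized void encode(byte[][] inputs, byte[][] outputs)
      throws IOException {
    // Mirrors the guard added in AbstractNativeRawEncoder#doEncode().
    if (nativeCoder == 0) {
      throw new IOException(getClass().getSimpleName() + " closed");
    }
    // A real coder would call into the native encode implementation here.
  }

  public synchronized void release() {
    // Mirrors destroyImpl(): free the native struct at most once, then
    // zero the pointer so repeated release() calls are harmless no-ops.
    if (nativeCoder != 0) {
      // JNI side, roughly: if (pCoder) { free(pCoder); setCoder(env, thiz, NULL); }
      nativeCoder = 0;
    }
  }

  public static void main(String[] args) {
    SketchNativeCoder coder = new SketchNativeCoder();
    coder.release();
    coder.release(); // second release is a no-op, not a double free
    try {
      coder.encode(new byte[0][], new byte[0][]);
    } catch (IOException e) {
      System.out.println("expected: " + e.getMessage()); // "... closed"
    }
  }
}

Holding the same lock in both the coding path and release() is what keeps a concurrent encode or decode from racing destroyImpl() and dereferencing freed memory; that is why the patch marks performEncodeImpl()/performDecodeImpl() and release() as synchronized in the native coder classes.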