HDFS-16538. EC decoding failed due to not enough valid inputs (#4167)
Co-authored-by: liubingxing <liubingxing@bigo.sg>
parent d7fd61d616
commit 52e152f8b0
hadoop-hdfs-project/hadoop-hdfs-client/src/main/java/org/apache/hadoop/hdfs/StatefulStripeReader.java

@@ -52,7 +52,9 @@ void prepareDecodeInputs() {
       cur = dfsStripedInputStream.getCurStripeBuf().duplicate();
     }
 
-    this.decodeInputs = new ECChunk[dataBlkNum + parityBlkNum];
+    if (this.decodeInputs == null) {
+      this.decodeInputs = new ECChunk[dataBlkNum + parityBlkNum];
+    }
     int bufLen = (int) alignedStripe.getSpanInBlock();
     int bufOff = (int) alignedStripe.getOffsetInBlock();
     for (int i = 0; i < dataBlkNum; i++) {
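Why the null guard fixes the read failure: during a stateful read, prepareDecodeInputs() can run more than once for the same stripe when a second block turns out to be corrupt. Unconditionally re-allocating decodeInputs threw away ECChunks that earlier calls had already gathered, so the decoder saw fewer valid inputs than the schema requires and failed with "not enough valid inputs". The guard makes the allocation happen once per stripe. A minimal standalone sketch of the pattern (hypothetical names, not the HDFS classes):

import java.util.Arrays;

public class LazyInitSketch {
  private String[] inputs; // stands in for ECChunk[] decodeInputs

  // Pre-fix behavior: every call wipes whatever earlier calls gathered.
  void prepareAlways(int n) {
    inputs = new String[n];
  }

  // Post-fix behavior: allocate once; later calls keep existing entries.
  void prepareIfAbsent(int n) {
    if (inputs == null) {
      inputs = new String[n];
    }
  }

  public static void main(String[] args) {
    LazyInitSketch broken = new LazyInitSketch();
    broken.prepareAlways(3);
    broken.inputs[0] = "chunk read before the retry";
    broken.prepareAlways(3); // retry re-allocates, the chunk is gone
    System.out.println(Arrays.toString(broken.inputs)); // [null, null, null]

    LazyInitSketch fixed = new LazyInitSketch();
    fixed.prepareIfAbsent(3);
    fixed.inputs[0] = "chunk read before the retry";
    fixed.prepareIfAbsent(3); // retry keeps the array, the chunk survives
    System.out.println(Arrays.toString(fixed.inputs));
    // [chunk read before the retry, null, null]: the decoder still sees
    // the inputs collected before the second failure.
  }
}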
hadoop-hdfs-project/hadoop-hdfs/src/test/java/org/apache/hadoop/hdfs/TestReadStripedFileWithDecoding.java

@@ -44,6 +44,7 @@
 import java.io.IOException;
 import java.nio.ByteBuffer;
 
+import static org.apache.hadoop.hdfs.ReadStripedFileWithDecodingHelper.BLOCK_SIZE;
 import static org.apache.hadoop.hdfs.ReadStripedFileWithDecodingHelper.CELL_SIZE;
 import static org.apache.hadoop.hdfs.ReadStripedFileWithDecodingHelper.NUM_DATA_UNITS;
 import static org.apache.hadoop.hdfs.ReadStripedFileWithDecodingHelper.NUM_PARITY_UNITS;
@@ -165,4 +166,21 @@ public void testInvalidateBlock() throws IOException, InterruptedException {
       DataNodeTestUtils.setHeartbeatsDisabledForTests(dn, false);
     }
   }
+
+  @Test
+  public void testMoreThanOneCorruptedBlock() throws IOException {
+    final Path file = new Path("/corrupted");
+    final int length = BLOCK_SIZE * NUM_DATA_UNITS;
+    final byte[] bytes = StripedFileTestUtil.generateBytes(length);
+    DFSTestUtil.writeFile(dfs, file, bytes);
+
+    // read the file with more than one corrupted data block
+    byte[] buffer = new byte[length + 100];
+    for (int count = 2; count < NUM_PARITY_UNITS; ++count) {
+      ReadStripedFileWithDecodingHelper.corruptBlocks(cluster, dfs, file, count, 0,
+          false);
+      StripedFileTestUtil.verifyStatefulRead(dfs, file, length, bytes,
+          buffer);
+    }
+  }
 }
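The loop bound is worth a second look. Assuming the helper's defaults follow the RS(6,3) schema (NUM_DATA_UNITS = 6, NUM_PARITY_UNITS = 3; an assumption here, not stated in the diff), count only takes the value 2: exactly the "more than one corrupted block" case that previously reset decodeInputs mid-read. A back-of-envelope decodability check under that assumption:

public class DecodabilityCheck {
  public static void main(String[] args) {
    final int dataUnits = 6;    // assumed NUM_DATA_UNITS
    final int parityUnits = 3;  // assumed NUM_PARITY_UNITS
    for (int corrupt = 2; corrupt < parityUnits; ++corrupt) { // mirrors the test loop
      // surviving = data + parity - corrupt; decoding needs >= dataUnits of them
      int surviving = dataUnits + parityUnits - corrupt;
      System.out.printf("corrupt=%d -> surviving=%d, recoverable=%b%n",
          corrupt, surviving, surviving >= dataUnits);
    }
  }
}

With corrupt = 2, seven of nine blocks survive, so the stripe is still recoverable; the test exercises a read that must succeed, and before this fix it failed only because the already-read inputs were discarded.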