HDFS-16544. EC decoding failed due to invalid buffer (#4179)

This commit is contained in:
qinyuren 2022-04-20 14:04:27 +08:00 committed by GitHub
parent 98b9c435f2
commit 76bbd17374
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
2 changed files with 38 additions and 5 deletions

View File

@@ -74,11 +74,6 @@ void prepareDecodeInputs() {
boolean prepareParityChunk(int index) {
  Preconditions.checkState(index >= dataBlkNum
      && alignedStripe.chunks[index] == null);
if (readerInfos[index] != null && readerInfos[index].shouldSkip) {
alignedStripe.chunks[index] = new StripingChunk(StripingChunk.MISSING);
// we have failed the block reader before
return false;
}
  final int parityIndex = index - dataBlkNum;
  ByteBuffer buf = dfsStripedInputStream.getParityBuffer().duplicate();
  buf.position(cellSize * parityIndex);

View File

@@ -19,7 +19,9 @@
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.protocol.Block;
import org.apache.hadoop.hdfs.protocol.ExtendedBlock;
import org.apache.hadoop.hdfs.protocol.LocatedBlock;
import org.apache.hadoop.hdfs.protocol.LocatedBlocks;
import org.apache.hadoop.hdfs.protocol.LocatedStripedBlock;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfo;
import org.apache.hadoop.hdfs.server.blockmanagement.BlockManager;
@@ -183,4 +185,40 @@ public void testMoreThanOneCorruptedBlock() throws IOException {
        buffer);
  }
/**
 * Regression test for HDFS-16544: a stateful read of a striped file must
 * still decode correctly after one data block and the first parity block
 * have been corrupted on the datanodes.
 *
 * @throws IOException if writing, locating, or reading the file fails
 */
@Test
public void testReadWithCorruptedDataBlockAndParityBlock() throws IOException {
  final Path file = new Path("/corruptedDataBlockAndParityBlock");
  final int fileLength = BLOCK_SIZE * NUM_DATA_UNITS;
  final byte[] expected = StripedFileTestUtil.generateBytes(fileLength);
  DFSTestUtil.writeFile(dfs, file, expected);

  // Corrupt internal block 0 (a data block) and internal block 6
  // (the first parity block) of the last located striped block.
  final int[] corruptIndices = {0, 6};
  LocatedBlocks blocks =
      ReadStripedFileWithDecodingHelper.getLocatedBlocks(dfs, file);
  LocatedStripedBlock stripedBlock =
      (LocatedStripedBlock) blocks.getLastLocatedBlock();
  for (int blkIndex : corruptIndices) {
    ExtendedBlock internalBlock = StripedBlockUtil.constructInternalBlock(
        stripedBlock.getBlock(), CELL_SIZE, NUM_DATA_UNITS, blkIndex);
    cluster.corruptBlockOnDataNodes(internalBlock);
  }

  // A stateful read should transparently reconstruct the original bytes
  // via EC decoding despite the two corrupted internal blocks.
  byte[] readBuffer = new byte[fileLength + 100];
  StripedFileTestUtil.verifyStatefulRead(dfs, file, fileLength, expected,
      readBuffer);
}
}