HDFS-8113. Add check for null BlockCollection pointers in BlockInfoContiguous structures (Chengbing Liu via Colin P. McCabe)

Colin Patrick Mccabe 2015-05-08 13:50:51 -07:00
parent c39012f4a0
commit f523e963e4
3 changed files with 16 additions and 1 deletion

@@ -530,6 +530,9 @@ Release 2.8.0 - UNRELEASED
     HDFS-8284. Update documentation about how to use HTrace with HDFS (Masatake
     Iwasaki via Colin P. McCabe)
 
+    HDFS-8113. Add check for null BlockCollection pointers in
+    BlockInfoContiguous structures (Chengbing Liu via Colin P. McCabe)
+
   OPTIMIZATIONS
 
     HDFS-8026. Trace FSOutputSummer#writeChecksumChunks rather than

@@ -74,7 +74,8 @@ public BlockInfoContiguous(Block blk, short replication) {
    * @param from BlockInfo to copy from.
    */
   protected BlockInfoContiguous(BlockInfoContiguous from) {
-    this(from, from.bc.getBlockReplication());
+    super(from);
+    this.triplets = new Object[from.triplets.length];
     this.bc = from.bc;
   }
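
For context, here is a minimal, standalone sketch of why the old copy constructor could fail and how the patched version avoids it. The class names (BlockInfoSketch, BlockCollectionSketch) are simplified stand-ins, not the actual HDFS sources; the sketch only mirrors the shape of the change above: the old path sized the copy via from.bc.getBlockReplication(), which throws NullPointerException when the source block has no BlockCollection attached, while the new path sizes the triplets array from the source block itself and copies the (possibly null) bc reference without dereferencing it.

// Simplified, hypothetical sketch of the pre- and post-patch copy logic.
class BlockCollectionSketch {
  short getBlockReplication() { return 3; }
}

public class BlockInfoSketch {
  Object[] triplets;
  BlockCollectionSketch bc; // may be null for a block not attached to any file

  BlockInfoSketch(short replication) {
    // HDFS keeps 3 references (storage, previous, next) per replica
    this.triplets = new Object[3 * replication];
  }

  // Old behavior: dereferences from.bc, so a null BlockCollection throws NPE.
  static BlockInfoSketch copyOld(BlockInfoSketch from) {
    BlockInfoSketch copy = new BlockInfoSketch(from.bc.getBlockReplication()); // NPE if bc == null
    copy.bc = from.bc;
    return copy;
  }

  // New behavior: size the triplets array from the source block itself and
  // copy the bc reference without touching it.
  static BlockInfoSketch copyNew(BlockInfoSketch from) {
    BlockInfoSketch copy = new BlockInfoSketch((short) 0);
    copy.triplets = new Object[from.triplets.length];
    copy.bc = from.bc;
    return copy;
  }

  public static void main(String[] args) {
    BlockInfoSketch orphan = new BlockInfoSketch((short) 3);
    orphan.bc = null; // block with no owning collection, as in the reported bug

    BlockInfoSketch ok = copyNew(orphan); // patched path: succeeds
    System.out.println("copied capacity = " + ok.triplets.length);

    try {
      copyOld(orphan); // pre-patch path: NullPointerException
    } catch (NullPointerException e) {
      System.out.println("old constructor fails on a null BlockCollection");
    }
  }
}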

@@ -69,6 +69,17 @@ public void testAddStorage() throws Exception {
     Assert.assertEquals(storage, blockInfo.getStorageInfo(0));
   }
 
+  @Test
+  public void testCopyConstructor() {
+    BlockInfoContiguous old = new BlockInfoContiguous((short) 3);
+    try {
+      BlockInfoContiguous copy = new BlockInfoContiguous(old);
+      assertEquals(old.getBlockCollection(), copy.getBlockCollection());
+      assertEquals(old.getCapacity(), copy.getCapacity());
+    } catch (Exception e) {
+      Assert.fail("Copy constructor throws exception: " + e);
+    }
+  }
+
   @Test
   public void testReplaceStorage() throws Exception {
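
As a possible follow-up (not part of this patch), the null-BlockCollection case could also be asserted explicitly. The hypothetical test below assumes it sits in the same JUnit 4 test class as above, with its existing imports; a block constructed with new BlockInfoContiguous((short) 3) has no BlockCollection attached, so both the original and the copy should report null.

  // Hypothetical addition, not in the commit: verify the copy keeps the null
  // BlockCollection rather than failing while constructing it.
  @Test
  public void testCopyConstructorKeepsNullBlockCollection() {
    BlockInfoContiguous orig = new BlockInfoContiguous((short) 3);
    BlockInfoContiguous copy = new BlockInfoContiguous(orig);
    Assert.assertNull(orig.getBlockCollection());
    Assert.assertNull(copy.getBlockCollection());
  }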