HDFS-554. Use System.arraycopy in BlockInfo.ensureCapacity. (harsh)
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1226239 13f79535-47bb-0310-9956-ffa450edef68
parent 11905517f1
commit 8e8203f3c5
@@ -131,6 +131,8 @@ Trunk (unreleased changes)
     HDFS-2476. More CPU efficient data structure for under-replicated,
     over-replicated, and invalidated blocks. (Tomasz Nykiel via todd)
 
+    HDFS-554. Use System.arraycopy in BlockInfo.ensureCapacity. (harsh)
+
   BUG FIXES
 
     HDFS-2299. TestOfflineEditsViewer is failing on trunk. (Uma Maheswara Rao G
     via atm)
@@ -173,9 +173,7 @@ private int ensureCapacity(int num) {
      * happen only when replication is manually increased by the user. */
     Object[] old = triplets;
     triplets = new Object[(last+num)*3];
-    for(int i=0; i < last*3; i++) {
-      triplets[i] = old[i];
-    }
+    System.arraycopy(old, 0, triplets, 0, last*3);
     return last;
   }
 
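For reference, the change amounts to replacing the element-by-element copy loop with a single bulk System.arraycopy call. Below is a minimal, self-contained Java sketch of that pattern; it is not the actual HDFS BlockInfo class, and the EnsureCapacitySketch class, its main method, and the way 'last' is derived here are assumptions made purely for illustration.

// Hypothetical standalone sketch (not HDFS code) of the pattern in this commit.
public class EnsureCapacitySketch {
  // Each block entry occupies 3 consecutive slots, as in BlockInfo's triplets array.
  private Object[] triplets = new Object[3];

  /** Grow triplets to hold 'num' more entries; returns the previous entry count. */
  private int ensureCapacity(int num) {
    int last = triplets.length / 3;        // assumption: entry count derived from array length
    Object[] old = triplets;
    triplets = new Object[(last + num) * 3];
    // Bulk copy of the first last*3 slots; equivalent to the removed loop:
    //   for (int i = 0; i < last*3; i++) { triplets[i] = old[i]; }
    System.arraycopy(old, 0, triplets, 0, last * 3);
    return last;
  }

  public static void main(String[] args) {
    EnsureCapacitySketch s = new EnsureCapacitySketch();
    s.triplets[0] = "entry-0";
    int last = s.ensureCapacity(2);        // grow from 1 entry to 3 entries
    System.out.println("previous entries: " + last
        + ", new capacity: " + s.triplets.length / 3
        + ", preserved: " + s.triplets[0]);
  }
}

System.arraycopy performs the copy in a single native call, which is typically faster than a per-element Java loop and reads more directly as a bulk copy, without changing the method's behavior.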