HDFS-5435. File append fails to initialize storageIDs. (Junping Du)

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/HDFS-2832@1536434 13f79535-47bb-0310-9956-ffa450edef68
Arpit Agarwal 2013-10-28 17:29:06 +00:00
parent 67b93d6e25
commit dc0b44a884
3 changed files with 5 additions and 2 deletions


@@ -51,3 +51,6 @@ IMPROVEMENTS:
HDFS-5417. Fix storage IDs in PBHelper and UpgradeUtilities. (szetszwo)
HDFS-5214. Fix NPEs in BlockManager and DirectoryScanner. (Arpit Agarwal)
HDFS-5435. File append fails to initialize storageIDs. (Junping Du via
Arpit Agarwal)


@@ -311,7 +311,6 @@ class DataStreamer extends Daemon {
private DataInputStream blockReplyStream;
private ResponseProcessor response = null;
private volatile DatanodeInfo[] nodes = null; // list of targets for current block
- //TODO: update storage IDs
private volatile String[] storageIDs = null;
private LoadingCache<DatanodeInfo, DatanodeInfo> excludedNodes =
CacheBuilder.newBuilder()
@@ -404,6 +403,7 @@ private DataStreamer(LocatedBlock lastBlock, HdfsFileStatus stat,
// setup pipeline to append to the last block XXX retries??
nodes = lastBlock.getLocations();
+ storageIDs = lastBlock.getStorageIDs();
errorIndex = -1; // no errors yet.
if (nodes.length < 1) {
throw new IOException("Unable to retrieve blocks locations " +

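The DataStreamer hunks above are the core of the fix: the stale TODO is removed and the storage IDs are captured from the last block at the same point where the datanode targets are, so an append pipeline no longer starts out with a null storageIDs array. A minimal, self-contained sketch of that pattern, using hypothetical stand-in types (LastBlock below is a simplified placeholder for LocatedBlock, not the real HDFS class):

import java.io.IOException;
import java.util.Arrays;

/** Illustration only; LastBlock stands in for LocatedBlock and is not the real HDFS class. */
public class AppendPipelineSketch {

  /** Hypothetical stand-in for the last located block of the file being appended to. */
  static class LastBlock {
    private final String[] locations;   // datanodes holding the last block
    private final String[] storageIDs;  // storage used on each of those datanodes

    LastBlock(String[] locations, String[] storageIDs) {
      this.locations = locations;
      this.storageIDs = storageIDs;
    }

    String[] getLocations() { return locations; }
    String[] getStorageIDs() { return storageIDs; }
  }

  // Pipeline state, mirroring DataStreamer's volatile nodes/storageIDs fields.
  private volatile String[] nodes = null;
  private volatile String[] storageIDs = null;

  /** Set up the pipeline for append from the file's last block. */
  void setupForAppend(LastBlock lastBlock) throws IOException {
    nodes = lastBlock.getLocations();
    // The fix: initialize the storage IDs together with the node list so both
    // arrays describe the same pipeline targets.
    storageIDs = lastBlock.getStorageIDs();
    if (nodes.length < 1) {
      throw new IOException("Unable to retrieve block locations");
    }
  }

  public static void main(String[] args) throws IOException {
    AppendPipelineSketch streamer = new AppendPipelineSketch();
    streamer.setupForAppend(new LastBlock(
        new String[] {"dn1:50010", "dn2:50010"},
        new String[] {"storage-a", "storage-b"}));
    System.out.println("nodes      = " + Arrays.toString(streamer.nodes));
    System.out.println("storageIDs = " + Arrays.toString(streamer.storageIDs));
  }
}

Before the patch only nodes was assigned at this point, so a pipeline built for append carried a null storageIDs array into later operations such as updatePipeline.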

@@ -788,7 +788,7 @@ public void updatePipeline(String clientName, ExtendedBlock oldBlock,
.setOldBlock(PBHelper.convert(oldBlock))
.setNewBlock(PBHelper.convert(newBlock))
.addAllNewNodes(Arrays.asList(PBHelper.convert(newNodes)))
- .addAllStorageIDs(Arrays.asList(storageIDs))
+ .addAllStorageIDs(storageIDs == null ? null : Arrays.asList(storageIDs))
.build();
try {
rpcProxy.updatePipeline(null, req);