HDFS-5435. File append fails to initialize storageIDs. (Junping Du)
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/HDFS-2832@1536434 13f79535-47bb-0310-9956-ffa450edef68
commit dc0b44a884
parent 67b93d6e25

@@ -51,3 +51,6 @@ IMPROVEMENTS:
     HDFS-5417. Fix storage IDs in PBHelper and UpgradeUtilities. (szetszwo)
 
     HDFS-5214. Fix NPEs in BlockManager and DirectoryScanner. (Arpit Agarwal)
+
+    HDFS-5435. File append fails to initialize storageIDs. (Junping Du via
+    Arpit Agarwal)

@@ -311,7 +311,6 @@ public class DFSOutputStream extends FSOutputSummer
   private DataInputStream blockReplyStream;
   private ResponseProcessor response = null;
   private volatile DatanodeInfo[] nodes = null; // list of targets for current block
-  //TODO: update storage IDs
   private volatile String[] storageIDs = null;
   private LoadingCache<DatanodeInfo, DatanodeInfo> excludedNodes =
       CacheBuilder.newBuilder()

@@ -404,6 +403,7 @@ public class DFSOutputStream extends FSOutputSummer
       // setup pipeline to append to the last block XXX retries??
       nodes = lastBlock.getLocations();
+      storageIDs = lastBlock.getStorageIDs();
       errorIndex = -1; // no errors yet.
       if (nodes.length < 1) {
         throw new IOException("Unable to retrieve blocks locations " +
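
The change above takes the storage IDs at the same point where the append path takes the DataNode locations of the last block, keeping the two arrays index-aligned. A minimal sketch of that pattern, assuming hypothetical BlockInfo and PipelineState stand-ins rather than the real LocatedBlock and DFSOutputStream:

// Sketch only: BlockInfo and PipelineState are hypothetical stand-ins for
// LocatedBlock and the DFSOutputStream pipeline state; they are not HDFS classes.
import java.io.IOException;

class BlockInfo {
  private final String[] locations;   // DataNode identifiers for each replica
  private final String[] storageIDs;  // storage IDs, index-aligned with locations

  BlockInfo(String[] locations, String[] storageIDs) {
    this.locations = locations;
    this.storageIDs = storageIDs;
  }

  String[] getLocations()  { return locations; }
  String[] getStorageIDs() { return storageIDs; }
}

class PipelineState {
  private volatile String[] nodes;
  private volatile String[] storageIDs;

  // Take both arrays from the same block, as the append fix does, so that
  // nodes[i] and storageIDs[i] always describe the same replica.
  void setupForAppend(BlockInfo lastBlock) throws IOException {
    nodes = lastBlock.getLocations();
    storageIDs = lastBlock.getStorageIDs();
    if (nodes.length < 1) {
      throw new IOException("Unable to retrieve block locations for append");
    }
  }

  public static void main(String[] args) throws IOException {
    PipelineState state = new PipelineState();
    state.setupForAppend(new BlockInfo(
        new String[] {"dn1", "dn2"},
        new String[] {"storage-a", "storage-b"}));
    System.out.println("append pipeline of " + state.nodes.length + " nodes ready");
  }
}

Keeping the two assignments together is the point of the fix: a later pipeline update that reports storageIDs[i] for nodes[i] only makes sense if both arrays came from the same block.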

@@ -788,7 +788,7 @@ public class ClientNamenodeProtocolTranslatorPB implements
         .setOldBlock(PBHelper.convert(oldBlock))
         .setNewBlock(PBHelper.convert(newBlock))
         .addAllNewNodes(Arrays.asList(PBHelper.convert(newNodes)))
-        .addAllStorageIDs(Arrays.asList(storageIDs))
+        .addAllStorageIDs(storageIDs == null ? null : Arrays.asList(storageIDs))
         .build();
     try {
       rpcProxy.updatePipeline(null, req);
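
The translator change above adds a null guard because Arrays.asList throws a NullPointerException when handed a null array, which is exactly what an uninitialized storageIDs field would supply. A small standalone sketch of that guard, plain JDK code with assumed names rather than the HDFS translator itself:

import java.util.Arrays;
import java.util.List;

// Standalone illustration (not HDFS code): why the storageIDs conversion
// needs a null guard before Arrays.asList.
public class NullSafeAsList {

  // Mirror of the guarded expression used in the updatePipeline request builder.
  static List<String> toListOrNull(String[] storageIDs) {
    return storageIDs == null ? null : Arrays.asList(storageIDs);
  }

  public static void main(String[] args) {
    System.out.println(toListOrNull(new String[] {"storage-a", "storage-b"})); // [storage-a, storage-b]
    System.out.println(toListOrNull(null));                                    // null

    try {
      Arrays.asList((String[]) null); // the unguarded call: throws NullPointerException
    } catch (NullPointerException expected) {
      System.out.println("unguarded Arrays.asList(null) failed: " + expected);
    }
  }
}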