HDFS-7434. DatanodeID hashCode should not be mutable. Contributed by Daryn Sharp.
parent c66c3ac6bf
commit 722b479469
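Background for the change: DatanodeID's hash code previously mixed in xferAddr (the ip:port string), which is rewritten when a datanode re-registers under a new address, so an instance already stored in a hash-based collection could become unfindable after the mutation. The snippet below is a minimal, self-contained sketch of that failure mode; the NodeKey class and its field are invented for illustration and are not the Hadoop class:

    import java.util.HashSet;
    import java.util.Set;

    // Hypothetical key class whose hashCode depends on a mutable field.
    class NodeKey {
      String xferAddr; // mutable, like the old DatanodeID ip:port string

      NodeKey(String xferAddr) { this.xferAddr = xferAddr; }

      @Override public int hashCode() { return xferAddr.hashCode(); }
      @Override public boolean equals(Object o) {
        return o instanceof NodeKey && xferAddr.equals(((NodeKey) o).xferAddr);
      }

      public static void main(String[] args) {
        Set<NodeKey> live = new HashSet<>();
        NodeKey node = new NodeKey("10.0.0.1:50010");
        live.add(node);

        // Re-registration changes the address, and with it the hash code.
        node.xferAddr = "10.0.0.2:50010";

        // The set still holds the object, but lookups now probe the wrong bucket.
        System.out.println(live.contains(node)); // almost always false
        System.out.println(live.size());         // 1
      }
    }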
@@ -1091,6 +1091,8 @@ Release 2.7.0 - UNRELEASED
     HDFS-7879. hdfs.dll does not export functions of the public libhdfs API.
     (Chris Nauroth via wheat9)
 
+    HDFS-7434. DatanodeID hashCode should not be mutable. (daryn via kihwal)
+
   BREAKDOWN OF HDFS-7584 SUBTASKS AND RELATED JIRAS
 
     HDFS-7720. Quota by Storage Type API, tools and ClientNameNode
@@ -47,19 +47,23 @@ public class DatanodeID implements Comparable<DatanodeID> {
   private int infoSecurePort; // info server port
   private int ipcPort;        // IPC server port
   private String xferAddr;
-  private int hashCode = -1;
 
   /**
    * UUID identifying a given datanode. For upgraded Datanodes this is the
    * same as the StorageID that was previously used by this Datanode.
    * For newly formatted Datanodes it is a UUID.
    */
-  private String datanodeUuid = null;
+  private final String datanodeUuid;
 
   public DatanodeID(DatanodeID from) {
+    this(from.getDatanodeUuid(), from);
+  }
+
+  @VisibleForTesting
+  public DatanodeID(String datanodeUuid, DatanodeID from) {
     this(from.getIpAddr(),
         from.getHostName(),
-        from.getDatanodeUuid(),
+        datanodeUuid,
         from.getXferPort(),
         from.getInfoPort(),
         from.getInfoSecurePort(),
@@ -81,19 +85,24 @@ public DatanodeID(DatanodeID from) {
    */
   public DatanodeID(String ipAddr, String hostName, String datanodeUuid,
       int xferPort, int infoPort, int infoSecurePort, int ipcPort) {
-    this.ipAddr = ipAddr;
+    setIpAndXferPort(ipAddr, xferPort);
     this.hostName = hostName;
     this.datanodeUuid = checkDatanodeUuid(datanodeUuid);
-    this.xferPort = xferPort;
     this.infoPort = infoPort;
     this.infoSecurePort = infoSecurePort;
     this.ipcPort = ipcPort;
-    updateXferAddrAndInvalidateHashCode();
   }
 
   public void setIpAddr(String ipAddr) {
+    //updated during registration, preserve former xferPort
+    setIpAndXferPort(ipAddr, xferPort);
+  }
+
+  private void setIpAndXferPort(String ipAddr, int xferPort) {
+    // build xferAddr string to reduce cost of frequent use
     this.ipAddr = ipAddr;
-    updateXferAddrAndInvalidateHashCode();
+    this.xferPort = xferPort;
+    this.xferAddr = ipAddr + ":" + xferPort;
   }
 
   public void setPeerHostName(String peerHostName) {
@@ -107,12 +116,6 @@ public String getDatanodeUuid() {
     return datanodeUuid;
   }
 
-  @VisibleForTesting
-  public void setDatanodeUuidForTesting(String datanodeUuid) {
-    this.datanodeUuid = datanodeUuid;
-    updateXferAddrAndInvalidateHashCode();
-  }
-
   private String checkDatanodeUuid(String uuid) {
     if (uuid == null || uuid.isEmpty()) {
       return null;
@@ -242,11 +245,7 @@ public boolean equals(Object to) {
 
   @Override
   public int hashCode() {
-    if (hashCode == -1) {
-      int newHashCode = xferAddr.hashCode() ^ datanodeUuid.hashCode();
-      hashCode = newHashCode & Integer.MAX_VALUE;
-    }
-    return hashCode;
+    return datanodeUuid.hashCode();
   }
 
   @Override
@@ -259,14 +258,12 @@ public String toString() {
    * Note that this does not update storageID.
    */
   public void updateRegInfo(DatanodeID nodeReg) {
-    ipAddr = nodeReg.getIpAddr();
+    setIpAndXferPort(nodeReg.getIpAddr(), nodeReg.getXferPort());
     hostName = nodeReg.getHostName();
     peerHostName = nodeReg.getPeerHostName();
-    xferPort = nodeReg.getXferPort();
     infoPort = nodeReg.getInfoPort();
     infoSecurePort = nodeReg.getInfoSecurePort();
     ipcPort = nodeReg.getIpcPort();
-    updateXferAddrAndInvalidateHashCode();
   }
 
   /**
@@ -279,13 +276,4 @@ public void updateRegInfo(DatanodeID nodeReg) {
   public int compareTo(DatanodeID that) {
     return getXferAddr().compareTo(that.getXferAddr());
   }
-
-  // NOTE: mutable hash codes are dangerous, however this class chooses to
-  // use them. this method must be called when a value mutates that is used
-  // to compute the hash, equality, or comparison of instances.
-  private void updateXferAddrAndInvalidateHashCode() {
-    xferAddr = ipAddr + ":" + xferPort;
-    // can't compute new hash yet because uuid might still null...
-    hashCode = -1;
-  }
 }
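With the field final and hashCode() reduced to return datanodeUuid.hashCode() (see the hunk above), the identity used for hashing can no longer drift while the mutable network fields are rewritten on re-registration. A minimal sketch of that pattern, using an invented NodeId class rather than the real DatanodeID:

    import java.util.HashSet;
    import java.util.Set;

    // Illustrative only: hashing keyed on an immutable identifier.
    class NodeId {
      private final String uuid;  // immutable identity, like DatanodeID#datanodeUuid
      private String xferAddr;    // mutable, rewritten when the node re-registers

      NodeId(String uuid, String xferAddr) {
        this.uuid = uuid;
        this.xferAddr = xferAddr;
      }

      void setXferAddr(String xferAddr) {
        this.xferAddr = xferAddr;   // safe: never affects hashCode()
      }

      @Override public int hashCode() { return uuid.hashCode(); }
      @Override public boolean equals(Object o) {
        return o instanceof NodeId && uuid.equals(((NodeId) o).uuid);
      }

      public static void main(String[] args) {
        Set<NodeId> live = new HashSet<>();
        NodeId node = new NodeId("uuid-1234", "10.0.0.1:50010");
        live.add(node);

        node.setXferAddr("10.0.0.2:50010"); // address changes, hash does not

        System.out.println(live.contains(node)); // true: lookup still works
      }
    }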
@@ -25,6 +25,8 @@
 import org.apache.hadoop.hdfs.server.common.Storage;
 import org.apache.hadoop.hdfs.server.common.StorageInfo;
 
+import com.google.common.annotations.VisibleForTesting;
+
 /**
  * DatanodeRegistration class contains all information the name-node needs
  * to identify and verify a data-node when it contacts the name-node.
@@ -39,6 +41,14 @@ public class DatanodeRegistration extends DatanodeID
   private ExportedBlockKeys exportedKeys;
   private final String softwareVersion;
 
+  @VisibleForTesting
+  public DatanodeRegistration(String uuid, DatanodeRegistration dnr) {
+    this(new DatanodeID(uuid, dnr),
+        dnr.getStorageInfo(),
+        dnr.getExportedKeys(),
+        dnr.getSoftwareVersion());
+  }
+
   public DatanodeRegistration(DatanodeID dn, StorageInfo info,
       ExportedBlockKeys keys, String softwareVersion) {
     super(dn);
@@ -538,10 +538,6 @@ public void testHighestPriReplSrcChosenDespiteMaxReplLimit() throws Exception {
   public void testSafeModeIBR() throws Exception {
     DatanodeDescriptor node = spy(nodes.get(0));
     DatanodeStorageInfo ds = node.getStorageInfos()[0];
-
-    // TODO: Needs to be fixed. DatanodeUuid is not storageID.
-    node.setDatanodeUuidForTesting(ds.getStorageID());
-
     node.isAlive = true;
 
     DatanodeRegistration nodeReg =
@@ -587,9 +583,6 @@ public void testSafeModeIBRAfterIncremental() throws Exception {
     DatanodeDescriptor node = spy(nodes.get(0));
     DatanodeStorageInfo ds = node.getStorageInfos()[0];
 
-    // TODO: Needs to be fixed. DatanodeUuid is not storageID.
-    node.setDatanodeUuidForTesting(ds.getStorageID());
-
     node.isAlive = true;
 
     DatanodeRegistration nodeReg =
@@ -38,7 +38,6 @@
 import org.apache.hadoop.hdfs.server.protocol.DatanodeRegistration;
 import org.apache.hadoop.util.VersionInfo;
 import org.junit.After;
-import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 import org.mockito.internal.util.reflection.Whitebox;
@@ -119,10 +118,17 @@ public void testCompInvalidate() throws Exception {
   public void testDatanodeReformat() throws Exception {
     namesystem.writeLock();
     try {
+      // Change the datanode UUID to emulate a reformat
+      String poolId = cluster.getNamesystem().getBlockPoolId();
+      DatanodeRegistration dnr = cluster.getDataNode(nodes[0].getIpcPort())
+          .getDNRegistrationForBP(poolId);
+      dnr = new DatanodeRegistration(UUID.randomUUID().toString(), dnr);
+      cluster.stopDataNode(nodes[0].getXferAddr());
+
       Block block = new Block(0, 0, GenerationStamp.LAST_RESERVED_STAMP);
       bm.addToInvalidates(block, nodes[0]);
-      // Change the datanode UUID to emulate a reformation
-      nodes[0].setDatanodeUuidForTesting("fortesting");
+      bm.getDatanodeManager().registerDatanode(dnr);
+
       // Since UUID has changed, the invalidation work should be skipped
       assertEquals(0, bm.computeInvalidateWork(1));
       assertEquals(0, bm.getPendingDeletionBlocksCount());
@@ -158,8 +164,8 @@ public void testDatanodeReRegistration() throws Exception {
     // Re-register each DN and see that it wipes the invalidation work
     for (DataNode dn : cluster.getDataNodes()) {
       DatanodeID did = dn.getDatanodeId();
-      did.setDatanodeUuidForTesting(UUID.randomUUID().toString());
-      DatanodeRegistration reg = new DatanodeRegistration(did,
+      DatanodeRegistration reg = new DatanodeRegistration(
+          new DatanodeID(UUID.randomUUID().toString(), did),
           new StorageInfo(HdfsServerConstants.NodeType.DATA_NODE),
           new ExportedBlockKeys(),
           VersionInfo.getVersion());
@@ -173,7 +173,8 @@ public DatanodeRegistration answer(InvocationOnMock invocation)
         } else {
           DatanodeRegistration dr =
              (DatanodeRegistration) invocation.getArguments()[0];
-          datanodeRegistration.setDatanodeUuidForTesting(dr.getDatanodeUuid());
+          datanodeRegistration =
+              new DatanodeRegistration(dr.getDatanodeUuid(), dr);
           LOG.info("mockito succeeded " + datanodeRegistration);
           return datanodeRegistration;
         }