HDFS-14999. Avoid Potential Infinite Loop in DFSNetworkTopology. Contributed by Ayush Saxena.

This commit is contained in:
Ayush Saxena 2020-05-18 21:06:46 +05:30
parent 2abcf7762a
commit c84e6beada

View File

@@ -249,17 +249,10 @@ Node chooseRandomWithStorageType(final String scope,
return null; return null;
} }
// to this point, it is guaranteed that there is at least one node // to this point, it is guaranteed that there is at least one node
// that satisfies the requirement, keep trying until we found one. // that satisfies the requirement.
Node chosen; Node chosen =
do { chooseRandomWithStorageTypeAndExcludeRoot(root, excludeRoot, type,
chosen = chooseRandomWithStorageTypeAndExcludeRoot(root, excludeRoot, excludedNodes);
type);
if (excludedNodes == null || !excludedNodes.contains(chosen)) {
break;
} else {
LOG.debug("Node {} is excluded, continuing.", chosen);
}
} while (true);
LOG.debug("chooseRandom returning {}", chosen); LOG.debug("chooseRandom returning {}", chosen);
return chosen; return chosen;
} }
@@ -268,23 +261,23 @@ Node chooseRandomWithStorageType(final String scope,
* Choose a random node that has the required storage type, under the given * Choose a random node that has the required storage type, under the given
* root, with an excluded subtree root (could also just be a leaf node). * root, with an excluded subtree root (could also just be a leaf node).
* *
* Note that excludedNode is checked after a random node, so it is not being
* handled here.
*
* @param root the root node where we start searching for a datanode * @param root the root node where we start searching for a datanode
* @param excludeRoot the root of the subtree what should be excluded * @param excludeRoot the root of the subtree what should be excluded
* @param type the expected storage type * @param type the expected storage type
* @param excludedNodes the list of nodes to be excluded
* @return a random datanode, with the storage type, and is not in excluded * @return a random datanode, with the storage type, and is not in excluded
* scope * scope
*/ */
private Node chooseRandomWithStorageTypeAndExcludeRoot( private Node chooseRandomWithStorageTypeAndExcludeRoot(
DFSTopologyNodeImpl root, Node excludeRoot, StorageType type) { DFSTopologyNodeImpl root, Node excludeRoot, StorageType type,
Collection<Node> excludedNodes) {
Node chosenNode; Node chosenNode;
if (root.isRack()) { if (root.isRack()) {
// children are datanode descriptor // children are datanode descriptor
ArrayList<Node> candidates = new ArrayList<>(); ArrayList<Node> candidates = new ArrayList<>();
for (Node node : root.getChildren()) { for (Node node : root.getChildren()) {
if (node.equals(excludeRoot)) { if (node.equals(excludeRoot) || (excludedNodes != null && excludedNodes
.contains(node))) {
continue; continue;
} }
DatanodeDescriptor dnDescriptor = (DatanodeDescriptor)node; DatanodeDescriptor dnDescriptor = (DatanodeDescriptor)node;
@@ -301,7 +294,7 @@ private Node chooseRandomWithStorageTypeAndExcludeRoot(
} else { } else {
// the children are inner nodes // the children are inner nodes
ArrayList<DFSTopologyNodeImpl> candidates = ArrayList<DFSTopologyNodeImpl> candidates =
getEligibleChildren(root, excludeRoot, type); getEligibleChildren(root, excludeRoot, type, excludedNodes);
if (candidates.size() == 0) { if (candidates.size() == 0) {
return null; return null;
} }
@@ -330,7 +323,7 @@ private Node chooseRandomWithStorageTypeAndExcludeRoot(
} }
DFSTopologyNodeImpl nextRoot = candidates.get(idxChosen); DFSTopologyNodeImpl nextRoot = candidates.get(idxChosen);
chosenNode = chooseRandomWithStorageTypeAndExcludeRoot( chosenNode = chooseRandomWithStorageTypeAndExcludeRoot(
nextRoot, excludeRoot, type); nextRoot, excludeRoot, type, excludedNodes);
} }
return chosenNode; return chosenNode;
} }
@@ -343,11 +336,13 @@ private Node chooseRandomWithStorageTypeAndExcludeRoot(
* @param root the subtree root we check. * @param root the subtree root we check.
* @param excludeRoot the root of the subtree that should be excluded. * @param excludeRoot the root of the subtree that should be excluded.
* @param type the storage type we look for. * @param type the storage type we look for.
* @param excludedNodes the list of excluded nodes.
* @return a list of possible nodes, each of them is eligible as the next * @return a list of possible nodes, each of them is eligible as the next
* level root we search. * level root we search.
*/ */
private ArrayList<DFSTopologyNodeImpl> getEligibleChildren( private ArrayList<DFSTopologyNodeImpl> getEligibleChildren(
DFSTopologyNodeImpl root, Node excludeRoot, StorageType type) { DFSTopologyNodeImpl root, Node excludeRoot, StorageType type,
Collection<Node> excludedNodes) {
ArrayList<DFSTopologyNodeImpl> candidates = new ArrayList<>(); ArrayList<DFSTopologyNodeImpl> candidates = new ArrayList<>();
int excludeCount = 0; int excludeCount = 0;
if (excludeRoot != null && root.isAncestor(excludeRoot)) { if (excludeRoot != null && root.isAncestor(excludeRoot)) {
@@ -374,6 +369,24 @@ private ArrayList<DFSTopologyNodeImpl> getEligibleChildren(
(dfsNode.isAncestor(excludeRoot) || dfsNode.equals(excludeRoot))) { (dfsNode.isAncestor(excludeRoot) || dfsNode.equals(excludeRoot))) {
storageCount -= excludeCount; storageCount -= excludeCount;
} }
if (excludedNodes != null) {
for (Node excludedNode : excludedNodes) {
if (excludeRoot != null && isNodeInScope(excludedNode,
NodeBase.getPath(excludeRoot))) {
continue;
}
if (isNodeInScope(excludedNode, NodeBase.getPath(node))) {
if (excludedNode instanceof DatanodeDescriptor) {
storageCount -=
((DatanodeDescriptor) excludedNode).hasStorageType(type) ?
1 : 0;
} else if (excludedNode instanceof DFSTopologyNodeImpl) {
storageCount -= ((DFSTopologyNodeImpl) excludedNode)
.getSubtreeStorageCount(type);
}
}
}
}
if (storageCount > 0) { if (storageCount > 0) {
candidates.add(dfsNode); candidates.add(dfsNode);
} }