YARN-7892. Revisit NodeAttribute class structure. Contributed by Naganarasimha G R.
parent 5dc7d6e0f3
commit 8cf6a9a2bd
@@ -59,10 +59,13 @@
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ContainerReport;
import org.apache.hadoop.yarn.api.records.NodeAttribute;
import org.apache.hadoop.yarn.api.records.NodeAttributeKey;
import org.apache.hadoop.yarn.api.records.NodeAttributeInfo;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.api.records.NodeLabel;
import org.apache.hadoop.yarn.api.records.NodeReport;
import org.apache.hadoop.yarn.api.records.NodeState;
import org.apache.hadoop.yarn.api.records.NodeToAttributeValue;
import org.apache.hadoop.yarn.api.records.Priority;
import org.apache.hadoop.yarn.api.records.QueueUserACLInfo;
import org.apache.hadoop.yarn.api.records.Resource;
@@ -541,14 +544,14 @@ public List<ResourceTypeInfo> getResourceTypeInfo()
}

@Override
public Set<NodeAttribute> getClusterAttributes()
public Set<NodeAttributeInfo> getClusterAttributes()
throws YarnException, IOException {
return client.getClusterAttributes();
}

@Override
public Map<NodeAttribute, Set<String>> getAttributesToNodes(
Set<NodeAttribute> attributes) throws YarnException, IOException {
public Map<NodeAttributeKey, List<NodeToAttributeValue>> getAttributesToNodes(
Set<NodeAttributeKey> attributes) throws YarnException, IOException {
return client.getAttributesToNodes(attributes);
}

@@ -17,22 +17,24 @@
*/
package org.apache.hadoop.yarn.api.protocolrecords;

import static org.apache.hadoop.classification.InterfaceAudience.*;
import static org.apache.hadoop.classification.InterfaceStability.*;
import org.apache.hadoop.yarn.api.ApplicationClientProtocol;
import org.apache.hadoop.yarn.api.records.NodeAttribute;
import org.apache.hadoop.yarn.util.Records;

import java.util.Set;

import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Evolving;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.yarn.api.ApplicationClientProtocol;
import org.apache.hadoop.yarn.api.records.NodeAttributeKey;
import org.apache.hadoop.yarn.util.Records;

/**
* <p>
* The request from clients to get attribtues to nodes mapping
* in the cluster from the <code>ResourceManager</code>.
* The request from clients to get node to attribute value mapping for all or
* given set of Node AttributeKey's in the cluster from the
* <code>ResourceManager</code>.
* </p>
*
* @see ApplicationClientProtocol#getAttributesToNodes
* (GetAttributesToNodesRequest)
* (GetAttributesToNodesRequest)
*/
@Public
@Evolving
@@ -43,7 +45,7 @@ public static GetAttributesToNodesRequest newInstance() {
}

public static GetAttributesToNodesRequest newInstance(
Set<NodeAttribute> attributes) {
Set<NodeAttributeKey> attributes) {
GetAttributesToNodesRequest request =
Records.newRecord(GetAttributesToNodesRequest.class);
request.setNodeAttributes(attributes);
@@ -51,20 +53,22 @@ public static GetAttributesToNodesRequest newInstance(
}

/**
* Set node attributes for which the mapping is required.
* Set node attributeKeys for which the mapping of hostname to attribute value
* is required.
*
* @param attributes Set<NodeAttribute> provided.
* @param attributes Set<NodeAttributeKey> provided.
*/
@Public
@Unstable
public abstract void setNodeAttributes(Set<NodeAttribute> attributes);
public abstract void setNodeAttributes(Set<NodeAttributeKey> attributes);

/**
* Get node attributes for which mapping mapping is required.
* Get node attributeKeys for which mapping of hostname to attribute value is
* required.
*
* @return Set<NodeAttribute>
* @return Set<NodeAttributeKey>
*/
@Public
@Unstable
public abstract Set<NodeAttribute> getNodeAttributes();
public abstract Set<NodeAttributeKey> getNodeAttributes();
}

@@ -17,29 +17,30 @@
*/
package org.apache.hadoop.yarn.api.protocolrecords;

import static org.apache.hadoop.classification.InterfaceAudience.Public;
import static org.apache.hadoop.classification.InterfaceStability.Evolving;
import org.apache.hadoop.yarn.api.ApplicationClientProtocol;
import org.apache.hadoop.yarn.api.records.NodeAttribute;
import org.apache.hadoop.yarn.util.Records;

import java.util.List;
import java.util.Map;
import java.util.Set;

import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Evolving;
import org.apache.hadoop.yarn.api.ApplicationClientProtocol;
import org.apache.hadoop.yarn.api.records.NodeAttributeKey;
import org.apache.hadoop.yarn.api.records.NodeToAttributeValue;
import org.apache.hadoop.yarn.util.Records;

/**
* <p>
* The response sent by the <code>ResourceManager</code> to a client requesting
* attributes to hostname mapping.
* node to attribute value mapping for all or given set of Node AttributeKey's.
* </p>
*
* @see ApplicationClientProtocol#getAttributesToNodes
* (GetAttributesToNodesRequest)
* (GetAttributesToNodesRequest)
*/
@Public
@Evolving
public abstract class GetAttributesToNodesResponse {
public static GetAttributesToNodesResponse newInstance(
Map<NodeAttribute, Set<String>> map) {
Map<NodeAttributeKey, List<NodeToAttributeValue>> map) {
GetAttributesToNodesResponse response =
Records.newRecord(GetAttributesToNodesResponse.class);
response.setAttributeToNodes(map);
@@ -48,15 +49,17 @@ public static GetAttributesToNodesResponse newInstance(

@Public
@Evolving
public abstract void setAttributeToNodes(Map<NodeAttribute, Set<String>> map);
public abstract void setAttributeToNodes(
Map<NodeAttributeKey, List<NodeToAttributeValue>> map);

/*
* Get attributes to node hostname mapping.
/**
* Get mapping of NodeAttributeKey to its associated mapping of list of
* NodeToAttributeValuenode to attribute value.
*
* @return Map<NodeAttribute, Set<String>> node attributes to hostname
* mapping.
* @return Map<NodeAttributeKey, List<NodeToAttributeValue>> node attributes
* to list of NodeToAttributeValuenode.
*/
@Public
@Evolving
public abstract Map<NodeAttribute, Set<String>> getAttributesToNodes();
public abstract Map<NodeAttributeKey, List<NodeToAttributeValue>> getAttributesToNodes();
}

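For orientation only, a rough sketch of how client code might build the reworked request and read the reworked response; this is not part of the commit, and the attribute prefix/name values and the "protocol" handle are made up for illustration:

// Request is now keyed by NodeAttributeKey instead of full NodeAttribute records.
Set<NodeAttributeKey> keys = new HashSet<>();
keys.add(NodeAttributeKey.newInstance("rm.yarn.io", "GPU"));
GetAttributesToNodesRequest request =
    GetAttributesToNodesRequest.newInstance(keys);

// Response now carries the attribute value per host, not just a set of hostnames.
GetAttributesToNodesResponse response = protocol.getAttributesToNodes(request);
Map<NodeAttributeKey, List<NodeToAttributeValue>> mapping =
    response.getAttributesToNodes();
for (Map.Entry<NodeAttributeKey, List<NodeToAttributeValue>> entry
    : mapping.entrySet()) {
  for (NodeToAttributeValue nodeValue : entry.getValue()) {
    System.out.println(entry.getKey() + " -> " + nodeValue.getHostname()
        + " = " + nodeValue.getAttributeValue());
  }
}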
@@ -17,14 +17,15 @@
*/
package org.apache.hadoop.yarn.api.protocolrecords;

import static org.apache.hadoop.classification.InterfaceAudience.*;
import static org.apache.hadoop.classification.InterfaceStability.*;
import org.apache.hadoop.yarn.api.ApplicationClientProtocol;
import org.apache.hadoop.yarn.api.records.NodeAttribute;
import org.apache.hadoop.yarn.util.Records;

import java.util.Set;

import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Evolving;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.yarn.api.ApplicationClientProtocol;
import org.apache.hadoop.yarn.api.records.NodeAttributeInfo;
import org.apache.hadoop.yarn.util.Records;

/**
* <p>
* The response sent by the <code>ResourceManager</code> to a client requesting
@@ -45,7 +46,7 @@ public abstract class GetClusterNodeAttributesResponse {
* @return GetClusterNodeAttributesResponse.
*/
public static GetClusterNodeAttributesResponse newInstance(
Set<NodeAttribute> attributes) {
Set<NodeAttributeInfo> attributes) {
GetClusterNodeAttributesResponse response =
Records.newRecord(GetClusterNodeAttributesResponse.class);
response.setNodeAttributes(attributes);
@@ -55,18 +56,18 @@ public static GetClusterNodeAttributesResponse newInstance(
/**
* Set node attributes to the response.
*
* @param attributes Node attributes
* @param attributes Map of Node attributeKey to Type.
*/
@Public
@Unstable
public abstract void setNodeAttributes(Set<NodeAttribute> attributes);
public abstract void setNodeAttributes(Set<NodeAttributeInfo> attributes);

/**
* Get node attributes of the response.
* Get node attributes from the response.
*
* @return Node attributes
* @return Node attributes.
*/
@Public
@Unstable
public abstract Set<NodeAttribute> getNodeAttributes();
public abstract Set<NodeAttributeInfo> getNodeAttributes();
}

@@ -58,8 +58,9 @@ public static NodeAttribute newInstance(String attributePrefix,
String attributeName, NodeAttributeType attributeType,
String attributeValue) {
NodeAttribute nodeAttribute = Records.newRecord(NodeAttribute.class);
nodeAttribute.setAttributePrefix(attributePrefix);
nodeAttribute.setAttributeName(attributeName);
NodeAttributeKey nodeAttributeKey =
NodeAttributeKey.newInstance(attributePrefix, attributeName);
nodeAttribute.setAttributeKey(nodeAttributeKey);
nodeAttribute.setAttributeType(attributeType);
nodeAttribute.setAttributeValue(attributeValue);
return nodeAttribute;
@@ -67,19 +68,11 @@ public static NodeAttribute newInstance(String attributePrefix,

@Public
@Unstable
public abstract String getAttributePrefix();
public abstract NodeAttributeKey getAttributeKey();

@Public
@Unstable
public abstract void setAttributePrefix(String attributePrefix);

@Public
@Unstable
public abstract String getAttributeName();

@Public
@Unstable
public abstract void setAttributeName(String attributeName);
public abstract void setAttributeKey(NodeAttributeKey attributeKey);

@Public
@Unstable

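As a quick illustration of the restructuring above (values are made up, and this snippet is not code from the commit), a NodeAttribute is now addressed through its NodeAttributeKey rather than through separate prefix/name accessors:

// The prefix/name pair now lives in a NodeAttributeKey owned by the attribute.
NodeAttribute attribute = NodeAttribute.newInstance(
    "nm.yarn.io", "hostname", NodeAttributeType.STRING, "host1234");
NodeAttributeKey key = attribute.getAttributeKey();
String prefix = key.getAttributePrefix(); // "nm.yarn.io"
String name = key.getAttributeName();     // "hostname"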
@@ -0,0 +1,62 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.api.records;

import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.yarn.util.Records;

/**
* <p>
* Node Attribute Info describes a NodeAttribute.
* </p>
*/
@Public
@Unstable
public abstract class NodeAttributeInfo {

public static NodeAttributeInfo newInstance(NodeAttribute nodeAttribute) {
return newInstance(nodeAttribute.getAttributeKey(),
nodeAttribute.getAttributeType());
}

public static NodeAttributeInfo newInstance(NodeAttributeKey nodeAttributeKey,
NodeAttributeType attributeType) {
NodeAttributeInfo nodeAttribute =
Records.newRecord(NodeAttributeInfo.class);
nodeAttribute.setAttributeKey(nodeAttributeKey);
nodeAttribute.setAttributeType(attributeType);
return nodeAttribute;
}

@Public
@Unstable
public abstract NodeAttributeKey getAttributeKey();

@Public
@Unstable
public abstract void setAttributeKey(NodeAttributeKey attributeKey);

@Public
@Unstable
public abstract NodeAttributeType getAttributeType();

@Public
@Unstable
public abstract void setAttributeType(NodeAttributeType attributeType);
}
@@ -0,0 +1,66 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.hadoop.yarn.api.records;

import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.yarn.util.Records;

/**
* <p>
* Node AttributeKey uniquely identifies a given Node Attribute. Node Attribute
* is identified based on attribute prefix and name.
* </p>
* <p>
* Node Attribute Prefix is used as namespace to segregate the attributes.
* </p>
*/
@Public
@Unstable
public abstract class NodeAttributeKey {

public static NodeAttributeKey newInstance(String attributeName) {
return newInstance(NodeAttribute.PREFIX_CENTRALIZED, attributeName);
}

public static NodeAttributeKey newInstance(String attributePrefix,
String attributeName) {
NodeAttributeKey nodeAttributeKey =
Records.newRecord(NodeAttributeKey.class);
nodeAttributeKey.setAttributePrefix(attributePrefix);
nodeAttributeKey.setAttributeName(attributeName);
return nodeAttributeKey;
}

@Public
@Unstable
public abstract String getAttributePrefix();

@Public
@Unstable
public abstract void setAttributePrefix(String attributePrefix);

@Public
@Unstable
public abstract String getAttributeName();

@Public
@Unstable
public abstract void setAttributeName(String attributeName);
}

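A minimal sketch of the two factory methods above (the attribute name "GPU" is made up for illustration):

// Centralized prefix is filled in automatically by the single-argument factory.
NodeAttributeKey centralized = NodeAttributeKey.newInstance("GPU");
// Or spell out a prefix explicitly to namespace the attribute.
NodeAttributeKey custom = NodeAttributeKey.newInstance("rm.yarn.io", "GPU");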
@@ -0,0 +1,57 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.hadoop.yarn.api.records;

import org.apache.hadoop.classification.InterfaceAudience.Public;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.yarn.util.Records;

/**
* <p>
* Mapping of Attribute Value to a Node.
* </p>
*/
@Public
@Unstable
public abstract class NodeToAttributeValue {
public static NodeToAttributeValue newInstance(String hostname,
String attributeValue) {
NodeToAttributeValue nodeToAttributeValue =
Records.newRecord(NodeToAttributeValue.class);
nodeToAttributeValue.setAttributeValue(attributeValue);
nodeToAttributeValue.setHostname(hostname);
return nodeToAttributeValue;
}

@Public
@Unstable
public abstract String getAttributeValue();

@Public
@Unstable
public abstract void setAttributeValue(String attributeValue);

@Public
@Unstable
public abstract String getHostname();

@Public
@Unstable
public abstract void setHostname(String hostname);
}

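And a correspondingly small sketch for the record above (the hostname and value strings are illustrative only):

NodeToAttributeValue nodeValue =
    NodeToAttributeValue.newInstance("host1234", "3_0_2");
String host = nodeValue.getHostname();
String value = nodeValue.getAttributeValue();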
@@ -376,16 +376,31 @@ enum NodeAttributeTypeProto {
STRING = 1;
}

message NodeAttributeProto {
optional string attributePrefix = 1;
message NodeAttributeKeyProto {
optional string attributePrefix = 1 [default="rm.yarn.io"];
required string attributeName = 2;
optional NodeAttributeTypeProto attributeType = 3 [default = STRING];
optional string attributeValue = 4 [default=""];
}

message NodeAttributeProto {
required NodeAttributeKeyProto attributeKey = 1;
optional NodeAttributeTypeProto attributeType = 2 [default = STRING];
optional string attributeValue = 3 [default=""];
}


message NodeAttributeInfoProto {
required NodeAttributeKeyProto attributeKey = 1;
required NodeAttributeTypeProto attributeType = 2;
}

message NodeToAttributeValueProto {
required string hostname = 1;
required string attributeValue = 2;
}

message AttributeToNodesProto {
required NodeAttributeProto nodeAttribute = 1;
repeated string hostnames = 2;
required NodeAttributeKeyProto nodeAttribute = 1;
repeated NodeToAttributeValueProto nodeValueMap = 2;
}

message NodeToAttributesProto {

@@ -264,11 +264,11 @@ message GetClusterNodeAttributesRequestProto {
}

message GetClusterNodeAttributesResponseProto {
repeated NodeAttributeProto nodeAttributes = 1;
repeated NodeAttributeInfoProto nodeAttributes = 1;
}

message GetAttributesToNodesRequestProto {
repeated NodeAttributeProto nodeAttributes = 1;
repeated NodeAttributeKeyProto nodeAttributes = 1;
}

message GetAttributesToNodesResponseProto {

@@ -52,10 +52,13 @@
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ContainerReport;
import org.apache.hadoop.yarn.api.records.NodeAttribute;
import org.apache.hadoop.yarn.api.records.NodeAttributeKey;
import org.apache.hadoop.yarn.api.records.NodeAttributeInfo;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.api.records.NodeLabel;
import org.apache.hadoop.yarn.api.records.NodeReport;
import org.apache.hadoop.yarn.api.records.NodeState;
import org.apache.hadoop.yarn.api.records.NodeToAttributeValue;
import org.apache.hadoop.yarn.api.records.Priority;
import org.apache.hadoop.yarn.api.records.QueueInfo;
import org.apache.hadoop.yarn.api.records.QueueUserACLInfo;
@@ -914,26 +917,27 @@ public abstract List<ResourceTypeInfo> getResourceTypeInfo()
*/
@Public
@Unstable
public abstract Set<NodeAttribute> getClusterAttributes()
public abstract Set<NodeAttributeInfo> getClusterAttributes()
throws YarnException, IOException;

/**
* <p>
* The interface used by client to get Attributes to nodes mapping
* for specified node attributes in existing cluster.
* The interface used by client to get mapping of AttributeKey to associated
* NodeToAttributeValue list for specified node attributeKeys in the cluster.
* </p>
*
* @param attributes Attributes for which Attributes to nodes mapping has to
* be retrieved.If empty or null is set then will return
* all attributes to node mapping in cluster.
* @return Attributes to nodes mappings for specific Attributes.
* @param attributes AttributeKeys for which associated NodeToAttributeValue
* mapping value has to be retrieved. If empty or null is set then
* will return mapping for all attributeKeys in the cluster
* @return mapping of AttributeKey to List of associated
* NodeToAttributeValue's.
* @throws YarnException
* @throws IOException
*/
@Public
@Unstable
public abstract Map<NodeAttribute, Set<String>> getAttributesToNodes(
Set<NodeAttribute> attributes) throws YarnException, IOException;
public abstract Map<NodeAttributeKey, List<NodeToAttributeValue>> getAttributesToNodes(
Set<NodeAttributeKey> attributes) throws YarnException, IOException;

/**
* <p>

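A hedged sketch of what the reworked YarnClient calls might look like from application code after this change; the client setup and the attribute key are assumptions for illustration and are not part of this diff:

YarnClient yarnClient = YarnClient.createYarnClient();
yarnClient.init(new YarnConfiguration());
yarnClient.start();

// Cluster-wide attribute catalogue: key plus type, no per-node values.
Set<NodeAttributeInfo> clusterAttributes = yarnClient.getClusterAttributes();

// Per-attribute placement detail: which host carries the attribute and with what value.
Set<NodeAttributeKey> keys = new HashSet<>();
keys.add(NodeAttributeKey.newInstance("rm.yarn.io", "GPU"));
Map<NodeAttributeKey, List<NodeToAttributeValue>> attributesToNodes =
    yarnClient.getAttributesToNodes(keys);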
@@ -99,10 +99,13 @@
import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
import org.apache.hadoop.yarn.api.records.ContainerReport;
import org.apache.hadoop.yarn.api.records.NodeAttribute;
import org.apache.hadoop.yarn.api.records.NodeAttributeKey;
import org.apache.hadoop.yarn.api.records.NodeAttributeInfo;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.api.records.NodeLabel;
import org.apache.hadoop.yarn.api.records.NodeReport;
import org.apache.hadoop.yarn.api.records.NodeState;
import org.apache.hadoop.yarn.api.records.NodeToAttributeValue;
import org.apache.hadoop.yarn.api.records.Priority;
import org.apache.hadoop.yarn.api.records.QueueInfo;
import org.apache.hadoop.yarn.api.records.QueueUserACLInfo;
@@ -981,7 +984,7 @@ public List<ResourceTypeInfo> getResourceTypeInfo()
}

@Override
public Set<NodeAttribute> getClusterAttributes()
public Set<NodeAttributeInfo> getClusterAttributes()
throws YarnException, IOException {
GetClusterNodeAttributesRequest request =
GetClusterNodeAttributesRequest.newInstance();
@@ -989,8 +992,8 @@ public Set<NodeAttribute> getClusterAttributes()
}

@Override
public Map<NodeAttribute, Set<String>> getAttributesToNodes(
Set<NodeAttribute> attributes) throws YarnException, IOException {
public Map<NodeAttributeKey, List<NodeToAttributeValue>> getAttributesToNodes(
Set<NodeAttributeKey> attributes) throws YarnException, IOException {
GetAttributesToNodesRequest request =
GetAttributesToNodesRequest.newInstance(attributes);
return rmClient.getAttributesToNodes(request).getAttributesToNodes();

@@ -17,21 +17,21 @@
*/
package org.apache.hadoop.yarn.api.protocolrecords.impl.pb;

import com.google.protobuf.TextFormat;
import org.apache.hadoop.yarn.api.protocolrecords.GetAttributesToNodesRequest;
import org.apache.hadoop.yarn.api.records.NodeAttribute;
import org.apache.hadoop.yarn.api.records.impl.pb.NodeAttributePBImpl;
import org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto;

import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Set;

import static org.apache.hadoop.classification.InterfaceAudience.*;
import static org.apache.hadoop.classification.InterfaceStability.*;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.yarn.api.protocolrecords.GetAttributesToNodesRequest;
import org.apache.hadoop.yarn.api.records.NodeAttributeKey;
import org.apache.hadoop.yarn.api.records.impl.pb.NodeAttributeKeyPBImpl;
import org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesRequestProto;

import com.google.protobuf.TextFormat;

/**
* Attributes to nodes mapping request.
@@ -41,7 +41,7 @@
public class GetAttributesToNodesRequestPBImpl
extends GetAttributesToNodesRequest {

private Set<NodeAttribute> nodeAttributes = null;
private Set<NodeAttributeKey> nodeAttributes = null;

private GetAttributesToNodesRequestProto proto =
GetAttributesToNodesRequestProto.getDefaultInstance();
@@ -86,9 +86,9 @@ private void addLocalAttributesToProto() {
if (nodeAttributes == null) {
return;
}
Iterable<NodeAttributeProto> iterable =
() -> new Iterator<NodeAttributeProto>() {
private Iterator<NodeAttribute> iter = nodeAttributes.iterator();
Iterable<NodeAttributeKeyProto> iterable =
() -> new Iterator<NodeAttributeKeyProto>() {
private Iterator<NodeAttributeKey> iter = nodeAttributes.iterator();

@Override
public boolean hasNext() {
@@ -96,7 +96,7 @@ public boolean hasNext() {
}

@Override
public NodeAttributeProto next() {
public NodeAttributeKeyProto next() {
return convertToProtoFormat(iter.next());
}

@@ -110,12 +110,13 @@ public void remove() {
builder.addAllNodeAttributes(iterable);
}

private NodeAttributePBImpl convertFromProtoFormat(NodeAttributeProto p) {
return new NodeAttributePBImpl(p);
private NodeAttributeKeyPBImpl convertFromProtoFormat(
NodeAttributeKeyProto p) {
return new NodeAttributeKeyPBImpl(p);
}

private NodeAttributeProto convertToProtoFormat(NodeAttribute t) {
return ((NodeAttributePBImpl) t).getProto();
private NodeAttributeKeyProto convertToProtoFormat(NodeAttributeKey t) {
return ((NodeAttributeKeyPBImpl) t).getProto();
}

private void maybeInitBuilder() {
@@ -131,7 +132,7 @@ private void initNodeAttributes() {
}
YarnServiceProtos.GetAttributesToNodesRequestProtoOrBuilder p =
viaProto ? proto : builder;
List<NodeAttributeProto> nodeAttributesList = p.getNodeAttributesList();
List<NodeAttributeKeyProto> nodeAttributesList = p.getNodeAttributesList();
this.nodeAttributes = new HashSet<>();
nodeAttributesList
.forEach((v) -> nodeAttributes.add(convertFromProtoFormat(v)));
@@ -159,7 +160,7 @@ public String toString() {
}

@Override
public void setNodeAttributes(Set<NodeAttribute> attributes) {
public void setNodeAttributes(Set<NodeAttributeKey> attributes) {
maybeInitBuilder();
if (nodeAttributes == null) {
builder.clearNodeAttributes();
@@ -168,7 +169,7 @@ public void setNodeAttributes(Set<NodeAttribute> attributes) {
}

@Override
public Set<NodeAttribute> getNodeAttributes() {
public Set<NodeAttributeKey> getNodeAttributes() {
initNodeAttributes();
return this.nodeAttributes;
}

@@ -17,23 +17,24 @@
*/
package org.apache.hadoop.yarn.api.protocolrecords.impl.pb;

import org.apache.hadoop.yarn.api.protocolrecords.GetAttributesToNodesResponse;
import org.apache.hadoop.yarn.api.records.NodeAttribute;
import org.apache.hadoop.yarn.api.records.impl.pb.NodeAttributePBImpl;
import org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto;
import org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;

import static org.apache.hadoop.classification.InterfaceAudience.*;
import static org.apache.hadoop.classification.InterfaceStability.*;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.yarn.api.protocolrecords.GetAttributesToNodesResponse;
import org.apache.hadoop.yarn.api.records.NodeAttributeKey;
import org.apache.hadoop.yarn.api.records.NodeToAttributeValue;
import org.apache.hadoop.yarn.api.records.impl.pb.NodeAttributeKeyPBImpl;
import org.apache.hadoop.yarn.api.records.impl.pb.NodeToAttributeValuePBImpl;
import org.apache.hadoop.yarn.proto.YarnProtos.AttributeToNodesProto;
import org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto;
import org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAttributesToNodesResponseProto;

/**
* Attributes to nodes response.
@@ -48,7 +49,7 @@ public class GetAttributesToNodesResponsePBImpl
private GetAttributesToNodesResponseProto.Builder builder = null;
private boolean viaProto = false;

private Map<NodeAttribute, Set<String>> attributesToNodes;
private Map<NodeAttributeKey, List<NodeToAttributeValue>> attributesToNodes;

public GetAttributesToNodesResponsePBImpl() {
this.builder = GetAttributesToNodesResponseProto.newBuilder();
@@ -70,10 +71,15 @@ private void initAttributesToNodes() {
this.attributesToNodes = new HashMap<>();

for (AttributeToNodesProto c : list) {
Set<String> setNodes = new HashSet<>(c.getHostnamesList());
if (!setNodes.isEmpty()) {
this.attributesToNodes
.put(convertFromProtoFormat(c.getNodeAttribute()), setNodes);
List<NodeToAttributeValueProto> nodeValueMapList =
c.getNodeValueMapList();
List<NodeToAttributeValue> nodeToAttributeValue = new ArrayList<>();
for (NodeToAttributeValueProto valueProto : nodeValueMapList) {
nodeToAttributeValue.add(convertFromProtoFormat(valueProto));
}
if (!nodeToAttributeValue.isEmpty()) {
this.attributesToNodes.put(convertFromProtoFormat(c.getNodeAttribute()),
nodeToAttributeValue);
}
}
}
@@ -94,7 +100,7 @@ private void addAttributesToNodesToProto() {
Iterable<AttributeToNodesProto> iterable =
() -> new Iterator<AttributeToNodesProto>() {

private Iterator<Map.Entry<NodeAttribute, Set<String>>> iter =
private Iterator<Map.Entry<NodeAttributeKey, List<NodeToAttributeValue>>> iter =
attributesToNodes.entrySet().iterator();

@Override
@@ -104,14 +110,18 @@ public void remove() {

@Override
public AttributeToNodesProto next() {
Map.Entry<NodeAttribute, Set<String>> now = iter.next();
Set<String> hostNames = new HashSet<>();
for (String host : now.getValue()) {
hostNames.add(host);
Map.Entry<NodeAttributeKey, List<NodeToAttributeValue>> attrToNodes
= iter.next();

AttributeToNodesProto.Builder attrToNodesBuilder =
AttributeToNodesProto.newBuilder().setNodeAttribute(
convertToProtoFormat(attrToNodes.getKey()));
for (NodeToAttributeValue hostToAttrVal : attrToNodes.getValue()) {
attrToNodesBuilder
.addNodeValueMap(convertToProtoFormat(hostToAttrVal));
}
return AttributeToNodesProto.newBuilder()
.setNodeAttribute(convertToProtoFormat(now.getKey()))
.addAllHostnames(hostNames).build();

return attrToNodesBuilder.build();
}

@Override
@@ -122,12 +132,22 @@ public boolean hasNext() {
builder.addAllAttributesToNodes(iterable);
}

private NodeAttributePBImpl convertFromProtoFormat(NodeAttributeProto p) {
return new NodeAttributePBImpl(p);
private NodeAttributeKey convertFromProtoFormat(NodeAttributeKeyProto p) {
return new NodeAttributeKeyPBImpl(p);
}

private NodeAttributeProto convertToProtoFormat(NodeAttribute t) {
return ((NodeAttributePBImpl) t).getProto();
private NodeAttributeKeyProto convertToProtoFormat(NodeAttributeKey t) {
return ((NodeAttributeKeyPBImpl) t).getProto();
}

private NodeToAttributeValue convertFromProtoFormat(
NodeToAttributeValueProto p) {
return new NodeToAttributeValuePBImpl(p);
}

private NodeToAttributeValueProto convertToProtoFormat(
NodeToAttributeValue t) {
return ((NodeToAttributeValuePBImpl) t).getProto();
}

private void mergeLocalToBuilder() {
@@ -170,14 +190,15 @@ public boolean equals(Object other) {
}

@Override
public void setAttributeToNodes(Map<NodeAttribute, Set<String>> map) {
public void setAttributeToNodes(
Map<NodeAttributeKey, List<NodeToAttributeValue>> map) {
initAttributesToNodes();
attributesToNodes.clear();
attributesToNodes.putAll(map);
}

@Override
public Map<NodeAttribute, Set<String>> getAttributesToNodes() {
public Map<NodeAttributeKey, List<NodeToAttributeValue>> getAttributesToNodes() {
initAttributesToNodes();
return this.attributesToNodes;
}

@@ -17,19 +17,19 @@
*/
package org.apache.hadoop.yarn.api.protocolrecords.impl.pb;

import static org.apache.hadoop.classification.InterfaceAudience.*;
import static org.apache.hadoop.classification.InterfaceStability.*;
import org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodeAttributesResponse;
import org.apache.hadoop.yarn.api.records.NodeAttribute;
import org.apache.hadoop.yarn.api.records.impl.pb.NodeAttributePBImpl;
import org.apache.hadoop.yarn.proto.YarnServiceProtos;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto;
import org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto;

import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;

import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodeAttributesResponse;
import org.apache.hadoop.yarn.api.records.NodeAttributeInfo;
import org.apache.hadoop.yarn.api.records.impl.pb.NodeAttributeInfoPBImpl;
import org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetClusterNodeAttributesResponseProto;

/**
* Cluster node attributes response.
@@ -42,7 +42,7 @@ public class GetClusterNodeAttributesResponsePBImpl
private GetClusterNodeAttributesResponseProto proto =
GetClusterNodeAttributesResponseProto.getDefaultInstance();
private GetClusterNodeAttributesResponseProto.Builder builder = null;
private Set<NodeAttribute> updatedNodeAttributes;
private Set<NodeAttributeInfo> clusterNodeAttributes;
private boolean viaProto = false;

public GetClusterNodeAttributesResponsePBImpl() {
@@ -72,7 +72,7 @@ private void mergeLocalToProto() {
}

private void mergeLocalToBuilder() {
if (this.updatedNodeAttributes != null) {
if (this.clusterNodeAttributes != null) {
addNodeAttributesToProto();
}
}
@@ -80,11 +80,12 @@ private void mergeLocalToBuilder() {
private void addNodeAttributesToProto() {
maybeInitBuilder();
builder.clearNodeAttributes();
List<NodeAttributeProto> protoList = new ArrayList<>();
for (NodeAttribute r : this.updatedNodeAttributes) {
protoList.add(convertToProtoFormat(r));
if (clusterNodeAttributes == null || clusterNodeAttributes.isEmpty()) {
return;
}
builder.addAllNodeAttributes(protoList);

builder.addAllNodeAttributes(clusterNodeAttributes.stream()
.map(s -> convertToProtoFormat(s)).collect(Collectors.toSet()));
}

@Override
@@ -112,41 +113,44 @@ private void maybeInitBuilder() {
}

@Override
public synchronized void setNodeAttributes(Set<NodeAttribute> attributes) {
public synchronized void setNodeAttributes(
Set<NodeAttributeInfo> attributes) {
maybeInitBuilder();
this.updatedNodeAttributes = new HashSet<>();
this.clusterNodeAttributes = new HashSet<>();
if (attributes == null) {
builder.clearNodeAttributes();
return;
}
this.updatedNodeAttributes.addAll(attributes);
this.clusterNodeAttributes.addAll(attributes);
}

@Override
public synchronized Set<NodeAttribute> getNodeAttributes() {
if (this.updatedNodeAttributes != null) {
return this.updatedNodeAttributes;
public synchronized Set<NodeAttributeInfo> getNodeAttributes() {
if (this.clusterNodeAttributes != null) {
return this.clusterNodeAttributes;
}
initLocalNodeAttributes();
return this.updatedNodeAttributes;
return this.clusterNodeAttributes;
}

private void initLocalNodeAttributes() {
YarnServiceProtos.GetClusterNodeAttributesResponseProtoOrBuilder p =
viaProto ? proto : builder;
List<NodeAttributeProto> attributesProtoList = p.getNodeAttributesList();
this.updatedNodeAttributes = new HashSet<>();
for (NodeAttributeProto r : attributesProtoList) {
this.updatedNodeAttributes.add(convertFromProtoFormat(r));
}
List<NodeAttributeInfoProto> attributesProtoList =
p.getNodeAttributesList();
this.clusterNodeAttributes = new HashSet<>();
clusterNodeAttributes.addAll(attributesProtoList.stream()
.map(attr -> convertFromProtoFormat(attr)).collect(Collectors.toSet()));
}

private NodeAttribute convertFromProtoFormat(NodeAttributeProto p) {
return new NodeAttributePBImpl(p);
private NodeAttributeInfoProto convertToProtoFormat(
NodeAttributeInfo attributeInfo) {
return ((NodeAttributeInfoPBImpl)attributeInfo).getProto();
}

private NodeAttributeProto convertToProtoFormat(NodeAttribute t) {
return ((NodeAttributePBImpl) t).getProto();
private NodeAttributeInfo convertFromProtoFormat(
NodeAttributeInfoProto nodeAttributeInfoProto) {
return new NodeAttributeInfoPBImpl(nodeAttributeInfoProto);
}

@Override

@@ -0,0 +1,143 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.yarn.api.records.impl.pb;

import org.apache.hadoop.yarn.api.records.NodeAttributeKey;
import org.apache.hadoop.yarn.api.records.NodeAttributeInfo;
import org.apache.hadoop.yarn.api.records.NodeAttributeType;
import org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto;
import org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto;
import org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProtoOrBuilder;
import org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeTypeProto;

/**
* Implementation for NodeAttributeInfo.
*
*/
public class NodeAttributeInfoPBImpl extends NodeAttributeInfo {
private NodeAttributeInfoProto proto =
NodeAttributeInfoProto.getDefaultInstance();
private NodeAttributeInfoProto.Builder builder = null;
private boolean viaProto = false;

public NodeAttributeInfoPBImpl() {
builder = NodeAttributeInfoProto.newBuilder();
}

public NodeAttributeInfoPBImpl(NodeAttributeInfoProto proto) {
this.proto = proto;
viaProto = true;
}

public NodeAttributeInfoProto getProto() {
proto = viaProto ? proto : builder.build();
viaProto = true;
return proto;
}

private void maybeInitBuilder() {
if (viaProto || builder == null) {
builder = NodeAttributeInfoProto.newBuilder(proto);
}
viaProto = false;
}

@Override
public NodeAttributeKey getAttributeKey() {
NodeAttributeInfoProtoOrBuilder p = viaProto ? proto : builder;
if (!p.hasAttributeKey()) {
return null;
}
return convertFromProtoFormat(p.getAttributeKey());
}

@Override
public void setAttributeKey(NodeAttributeKey attributeKey) {
maybeInitBuilder();
if (attributeKey == null) {
builder.clearAttributeKey();
return;
}
builder.setAttributeKey(convertToProtoFormat(attributeKey));
}

@Override
public NodeAttributeType getAttributeType() {
NodeAttributeInfoProtoOrBuilder p = viaProto ? proto : builder;
if (!p.hasAttributeType()) {
return null;
}
return convertFromProtoFormat(p.getAttributeType());
}

@Override
public void setAttributeType(NodeAttributeType attributeType) {
maybeInitBuilder();
if (attributeType == null) {
builder.clearAttributeType();
return;
}
builder.setAttributeType(convertToProtoFormat(attributeType));
}

private NodeAttributeTypeProto convertToProtoFormat(
NodeAttributeType attributeType) {
return NodeAttributeTypeProto.valueOf(attributeType.name());
}

private NodeAttributeType convertFromProtoFormat(
NodeAttributeTypeProto containerState) {
return NodeAttributeType.valueOf(containerState.name());
}

private NodeAttributeKeyPBImpl convertFromProtoFormat(
NodeAttributeKeyProto attributeKeyProto) {
return new NodeAttributeKeyPBImpl(attributeKeyProto);
}

private NodeAttributeKeyProto convertToProtoFormat(
NodeAttributeKey attributeKey) {
return ((NodeAttributeKeyPBImpl) attributeKey).getProto();
}

@Override
public int hashCode() {
return getAttributeKey().hashCode();
}

@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (obj instanceof NodeAttributeInfo) {
NodeAttributeInfo other = (NodeAttributeInfo) obj;
getAttributeKey().equals(other.getAttributeKey());
return true;
}
return false;
}

@Override
public String toString() {
return getAttributeKey().toString() + ":Type-" + getAttributeType();
}
}

@@ -0,0 +1,140 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.hadoop.yarn.api.records.impl.pb;

import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.yarn.api.records.NodeAttributeKey;
import org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto;
import org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProtoOrBuilder;

/**
* Implementation for NodeAttributeKey.
*
*/
@Private
@Unstable
public class NodeAttributeKeyPBImpl extends NodeAttributeKey {
private NodeAttributeKeyProto proto =
NodeAttributeKeyProto.getDefaultInstance();
private NodeAttributeKeyProto.Builder builder = null;
private boolean viaProto = false;

public NodeAttributeKeyPBImpl() {
builder = NodeAttributeKeyProto.newBuilder();
}

public NodeAttributeKeyPBImpl(NodeAttributeKeyProto proto) {
this.proto = proto;
viaProto = true;
}

public NodeAttributeKeyProto getProto() {
proto = viaProto ? proto : builder.build();
viaProto = true;
return proto;
}

private void maybeInitBuilder() {
if (viaProto || builder == null) {
builder = NodeAttributeKeyProto.newBuilder(proto);
}
viaProto = false;
}

@Override
public String getAttributePrefix() {
NodeAttributeKeyProtoOrBuilder p = viaProto ? proto : builder;
return p.getAttributePrefix();
}

@Override
public void setAttributePrefix(String attributePrefix) {
maybeInitBuilder();
if (attributePrefix == null) {
builder.clearAttributePrefix();
return;
}
builder.setAttributePrefix(attributePrefix);
}

@Override
public String getAttributeName() {
NodeAttributeKeyProtoOrBuilder p = viaProto ? proto : builder;
if (!p.hasAttributeName()) {
return null;
}
return p.getAttributeName();
}

@Override
public void setAttributeName(String attributeName) {
maybeInitBuilder();
if (attributeName == null) {
builder.clearAttributeName();
return;
}
builder.setAttributeName(attributeName);
}

@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((getAttributePrefix() == null) ? 0
: getAttributePrefix().hashCode());
result = prime * result
+ ((getAttributeName() == null) ? 0 : getAttributeName().hashCode());
return result;
}

@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (obj instanceof NodeAttributeKey) {
NodeAttributeKey other = (NodeAttributeKey) obj;
if (!compare(getAttributePrefix(), other.getAttributePrefix())) {
return false;
}
if (!compare(getAttributeName(), other.getAttributeName())) {
return false;
}
return true;
}
return false;
}

private static boolean compare(Object left, Object right) {
if (left == null) {
return right == null;
} else {
return left.equals(right);
}
}

@Override
public String toString() {
return "Prefix-" + getAttributePrefix() + " :Name-" + getAttributeName();
}
}

@@ -18,12 +18,21 @@

package org.apache.hadoop.yarn.api.records.impl.pb;

import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.yarn.api.records.NodeAttribute;
import org.apache.hadoop.yarn.api.records.NodeAttributeKey;
import org.apache.hadoop.yarn.api.records.NodeAttributeType;
import org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto;
import org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto;
import org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProtoOrBuilder;
import org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeTypeProto;

/**
* Implementation for NodeAttribute.
*/
@Private
@Unstable
public class NodeAttributePBImpl extends NodeAttribute {
private NodeAttributeProto proto = NodeAttributeProto.getDefaultInstance();
private NodeAttributeProto.Builder builder = null;
@@ -52,22 +61,22 @@ private void maybeInitBuilder() {
}

@Override
public String getAttributeName() {
public NodeAttributeKey getAttributeKey() {
NodeAttributeProtoOrBuilder p = viaProto ? proto : builder;
if (!p.hasAttributeName()) {
if (!p.hasAttributeKey()) {
return null;
}
return p.getAttributeName();
return convertFromProtoFormat(p.getAttributeKey());
}

@Override
public void setAttributeName(String attributeName) {
public void setAttributeKey(NodeAttributeKey attributeKey) {
maybeInitBuilder();
if(attributeName == null) {
builder.clearAttributeName();
if(attributeKey == null) {
builder.clearAttributeKey();
return;
}
builder.setAttributeName(attributeName);
builder.setAttributeKey(convertToProtoFormat(attributeKey));
}

@Override
@@ -118,15 +127,19 @@ private NodeAttributeType convertFromProtoFormat(
return NodeAttributeType.valueOf(containerState.name());
}

private NodeAttributeKeyPBImpl convertFromProtoFormat(
NodeAttributeKeyProto attributeKeyProto) {
return new NodeAttributeKeyPBImpl(attributeKeyProto);
}

private NodeAttributeKeyProto convertToProtoFormat(
NodeAttributeKey attributeKey) {
return ((NodeAttributeKeyPBImpl)attributeKey).getProto();
}

@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result + ((getAttributePrefix() == null) ? 0
: getAttributePrefix().hashCode());
result = prime * result
+ ((getAttributeName() == null) ? 0 : getAttributeName().hashCode());
return result;
return getAttributeKey().hashCode();
}

@Override
@@ -139,47 +152,15 @@ public boolean equals(Object obj) {
}
if (obj instanceof NodeAttribute) {
NodeAttribute other = (NodeAttribute) obj;
if (!compare(getAttributePrefix(), other.getAttributePrefix())) {
return false;
}
if (!compare(getAttributeName(), other.getAttributeName())) {
return false;
}
getAttributeKey().equals(other.getAttributeKey());
return true;
}
return false;
}

private static boolean compare(Object left, Object right) {
if (left == null) {
return right == null;
} else {
return left.equals(right);
}
}

@Override
public String getAttributePrefix() {
NodeAttributeProtoOrBuilder p = viaProto ? proto : builder;
if (!p.hasAttributePrefix()) {
return null;
}
return p.getAttributePrefix();
}

@Override
public void setAttributePrefix(String attributePrefix) {
maybeInitBuilder();
if(attributePrefix == null) {
builder.clearAttributePrefix();
return;
}
builder.setAttributePrefix(attributePrefix);
}

@Override
public String toString() {
return "Prefix-" + getAttributePrefix() + " :Name-" + getAttributeName()
+ ":Value-" + getAttributeValue() + ":Type-" + getAttributeType();
return getAttributeKey().toString() + ":Value-" + getAttributeValue()
+ ":Type-" + getAttributeType();
}
}

@ -0,0 +1,137 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package org.apache.hadoop.yarn.api.records.impl.pb;

import org.apache.hadoop.yarn.api.records.NodeToAttributeValue;
import org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto;
import org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProtoOrBuilder;

/**
* PB Implementation for NodeToAttributeValue.
*
*/
public class NodeToAttributeValuePBImpl extends NodeToAttributeValue {
private NodeToAttributeValueProto proto =
NodeToAttributeValueProto.getDefaultInstance();
private NodeToAttributeValueProto.Builder builder = null;
private boolean viaProto = false;

public NodeToAttributeValuePBImpl() {
builder = NodeToAttributeValueProto.newBuilder();
}

public NodeToAttributeValuePBImpl(NodeToAttributeValueProto proto) {
this.proto = proto;
viaProto = true;
}

public NodeToAttributeValueProto getProto() {
proto = viaProto ? proto : builder.build();
viaProto = true;
return proto;
}

private void maybeInitBuilder() {
if (viaProto || builder == null) {
builder = NodeToAttributeValueProto.newBuilder(proto);
}
viaProto = false;
}

@Override
public String getAttributeValue() {
NodeToAttributeValueProtoOrBuilder p = viaProto ? proto : builder;
return p.getAttributeValue();
}

@Override
public void setAttributeValue(String attributeValue) {
maybeInitBuilder();
if (attributeValue == null) {
builder.clearAttributeValue();
return;
}
builder.setAttributeValue(attributeValue);
}

@Override
public String getHostname() {
NodeToAttributeValueProtoOrBuilder p = viaProto ? proto : builder;
if (!p.hasHostname()) {
return null;
}
return p.getHostname();
}

@Override
public void setHostname(String hostname) {
maybeInitBuilder();
if (hostname == null) {
builder.clearHostname();
return;
}
builder.setHostname(hostname);
}

@Override
public int hashCode() {
final int prime = 31;
int result = 1;
result = prime * result
+ ((getAttributeValue() == null) ? 0 : getAttributeValue().hashCode());
result = prime * result
+ ((getHostname() == null) ? 0 : getHostname().hashCode());
return result;
}

@Override
public boolean equals(Object obj) {
if (this == obj) {
return true;
}
if (obj == null) {
return false;
}
if (obj instanceof NodeToAttributeValue) {
NodeToAttributeValue other = (NodeToAttributeValue) obj;
if (!compare(getAttributeValue(), other.getAttributeValue())) {
return false;
}
if (!compare(getHostname(), other.getHostname())) {
return false;
}
return true;
}
return false;
}

private static boolean compare(Object left, Object right) {
if (left == null) {
return right == null;
} else {
return left.equals(right);
}
}

@Override
public String toString() {
return "Name-" + getHostname() + " : Attribute Value-"
+ getAttributeValue();
}
}
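Not part of the patch: a minimal usage sketch of the NodeToAttributeValue record whose PB implementation is added above. It assumes the NodeToAttributeValue.newInstance(hostname, value) factory that this change uses later in ClientRMService; the class name below is invented for illustration only.

// Illustrative sketch, not committed code.
import org.apache.hadoop.yarn.api.records.NodeToAttributeValue;

class NodeToAttributeValueSketch {
  public static void main(String[] args) {
    // Build a record mapping a host to the value it reports for an attribute.
    NodeToAttributeValue mapping =
        NodeToAttributeValue.newInstance("host1", "docker0");
    // Read back the two fields exposed by the PB implementation above.
    System.out.println(mapping.getHostname() + " -> "
        + mapping.getAttributeValue());
  }
}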
@ -25,6 +25,7 @@

import org.apache.hadoop.service.AbstractService;
import org.apache.hadoop.yarn.api.records.NodeAttribute;
import org.apache.hadoop.yarn.api.records.NodeAttributeKey;
import org.apache.hadoop.yarn.server.api.protocolrecords.NodeToAttributes;

/**
@ -81,20 +82,21 @@ public abstract void removeNodeAttributes(
*
* @param prefix set of prefix string's for which the attributes needs to
* returned
* @return set of node Attributes
* @return Set of node Attributes
*/
public abstract Set<NodeAttribute> getClusterNodeAttributes(
Set<String> prefix);

/**
* Given a attribute set, return what all Nodes have attribute mapped to it.
* If the attributes set is null or empty, all attributes mapping are
* returned.
* Return a map of Nodes to attribute value for the given NodeAttributeKeys.
* If the attributeKeys set is null or empty, then mapping for all attributes
* are returned.
*
* @return a Map of attributes to set of hostnames.
* @return a Map of attributeKeys to a map of hostnames to its attribute
* values.
*/
public abstract Map<NodeAttribute, Set<String>> getAttributesToNodes(
Set<NodeAttribute> attributes);
public abstract Map<NodeAttributeKey, Map<String, AttributeValue>> getAttributesToNodes(
Set<NodeAttributeKey> attributes);

/**
* NodeAttribute to AttributeValue Map.
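Not part of the patch: a sketch of how a caller might walk the reshaped Map<NodeAttributeKey, Map<String, AttributeValue>> returned by getAttributesToNodes() above. The wrapper class and method name are invented for illustration; only the manager API and record getters come from this change.

// Illustrative sketch, not committed code.
import java.util.Map;
import org.apache.hadoop.yarn.api.records.NodeAttributeKey;
import org.apache.hadoop.yarn.nodelabels.AttributeValue;
import org.apache.hadoop.yarn.nodelabels.NodeAttributesManager;

class AttributesToNodesSketch {
  static void print(NodeAttributesManager mgr) {
    // A null (or empty) key set asks for the mapping of every attribute.
    Map<NodeAttributeKey, Map<String, AttributeValue>> all =
        mgr.getAttributesToNodes(null);
    for (Map.Entry<NodeAttributeKey, Map<String, AttributeValue>> e
        : all.entrySet()) {
      for (Map.Entry<String, AttributeValue> hostVal : e.getValue().entrySet()) {
        // hostname -> the value that host reports for this attribute key
        System.out.println(e.getKey().getAttributePrefix() + "/"
            + e.getKey().getAttributeName() + " on " + hostVal.getKey()
            + " = " + hostVal.getValue().getValue());
      }
    }
  }
}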
@ -19,6 +19,7 @@

import com.google.common.base.Strings;
import org.apache.hadoop.yarn.api.records.NodeAttribute;
import org.apache.hadoop.yarn.api.records.NodeAttributeKey;

import java.io.IOException;
import java.util.Set;
@ -115,14 +116,18 @@ public static void validateNodeAttributes(Set<NodeAttribute> attributeSet)
throws IOException {
if (attributeSet != null && !attributeSet.isEmpty()) {
for (NodeAttribute nodeAttribute : attributeSet) {
String prefix = nodeAttribute.getAttributePrefix();
NodeAttributeKey attributeKey = nodeAttribute.getAttributeKey();
if (attributeKey == null) {
throw new IOException("AttributeKey must be set");
}
String prefix = attributeKey.getAttributePrefix();
if (Strings.isNullOrEmpty(prefix)) {
throw new IOException("Attribute prefix must be set");
}
// Verify attribute prefix format.
checkAndThrowAttributePrefix(prefix);
// Verify attribute name format.
checkAndThrowLabelName(nodeAttribute.getAttributeName());
checkAndThrowLabelName(attributeKey.getAttributeName());
}
}
}
@ -140,8 +145,9 @@ public static Set<NodeAttribute> filterAttributesByPrefix(
if (Strings.isNullOrEmpty(prefix)) {
return attributeSet;
}
return attributeSet.stream().filter(
nodeAttribute -> prefix.equals(nodeAttribute.getAttributePrefix()))
return attributeSet.stream()
.filter(nodeAttribute -> prefix
.equals(nodeAttribute.getAttributeKey().getAttributePrefix()))
.collect(Collectors.toSet());
}
}
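Not part of the patch: a sketch of an attribute that passes the validation shown above, i.e. one whose NodeAttributeKey carries a non-empty, well-formed prefix and name. The package of NodeLabelUtil is not restated by this hunk and is assumed here to be org.apache.hadoop.yarn.nodelabels; the sketch class is invented for illustration.

// Illustrative sketch, not committed code.
import java.io.IOException;
import java.util.Collections;
import org.apache.hadoop.yarn.api.records.NodeAttribute;
import org.apache.hadoop.yarn.api.records.NodeAttributeType;
import org.apache.hadoop.yarn.nodelabels.NodeLabelUtil; // assumed package

class ValidateAttributeSketch {
  static void check() throws IOException {
    NodeAttribute attr = NodeAttribute.newInstance(
        NodeAttribute.PREFIX_CENTRALIZED, "GPU",
        NodeAttributeType.STRING, "nvidia");
    // Throws IOException if the key, its prefix or its name is missing/malformed.
    NodeLabelUtil.validateNodeAttributes(Collections.singleton(attr));
  }
}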
@ -18,8 +18,8 @@

package org.apache.hadoop.yarn.nodelabels;

import java.util.HashSet;
import java.util.Set;
import java.util.HashMap;
import java.util.Map;

import org.apache.hadoop.yarn.api.records.NodeAttribute;
import org.apache.hadoop.yarn.api.records.NodeAttributeType;
@ -32,11 +32,11 @@ public class RMNodeAttribute extends AbstractLabel {

private NodeAttribute attribute;
// TODO need to revisit whether we need to make this concurrent implementation
private Set<String> nodes = new HashSet<>();
private Map<String, AttributeValue> nodes = new HashMap<>();

public RMNodeAttribute(NodeAttribute attribute) {
this(attribute.getAttributeName(), Resource.newInstance(0, 0), 0,
attribute);
this(attribute.getAttributeKey().getAttributeName(),
Resource.newInstance(0, 0), 0, attribute);
}

public RMNodeAttribute(String labelName, Resource res, int activeNMs,
@ -57,16 +57,16 @@ public NodeAttributeType getAttributeType() {
return attribute.getAttributeType();
}

public void addNode(String node) {
nodes.add(node);
public void addNode(String node, AttributeValue attributeValue) {
nodes.put(node, attributeValue);
}

public void removeNode(String node) {
nodes.remove(node);
}

public Set<String> getAssociatedNodeIds() {
return new HashSet<String>(nodes);
public Map<String, AttributeValue> getAssociatedNodeIds() {
return new HashMap<String, AttributeValue>(nodes);
}

@Override
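Not part of the patch: with the Map-based change above, RMNodeAttribute can remember which value each host reported for the same attribute key instead of only remembering host names. A minimal sketch, assuming the sketch class lives in the same org.apache.hadoop.yarn.nodelabels package so RMNodeAttribute and AttributeValue resolve directly; the class and method names are invented.

// Illustrative sketch, not committed code.
package org.apache.hadoop.yarn.nodelabels;

import java.util.Map;
import org.apache.hadoop.yarn.api.records.NodeAttribute;

class RMNodeAttributeSketch {
  static Map<String, AttributeValue> track(NodeAttribute attribute,
      String host, AttributeValue reportedValue) {
    RMNodeAttribute rmAttr = new RMNodeAttribute(attribute);
    rmAttr.addNode(host, reportedValue);   // host -> the value it reported
    return rmAttr.getAssociatedNodeIds();  // defensive copy of the host map
  }
}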
@ -17,6 +17,7 @@
*/
package org.apache.hadoop.yarn.api;
import java.io.IOException;
import java.util.Arrays;

import org.apache.commons.lang3.Range;
import org.apache.hadoop.security.proto.SecurityProtos.CancelDelegationTokenRequestProto;
@ -112,7 +113,6 @@
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.StopContainersResponsePBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.SubmitApplicationRequestPBImpl;
import org.apache.hadoop.yarn.api.protocolrecords.impl.pb.SubmitApplicationResponsePBImpl;
import org.apache.hadoop.yarn.api.records.CollectorInfo;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptId;
import org.apache.hadoop.yarn.api.records.ApplicationAttemptReport;
import org.apache.hadoop.yarn.api.records.ApplicationId;
@ -120,6 +120,7 @@
import org.apache.hadoop.yarn.api.records.ApplicationResourceUsageReport;
import org.apache.hadoop.yarn.api.records.ApplicationSubmissionContext;
import org.apache.hadoop.yarn.api.records.ApplicationTimeout;
import org.apache.hadoop.yarn.api.records.CollectorInfo;
import org.apache.hadoop.yarn.api.records.Container;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
@ -131,9 +132,12 @@
import org.apache.hadoop.yarn.api.records.LogAggregationContext;
import org.apache.hadoop.yarn.api.records.NMToken;
import org.apache.hadoop.yarn.api.records.NodeAttribute;
import org.apache.hadoop.yarn.api.records.NodeAttributeKey;
import org.apache.hadoop.yarn.api.records.NodeAttributeInfo;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.api.records.NodeLabel;
import org.apache.hadoop.yarn.api.records.NodeReport;
import org.apache.hadoop.yarn.api.records.NodeToAttributeValue;
import org.apache.hadoop.yarn.api.records.PreemptionContainer;
import org.apache.hadoop.yarn.api.records.PreemptionContract;
import org.apache.hadoop.yarn.api.records.PreemptionMessage;
@ -152,8 +156,8 @@
import org.apache.hadoop.yarn.api.records.ReservationRequests;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.api.records.ResourceAllocationRequest;
import org.apache.hadoop.yarn.api.records.ResourceInformation;
import org.apache.hadoop.yarn.api.records.ResourceBlacklistRequest;
import org.apache.hadoop.yarn.api.records.ResourceInformation;
import org.apache.hadoop.yarn.api.records.ResourceOption;
import org.apache.hadoop.yarn.api.records.ResourceRequest;
import org.apache.hadoop.yarn.api.records.ResourceSizing;
@ -183,10 +187,13 @@
import org.apache.hadoop.yarn.api.records.impl.pb.ExecutionTypeRequestPBImpl;
import org.apache.hadoop.yarn.api.records.impl.pb.LocalResourcePBImpl;
import org.apache.hadoop.yarn.api.records.impl.pb.NMTokenPBImpl;
import org.apache.hadoop.yarn.api.records.impl.pb.NodeAttributeKeyPBImpl;
import org.apache.hadoop.yarn.api.records.impl.pb.NodeAttributeInfoPBImpl;
import org.apache.hadoop.yarn.api.records.impl.pb.NodeAttributePBImpl;
import org.apache.hadoop.yarn.api.records.impl.pb.NodeIdPBImpl;
import org.apache.hadoop.yarn.api.records.impl.pb.NodeLabelPBImpl;
import org.apache.hadoop.yarn.api.records.impl.pb.NodeReportPBImpl;
import org.apache.hadoop.yarn.api.records.impl.pb.NodeToAttributeValuePBImpl;
import org.apache.hadoop.yarn.api.records.impl.pb.PreemptionContainerPBImpl;
import org.apache.hadoop.yarn.api.records.impl.pb.PreemptionContractPBImpl;
import org.apache.hadoop.yarn.api.records.impl.pb.PreemptionMessagePBImpl;
@ -222,10 +229,14 @@
import org.apache.hadoop.yarn.proto.YarnProtos.ContainerStatusProto;
import org.apache.hadoop.yarn.proto.YarnProtos.ExecutionTypeRequestProto;
import org.apache.hadoop.yarn.proto.YarnProtos.LocalResourceProto;
import org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeKeyProto;
import org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeInfoProto;
import org.apache.hadoop.yarn.proto.YarnProtos.NodeAttributeProto;
import org.apache.hadoop.yarn.proto.YarnProtos.NodeIdProto;
import org.apache.hadoop.yarn.proto.YarnProtos.NodeLabelProto;
import org.apache.hadoop.yarn.proto.YarnProtos.NodeReportProto;
import org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributeValueProto;
import org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto;
import org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContainerProto;
import org.apache.hadoop.yarn.proto.YarnProtos.PreemptionContractProto;
import org.apache.hadoop.yarn.proto.YarnProtos.PreemptionMessageProto;
@ -243,7 +254,6 @@
import org.apache.hadoop.yarn.proto.YarnProtos.StrictPreemptionContractProto;
import org.apache.hadoop.yarn.proto.YarnProtos.URLProto;
import org.apache.hadoop.yarn.proto.YarnProtos.YarnClusterMetricsProto;
import org.apache.hadoop.yarn.proto.YarnProtos.NodeToAttributesProto;
import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsRequestProto;
import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.AddToClusterNodeLabelsResponseProto;
import org.apache.hadoop.yarn.proto.YarnServerResourceManagerServiceProtos.CheckForDecommissioningNodesRequestProto;
@ -274,6 +284,7 @@
import org.apache.hadoop.yarn.proto.YarnServiceProtos.AllocateResponseProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterRequestProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.FinishApplicationMasterResponseProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportRequestProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptReportResponseProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetApplicationAttemptsRequestProto;
@ -304,6 +315,8 @@
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueInfoResponseProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoRequestProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetQueueUserAclsInfoResponseProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceRequestProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.IncreaseContainersResourceResponseProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.KillApplicationRequestProto;
@ -328,9 +341,6 @@
import org.apache.hadoop.yarn.proto.YarnServiceProtos.StopContainersResponseProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationRequestProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.SubmitApplicationResponseProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetAllResourceProfilesResponseProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileRequestProto;
import org.apache.hadoop.yarn.proto.YarnServiceProtos.GetResourceProfileResponseProto;
import org.apache.hadoop.yarn.server.api.protocolrecords.NodeToAttributes;
import org.apache.hadoop.yarn.server.api.protocolrecords.NodesToAttributesMappingRequest;
import org.apache.hadoop.yarn.server.api.protocolrecords.impl.pb.AddToClusterNodeLabelsRequestPBImpl;
@ -366,7 +376,6 @@
import org.junit.Test;

import com.google.common.collect.ImmutableSet;
import java.util.Arrays;

/**
* Test class for YARN API protocol records.
@ -450,10 +459,12 @@ public static void setup() throws Exception {
generateByNewInstance(SchedulingRequest.class);
generateByNewInstance(RejectedSchedulingRequest.class);
//for Node attribute support
generateByNewInstance(NodeAttributeKey.class);
generateByNewInstance(NodeAttribute.class);
generateByNewInstance(NodeToAttributes.class);
generateByNewInstance(NodeToAttributeValue.class);
generateByNewInstance(NodeAttributeInfo.class);
generateByNewInstance(NodesToAttributesMappingRequest.class);

}

@Test
@ -1249,11 +1260,29 @@ public void testGetAllResourceTypesInfoResponsePBImpl() throws Exception {
YarnServiceProtos.GetAllResourceTypeInfoResponseProto.class);
}

@Test
public void testNodeAttributeKeyPBImpl() throws Exception {
validatePBImplRecord(NodeAttributeKeyPBImpl.class,
NodeAttributeKeyProto.class);
}

@Test
public void testNodeToAttributeValuePBImpl() throws Exception {
validatePBImplRecord(NodeToAttributeValuePBImpl.class,
NodeToAttributeValueProto.class);
}

@Test
public void testNodeAttributePBImpl() throws Exception {
validatePBImplRecord(NodeAttributePBImpl.class, NodeAttributeProto.class);
}

@Test
public void testNodeAttributeInfoPBImpl() throws Exception {
validatePBImplRecord(NodeAttributeInfoPBImpl.class,
NodeAttributeInfoProto.class);
}

@Test
public void testNodeToAttributesPBImpl() throws Exception {
validatePBImplRecord(NodeToAttributesPBImpl.class,
@ -22,6 +22,7 @@
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.yarn.api.records.NodeAttribute;
import org.apache.hadoop.yarn.api.records.NodeAttributeKey;
import org.apache.hadoop.yarn.api.records.NodeAttributeType;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.junit.BeforeClass;
@ -120,7 +121,8 @@ public void testNodeAttributesFetchInterval()
while(times>0) {
Set<NodeAttribute> current = spyProvider.getDescriptors();
Assert.assertEquals(1, current.size());
String attributeName = current.iterator().next().getAttributeName();
String attributeName =
current.iterator().next().getAttributeKey().getAttributeName();
if ("host".equals(attributeName)){
numOfOldValue++;
} else if ("os".equals(attributeName)) {
@ -173,7 +175,7 @@ public void testDisableFetchNodeAttributes() throws IOException,
GenericTestUtils.waitFor(() -> {
Set<NodeAttribute> attributes = spyProvider.getDescriptors();
return "os".equalsIgnoreCase(attributes
.iterator().next().getAttributeName());
.iterator().next().getAttributeKey().getAttributeName());
}, 500, 1000);
} catch (Exception e) {
// Make sure we get the timeout exception.
@ -204,21 +206,22 @@ public void testParseConfiguration() throws IOException {
Iterator<NodeAttribute> ait = attributes.iterator();

while(ait.hasNext()) {
NodeAttribute at = ait.next();
NodeAttribute attr = ait.next();
NodeAttributeKey at = attr.getAttributeKey();
if (at.getAttributeName().equals("hostname")) {
Assert.assertEquals("hostname", at.getAttributeName());
Assert.assertEquals(NodeAttribute.PREFIX_DISTRIBUTED,
at.getAttributePrefix());
Assert.assertEquals(NodeAttributeType.STRING,
at.getAttributeType());
Assert.assertEquals("host1234", at.getAttributeValue());
attr.getAttributeType());
Assert.assertEquals("host1234", attr.getAttributeValue());
} else if (at.getAttributeName().equals("uptime")) {
Assert.assertEquals("uptime", at.getAttributeName());
Assert.assertEquals(NodeAttribute.PREFIX_DISTRIBUTED,
at.getAttributePrefix());
Assert.assertEquals(NodeAttributeType.STRING,
at.getAttributeType());
Assert.assertEquals("321543", at.getAttributeValue());
attr.getAttributeType());
Assert.assertEquals("321543", attr.getAttributeValue());
} else {
Assert.fail("Unexpected attribute");
}
@ -129,7 +129,7 @@ public void testNodeAttributeScriptProvider()
.getDescriptors().iterator();
while (it.hasNext()) {
NodeAttribute att = it.next();
switch (att.getAttributeName()) {
switch (att.getAttributeKey().getAttributeName()) {
case "host":
Assert.assertEquals(NodeAttributeType.STRING, att.getAttributeType());
Assert.assertEquals("host1234", att.getAttributeValue());
@ -143,7 +143,8 @@ public void testNodeAttributeScriptProvider()
Assert.assertEquals("10.0.0.1", att.getAttributeValue());
break;
default:
Assert.fail("Unexpected attribute name " + att.getAttributeName());
Assert.fail("Unexpected attribute name "
+ att.getAttributeKey().getAttributeName());
break;
}
}
@ -1035,7 +1035,7 @@ private Map<String, Set<NodeAttribute>> validateAndFetch(
List<NodeAttribute> nodeAttributes = nodeToAttributes.getNodeAttributes();
if (!nodeAttributes.stream()
.allMatch(nodeAttribute -> NodeAttribute.PREFIX_CENTRALIZED
.equals(nodeAttribute.getAttributePrefix()))) {
.equals(nodeAttribute.getAttributeKey().getAttributePrefix()))) {
throw new IOException("Invalid Attribute Mapping for the node " + node
+ ". Prefix should be " + NodeAttribute.PREFIX_CENTRALIZED);
}
@ -29,12 +29,14 @@
import java.util.Collection;
import java.util.Collections;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;

import org.apache.commons.cli.UnrecognizedOptionException;
import org.apache.commons.lang3.Range;
@ -134,8 +136,11 @@
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.ContainerReport;
import org.apache.hadoop.yarn.api.records.NodeAttribute;
import org.apache.hadoop.yarn.api.records.NodeAttributeKey;
import org.apache.hadoop.yarn.api.records.NodeAttributeInfo;
import org.apache.hadoop.yarn.api.records.NodeReport;
import org.apache.hadoop.yarn.api.records.NodeState;
import org.apache.hadoop.yarn.api.records.NodeToAttributeValue;
import org.apache.hadoop.yarn.api.records.Priority;
import org.apache.hadoop.yarn.api.records.QueueACL;
import org.apache.hadoop.yarn.api.records.QueueInfo;
@ -155,6 +160,7 @@
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
import org.apache.hadoop.yarn.ipc.RPCUtil;
import org.apache.hadoop.yarn.ipc.YarnRPC;
import org.apache.hadoop.yarn.nodelabels.AttributeValue;
import org.apache.hadoop.yarn.nodelabels.NodeAttributesManager;
import org.apache.hadoop.yarn.security.client.RMDelegationTokenIdentifier;
import org.apache.hadoop.yarn.server.resourcemanager.RMAuditLogger.AuditConstants;
@ -1851,9 +1857,23 @@ public GetAttributesToNodesResponse getAttributesToNodes(
GetAttributesToNodesRequest request) throws YarnException, IOException {
NodeAttributesManager attributesManager =
rmContext.getNodeAttributesManager();
GetAttributesToNodesResponse response = GetAttributesToNodesResponse
.newInstance(attributesManager
.getAttributesToNodes(request.getNodeAttributes()));
Map<NodeAttributeKey, List<NodeToAttributeValue>> attrToNodesWithStrVal =
new HashMap<>();
Map<NodeAttributeKey, Map<String, AttributeValue>> attributesToNodes =
attributesManager.getAttributesToNodes(request.getNodeAttributes());
for (Map.Entry<NodeAttributeKey, Map<String, AttributeValue>> attrib :
attributesToNodes.entrySet()) {
Map<String, AttributeValue> nodesToVal = attrib.getValue();
List<NodeToAttributeValue> nodeToAttrValList = new ArrayList<>();
for (Map.Entry<String, AttributeValue> nodeToVal : nodesToVal
.entrySet()) {
nodeToAttrValList.add(NodeToAttributeValue
.newInstance(nodeToVal.getKey(), nodeToVal.getValue().getValue()));
}
attrToNodesWithStrVal.put(attrib.getKey(), nodeToAttrValList);
}
GetAttributesToNodesResponse response =
GetAttributesToNodesResponse.newInstance(attrToNodesWithStrVal);
return response;
}

@ -1865,8 +1885,11 @@ public GetClusterNodeAttributesResponse getClusterNodeAttributes(
rmContext.getNodeAttributesManager();
Set<NodeAttribute> attributes =
attributesManager.getClusterNodeAttributes(null);

GetClusterNodeAttributesResponse response =
GetClusterNodeAttributesResponse.newInstance(attributes);
GetClusterNodeAttributesResponse.newInstance(
attributes.stream().map(attr -> NodeAttributeInfo.newInstance(attr))
.collect(Collectors.toSet()));
return response;
}

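Not part of the patch: a sketch of what the reshaped GetAttributesToNodes response built above looks like to a caller of the client protocol. The sketch class is invented; the protocol proxy is passed in as a placeholder parameter.

// Illustrative sketch, not committed code.
import java.util.List;
import java.util.Map;
import org.apache.hadoop.yarn.api.ApplicationClientProtocol;
import org.apache.hadoop.yarn.api.protocolrecords.GetAttributesToNodesRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetAttributesToNodesResponse;
import org.apache.hadoop.yarn.api.records.NodeAttributeKey;
import org.apache.hadoop.yarn.api.records.NodeToAttributeValue;

class AttributesToNodesClientSketch {
  static void dump(ApplicationClientProtocol client) throws Exception {
    // An empty request asks for every attribute key in the cluster.
    GetAttributesToNodesRequest req = GetAttributesToNodesRequest.newInstance();
    GetAttributesToNodesResponse resp = client.getAttributesToNodes(req);
    Map<NodeAttributeKey, List<NodeToAttributeValue>> attrsToNodes =
        resp.getAttributesToNodes();
    for (Map.Entry<NodeAttributeKey, List<NodeToAttributeValue>> e
        : attrsToNodes.entrySet()) {
      for (NodeToAttributeValue val : e.getValue()) {
        // Each entry now carries the host and the value it reported.
        System.out.println(e.getKey().getAttributeName() + " : "
            + val.getHostname() + " -> " + val.getAttributeValue());
      }
    }
  }
}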
@ -660,7 +660,7 @@ public NodeHeartbeatResponse nodeHeartbeat(NodeHeartbeatRequest request)
// Validate attributes
if (!nodeAttributes.stream().allMatch(
nodeAttribute -> NodeAttribute.PREFIX_DISTRIBUTED
.equals(nodeAttribute.getAttributePrefix()))) {
.equals(nodeAttribute.getAttributeKey().getAttributePrefix()))) {
// All attributes must be in same prefix: nm.yarn.io.
// Since we have the checks in NM to make sure attributes reported
// in HB are with correct prefix, so it should not reach here.
@ -19,21 +19,20 @@
package org.apache.hadoop.yarn.server.resourcemanager.nodelabels;

import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import java.util.TreeMap;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentHashMap.KeySetView;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock.ReadLock;
import java.util.concurrent.locks.ReentrantReadWriteLock.WriteLock;
import java.util.ArrayList;
import java.util.List;

import com.google.common.base.Strings;
import org.apache.commons.lang3.StringUtils;
@ -42,6 +41,7 @@
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.yarn.api.records.NodeAttribute;
import org.apache.hadoop.yarn.api.records.NodeAttributeKey;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.api.records.Resource;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
@ -58,6 +58,8 @@
import org.apache.hadoop.yarn.server.api.protocolrecords.AttributeMappingOperationType;
import org.apache.hadoop.yarn.server.api.protocolrecords.NodeToAttributes;

import com.google.common.base.Strings;

/**
* Manager holding the attributes to Labels.
*/
@ -75,8 +77,8 @@ public class NodeAttributesManagerImpl extends NodeAttributesManager {

// TODO may be we can have a better collection here.
// this will be updated to get the attributeName to NM mapping
private ConcurrentHashMap<NodeAttribute, RMNodeAttribute> clusterAttributes =
new ConcurrentHashMap<>();
private ConcurrentHashMap<NodeAttributeKey, RMNodeAttribute> clusterAttributes
= new ConcurrentHashMap<>();

// hostname -> (Map (attributeName -> NodeAttribute))
// Instead of NodeAttribute, plan to have it in future as AttributeValue
@ -149,7 +151,7 @@ private NodeAttributeStore getAttributeStoreClass(Configuration conf) {
private void internalUpdateAttributesOnNodes(
Map<String, Map<NodeAttribute, AttributeValue>> nodeAttributeMapping,
AttributeMappingOperationType op,
Map<NodeAttribute, RMNodeAttribute> newAttributesToBeAdded,
Map<NodeAttributeKey, RMNodeAttribute> newAttributesToBeAdded,
String attributePrefix) {
try {
writeLock.lock();
@ -210,13 +212,14 @@ private void internalUpdateAttributesOnNodes(
private void removeNodeFromAttributes(String nodeHost,
Set<NodeAttribute> attributeMappings) {
for (NodeAttribute rmAttribute : attributeMappings) {
RMNodeAttribute host = clusterAttributes.get(rmAttribute);
RMNodeAttribute host =
clusterAttributes.get(rmAttribute.getAttributeKey());
if (host != null) {
host.removeNode(nodeHost);
// If there is no other host has such attribute,
// remove it from the global mapping.
if (host.getAssociatedNodeIds().isEmpty()) {
clusterAttributes.remove(rmAttribute);
clusterAttributes.remove(rmAttribute.getAttributeKey());
}
}
}
@ -224,12 +227,16 @@ private void removeNodeFromAttributes(String nodeHost,

private void addNodeToAttribute(String nodeHost,
Map<NodeAttribute, AttributeValue> attributeMappings) {
for (NodeAttribute attribute : attributeMappings.keySet()) {
RMNodeAttribute rmNodeAttribute = clusterAttributes.get(attribute);
for (Entry<NodeAttribute, AttributeValue> attributeEntry : attributeMappings
.entrySet()) {

RMNodeAttribute rmNodeAttribute =
clusterAttributes.get(attributeEntry.getKey().getAttributeKey());
if (rmNodeAttribute != null) {
rmNodeAttribute.addNode(nodeHost);
rmNodeAttribute.addNode(nodeHost, attributeEntry.getValue());
} else {
clusterAttributes.put(attribute, new RMNodeAttribute(attribute));
clusterAttributes.put(attributeEntry.getKey().getAttributeKey(),
new RMNodeAttribute(attributeEntry.getKey()));
}
}
}
@ -257,7 +264,7 @@ private void replaceNodeToAttribute(String nodeHost, String prefix,
*/
protected Map<String, Map<NodeAttribute, AttributeValue>> validate(
Map<String, Set<NodeAttribute>> nodeAttributeMapping,
Map<NodeAttribute, RMNodeAttribute> newAttributesToBeAdded,
Map<NodeAttributeKey, RMNodeAttribute> newAttributesToBeAdded,
boolean isRemoveOperation) throws IOException {
Map<String, Map<NodeAttribute, AttributeValue>> nodeToAttributesMap =
new TreeMap<>();
@ -274,19 +281,21 @@ protected Map<String, Map<NodeAttribute, AttributeValue>> validate(

// validate for attributes
for (NodeAttribute attribute : nodeToAttrMappingEntry.getValue()) {
String attributeName = attribute.getAttributeName().trim();
NodeAttributeKey attributeKey = attribute.getAttributeKey();
String attributeName = attributeKey.getAttributeName().trim();
NodeLabelUtil.checkAndThrowLabelName(attributeName);
NodeLabelUtil
.checkAndThrowAttributePrefix(attribute.getAttributePrefix());
.checkAndThrowAttributePrefix(attributeKey.getAttributePrefix());

// ensure trimmed values are set back
attribute.setAttributeName(attributeName);
attribute.setAttributePrefix(attribute.getAttributePrefix().trim());
attributeKey.setAttributeName(attributeName);
attributeKey
.setAttributePrefix(attributeKey.getAttributePrefix().trim());

// verify for type against prefix/attributeName
if (validateForAttributeTypeMismatch(isRemoveOperation, attribute,
newAttributesToBeAdded)) {
newAttributesToBeAdded.put(attribute,
newAttributesToBeAdded.put(attribute.getAttributeKey(),
new RMNodeAttribute(attribute));
}
// TODO type based value setting needs to be done using a factory
@ -310,9 +319,11 @@ protected Map<String, Map<NodeAttribute, AttributeValue>> validate(
*/
private boolean validateForAttributeTypeMismatch(boolean isRemoveOperation,
NodeAttribute attribute,
Map<NodeAttribute, RMNodeAttribute> newAttributes)
Map<NodeAttributeKey, RMNodeAttribute> newAttributes)
throws IOException {
if (isRemoveOperation && !clusterAttributes.containsKey(attribute)) {
NodeAttributeKey attributeKey = attribute.getAttributeKey();
if (isRemoveOperation
&& !clusterAttributes.containsKey(attributeKey)) {
// no need to validate anything as its remove operation and attribute
// doesn't exist.
return false; // no need to add as its remove operation
@ -320,10 +331,10 @@ private boolean validateForAttributeTypeMismatch(boolean isRemoveOperation,
// already existing or attribute is mapped to another Node in the
// current command, then check whether the attribute type is matching
NodeAttribute existingAttribute =
(clusterAttributes.containsKey((attribute))
? clusterAttributes.get(attribute).getAttribute()
: (newAttributes.containsKey(attribute)
? newAttributes.get(attribute).getAttribute()
(clusterAttributes.containsKey(attributeKey)
? clusterAttributes.get(attributeKey).getAttribute()
: (newAttributes.containsKey(attributeKey)
? newAttributes.get(attributeKey).getAttribute()
: null));
if (existingAttribute == null) {
return true;
@ -331,7 +342,7 @@ private boolean validateForAttributeTypeMismatch(boolean isRemoveOperation,
.getAttributeType()) {
throw new IOException("Attribute name - type is not matching with "
+ "already configured mapping for the attribute "
+ attribute.getAttributeName() + " existing : "
+ attributeKey + " existing : "
+ existingAttribute.getAttributeType() + ", new :"
+ attribute.getAttributeType());
}
@ -347,37 +358,39 @@ protected String normalizeAttributeValue(String value) {
}

@Override
public Set<NodeAttribute> getClusterNodeAttributes(Set<String> prefix) {
public Set<NodeAttribute> getClusterNodeAttributes(
Set<String> prefix) {
Set<NodeAttribute> attributes = new HashSet<>();
KeySetView<NodeAttribute, RMNodeAttribute> allAttributes =
clusterAttributes.keySet();
Set<Entry<NodeAttributeKey, RMNodeAttribute>> allAttributes =
clusterAttributes.entrySet();
// Return all if prefix is not given.
if (prefix == null || prefix.isEmpty()) {
attributes.addAll(allAttributes);
return attributes;
}
boolean forAllPrefix = prefix == null || prefix.isEmpty();
// Try search attributes by prefix and return valid ones.
Iterator<NodeAttribute> iterator = allAttributes.iterator();
Iterator<Entry<NodeAttributeKey, RMNodeAttribute>> iterator =
allAttributes.iterator();
while (iterator.hasNext()) {
NodeAttribute current = iterator.next();
if (prefix.contains(current.getAttributePrefix())) {
attributes.add(current);
Entry<NodeAttributeKey, RMNodeAttribute> current = iterator.next();
NodeAttributeKey attrID = current.getKey();
RMNodeAttribute rmAttr = current.getValue();
if (forAllPrefix || prefix.contains(attrID.getAttributePrefix())) {
attributes.add(rmAttr.getAttribute());
}
}
return attributes;
}

@Override
public Map<NodeAttribute, Set<String>> getAttributesToNodes(
Set<NodeAttribute> attributes) {
public Map<NodeAttributeKey, Map<String, AttributeValue>> getAttributesToNodes(
Set<NodeAttributeKey> attributes) {
try {
readLock.lock();
boolean fetchAllAttributes = (attributes == null || attributes.isEmpty());
Map<NodeAttribute, Set<String>> attributesToNodes = new HashMap<>();
for (Entry<NodeAttribute, RMNodeAttribute> attributeEntry :
Map<NodeAttributeKey, Map<String, AttributeValue>> attributesToNodes =
new HashMap<>();
for (Entry<NodeAttributeKey, RMNodeAttribute> attributeEntry :
clusterAttributes.entrySet()) {
if (fetchAllAttributes || attributes
.contains(attributeEntry.getKey())) {
if (fetchAllAttributes
|| attributes.contains(attributeEntry.getKey())) {
attributesToNodes.put(attributeEntry.getKey(),
attributeEntry.getValue().getAssociatedNodeIds());
}
@ -391,8 +404,8 @@ public Map<NodeAttribute, Set<String>> getAttributesToNodes(
public Resource getResourceByAttribute(NodeAttribute attribute) {
try {
readLock.lock();
return clusterAttributes.containsKey(attribute)
? clusterAttributes.get(attribute).getResource()
return clusterAttributes.containsKey(attribute.getAttributeKey())
? clusterAttributes.get(attribute.getAttributeKey()).getResource()
: Resource.newInstance(0, 0);
} finally {
readLock.unlock();
@ -425,7 +438,8 @@ public List<NodeToAttributes> getNodeToAttributes(Set<String> prefix) {
attrs = new ArrayList<>();
for (Entry<NodeAttribute, AttributeValue> nodeAttr : v.attributes
.entrySet()) {
if (prefix.contains(nodeAttr.getKey().getAttributePrefix())) {
if (prefix.contains(
nodeAttr.getKey().getAttributeKey().getAttributePrefix())) {
attrs.add(nodeAttr.getKey());
}
}
@ -473,7 +487,7 @@ public void activateNode(NodeId nodeId, Resource resource) {
}
host.activateNode(resource);
for (NodeAttribute attribute : host.getAttributes().keySet()) {
clusterAttributes.get(attribute).removeNode(resource);
clusterAttributes.get(attribute.getAttributeKey()).removeNode(resource);
}
} finally {
writeLock.unlock();
@ -485,7 +499,8 @@ public void deactivateNode(NodeId nodeId) {
writeLock.lock();
Host host = nodeCollections.get(nodeId.getHost());
for (NodeAttribute attribute : host.getAttributes().keySet()) {
clusterAttributes.get(attribute).removeNode(host.getResource());
clusterAttributes.get(attribute.getAttributeKey())
.removeNode(host.getResource());
}
host.deactivateNode();
} finally {
@ -531,7 +546,8 @@ public void replaceAttributes(
this.attributes.entrySet().iterator();
while (it.hasNext()) {
Entry<NodeAttribute, AttributeValue> current = it.next();
if (prefix.equals(current.getKey().getAttributePrefix())) {
if (prefix.equals(
current.getKey().getAttributeKey().getAttributePrefix())) {
it.remove();
}
}
@ -659,7 +675,7 @@ private void processMapping(
Map<String, Set<NodeAttribute>> nodeAttributeMapping,
AttributeMappingOperationType mappingType, String attributePrefix)
throws IOException {
Map<NodeAttribute, RMNodeAttribute> newAttributesToBeAdded =
Map<NodeAttributeKey, RMNodeAttribute> newAttributesToBeAdded =
new HashMap<>();
Map<String, Map<NodeAttribute, AttributeValue>> validMapping =
validate(nodeAttributeMapping, newAttributesToBeAdded, false);
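Not part of the patch: a sketch of why clusterAttributes is now keyed by NodeAttributeKey. Two NodeAttribute records that differ only in value (the same attribute reported by two hosts) should resolve to the same key, so they land in the same RMNodeAttribute bucket instead of creating two cluster-level entries; the equality check below is the expected behaviour under that design, and the sketch class is invented.

// Illustrative sketch, not committed code.
import org.apache.hadoop.yarn.api.records.NodeAttribute;
import org.apache.hadoop.yarn.api.records.NodeAttributeKey;
import org.apache.hadoop.yarn.api.records.NodeAttributeType;

class AttributeKeySketch {
  static boolean sameClusterEntry() {
    NodeAttribute onHost1 = NodeAttribute.newInstance(
        NodeAttribute.PREFIX_CENTRALIZED, "OS", NodeAttributeType.STRING,
        "centos7");
    NodeAttribute onHost2 = NodeAttribute.newInstance(
        NodeAttribute.PREFIX_CENTRALIZED, "OS", NodeAttributeType.STRING,
        "windows64");
    NodeAttributeKey k1 = onHost1.getAttributeKey();
    NodeAttributeKey k2 = onHost2.getAttributeKey();
    return k1.equals(k2); // expected true: prefix + name identify the key
  }
}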
@ -21,9 +21,11 @@
import java.io.IOException;
import java.util.HashSet;
import java.util.Set;
import java.util.stream.Collectors;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.yarn.api.records.NodeAttribute;
import org.apache.hadoop.yarn.api.records.NodeLabel;

/**
@ -56,4 +58,21 @@ public static void verifyCentralizedNodeLabelConfEnabled(String operation,
throw new IOException(msg);
}
}

/**
* Returns a set of node attributes whose name exists in the provided
* <code>attributeNames</code> list.
*
* @param attributeNames For this given list of attribute names get the
* cluster NodeAttributes
* @param clusterNodeAttributes set of node Attributes
* @return set of Node Attributes which maps to the give attributes names
*/
public static Set <NodeAttribute> getNodeAttributesByName(
Set<String> attributeNames, Set<NodeAttribute> clusterNodeAttributes) {
return clusterNodeAttributes.stream()
.filter(attribute -> attributeNames
.contains(attribute.getAttributeKey().getAttributeName()))
.collect(Collectors.toSet());
}
}
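Not part of the patch: a usage sketch for the getNodeAttributesByName helper added above. The enclosing utility class is assumed here to be NodeLabelsUtils in org.apache.hadoop.yarn.server.resourcemanager.nodelabels (the hunk does not restate the class or package), and the sketch class is invented.

// Illustrative sketch, not committed code.
import java.util.Set;
import com.google.common.collect.ImmutableSet;
import org.apache.hadoop.yarn.api.records.NodeAttribute;
import org.apache.hadoop.yarn.server.resourcemanager.nodelabels.NodeLabelsUtils; // assumed

class AttributesByNameSketch {
  static Set<NodeAttribute> pick(Set<NodeAttribute> clusterAttributes) {
    // Keep only the attributes whose key name is GPU or OS.
    return NodeLabelsUtils.getNodeAttributesByName(
        ImmutableSet.of("GPU", "OS"), clusterAttributes);
  }
}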
@ -41,8 +41,8 @@ public NodeAttributeInfo() {
}

public NodeAttributeInfo(NodeAttribute nodeAttribute) {
this.prefix = nodeAttribute.getAttributePrefix();
this.name = nodeAttribute.getAttributeName();
this.prefix = nodeAttribute.getAttributeKey().getAttributePrefix();
this.name = nodeAttribute.getAttributeKey().getAttributeName();
this.type = nodeAttribute.getAttributeType().toString();
this.value = nodeAttribute.getAttributeValue();
}
@ -18,16 +18,6 @@

package org.apache.hadoop.yarn.server.resourcemanager;

import org.apache.hadoop.yarn.api.protocolrecords.GetAttributesToNodesRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetAttributesToNodesResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodeAttributesRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodeAttributesResponse;

import org.apache.hadoop.yarn.api.protocolrecords.GetNodesToAttributesRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetNodesToAttributesResponse;
import org.apache.hadoop.yarn.api.records.NodeAttribute;
import org.apache.hadoop.yarn.api.records.NodeAttributeType;
import org.apache.hadoop.yarn.nodelabels.NodeAttributesManager;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import static org.mockito.Matchers.any;
@ -78,6 +68,10 @@
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationReportResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetApplicationsResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetAttributesToNodesRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetAttributesToNodesResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodeAttributesRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodeAttributesResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodeLabelsRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodeLabelsResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodesRequest;
@ -88,6 +82,8 @@
import org.apache.hadoop.yarn.api.protocolrecords.GetLabelsToNodesRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetLabelsToNodesResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetNewReservationRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetNodesToAttributesRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetNodesToAttributesResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetNodesToLabelsRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetNodesToLabelsResponse;
import org.apache.hadoop.yarn.api.protocolrecords.GetQueueInfoRequest;
@ -117,10 +113,15 @@
import org.apache.hadoop.yarn.api.records.ContainerLaunchContext;
import org.apache.hadoop.yarn.api.records.ContainerState;
import org.apache.hadoop.yarn.api.records.ContainerStatus;
import org.apache.hadoop.yarn.api.records.NodeAttribute;
import org.apache.hadoop.yarn.api.records.NodeAttributeInfo;
import org.apache.hadoop.yarn.api.records.NodeAttributeKey;
import org.apache.hadoop.yarn.api.records.NodeAttributeType;
import org.apache.hadoop.yarn.api.records.NodeId;
import org.apache.hadoop.yarn.api.records.NodeLabel;
import org.apache.hadoop.yarn.api.records.NodeReport;
import org.apache.hadoop.yarn.api.records.NodeState;
import org.apache.hadoop.yarn.api.records.NodeToAttributeValue;
import org.apache.hadoop.yarn.api.records.Priority;
import org.apache.hadoop.yarn.api.records.QueueACL;
import org.apache.hadoop.yarn.api.records.QueueConfigurations;
@ -142,6 +143,7 @@
import org.apache.hadoop.yarn.factories.RecordFactory;
import org.apache.hadoop.yarn.factory.providers.RecordFactoryProvider;
import org.apache.hadoop.yarn.ipc.YarnRPC;
import org.apache.hadoop.yarn.nodelabels.NodeAttributesManager;
import org.apache.hadoop.yarn.server.resourcemanager.ahs.RMApplicationHistoryWriter;
import org.apache.hadoop.yarn.server.resourcemanager.metrics.SystemMetricsPublisher;
import org.apache.hadoop.yarn.server.resourcemanager.nodelabels.RMNodeLabelsManager;
@ -163,7 +165,6 @@
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.fair.FairScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.security.QueueACLsManager;
import org.apache.hadoop.yarn.server.resourcemanager.timelineservice.RMTimelineCollectorManager;

import org.apache.hadoop.yarn.server.scheduler.SchedulerRequestKey;
import org.apache.hadoop.yarn.server.security.ApplicationACLsManager;
import org.apache.hadoop.yarn.server.utils.BuilderUtils;
@ -176,11 +177,11 @@
import org.junit.Assert;
import org.junit.Assume;
import org.junit.Test;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;

import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Sets;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;

public class TestClientRMService {

@ -2048,11 +2049,12 @@ protected ClientRMService createClientRMService() {
GetClusterNodeAttributesRequest.newInstance();
GetClusterNodeAttributesResponse response =
client.getClusterNodeAttributes(request);
Set<NodeAttribute> attributes = response.getNodeAttributes();
Set<NodeAttributeInfo> attributes = response.getNodeAttributes();
Assert.assertEquals("Size not correct", 3, attributes.size());
Assert.assertTrue(attributes.contains(gpu));
Assert.assertTrue(attributes.contains(os));
Assert.assertTrue(attributes.contains(docker));
Assert.assertTrue(attributes.contains(NodeAttributeInfo.newInstance(gpu)));
Assert.assertTrue(attributes.contains(NodeAttributeInfo.newInstance(os)));
Assert
.assertTrue(attributes.contains(NodeAttributeInfo.newInstance(docker)));
rpc.stopProxy(client, conf);
rm.close();
}
@ -2071,17 +2073,17 @@ protected ClientRMService createClientRMService() {
NodeAttributesManager mgr = rm.getRMContext().getNodeAttributesManager();
String node1 = "host1";
String node2 = "host2";
NodeAttribute gpu = NodeAttribute
.newInstance(NodeAttribute.PREFIX_CENTRALIZED, "GPU",
NodeAttributeType.STRING, "nvida");
NodeAttribute os = NodeAttribute
.newInstance(NodeAttribute.PREFIX_CENTRALIZED, "OS",
NodeAttribute gpu =
NodeAttribute.newInstance(NodeAttribute.PREFIX_CENTRALIZED, "GPU",
NodeAttributeType.STRING, "nvidia");
NodeAttribute os =
NodeAttribute.newInstance(NodeAttribute.PREFIX_CENTRALIZED, "OS",
NodeAttributeType.STRING, "windows64");
NodeAttribute docker = NodeAttribute
.newInstance(NodeAttribute.PREFIX_DISTRIBUTED, "DOCKER",
NodeAttribute docker =
NodeAttribute.newInstance(NodeAttribute.PREFIX_DISTRIBUTED, "DOCKER",
NodeAttributeType.STRING, "docker0");
NodeAttribute dist = NodeAttribute
.newInstance(NodeAttribute.PREFIX_DISTRIBUTED, "VERSION",
NodeAttribute dist =
NodeAttribute.newInstance(NodeAttribute.PREFIX_DISTRIBUTED, "VERSION",
NodeAttributeType.STRING, "3_0_2");
Map<String, Set<NodeAttribute>> nodes = new HashMap<>();
nodes.put(node1, ImmutableSet.of(gpu, os, dist));
@ -2099,35 +2101,55 @@ protected ClientRMService createClientRMService() {
GetAttributesToNodesRequest.newInstance();
GetAttributesToNodesResponse response =
client.getAttributesToNodes(request);
Map<NodeAttribute, Set<String>> attrs = response.getAttributesToNodes();
Map<NodeAttributeKey, List<NodeToAttributeValue>> attrs =
response.getAttributesToNodes();
Assert.assertEquals(response.getAttributesToNodes().size(), 4);
Assert.assertEquals(attrs.get(dist).size(), 2);
Assert.assertEquals(attrs.get(os).size(), 1);
Assert.assertEquals(attrs.get(gpu).size(), 1);
Assert.assertTrue(attrs.get(dist).contains(node1));
Assert.assertTrue(attrs.get(dist).contains(node2));
Assert.assertTrue(attrs.get(docker).contains(node2));
Assert.assertEquals(attrs.get(dist.getAttributeKey()).size(), 2);
Assert.assertEquals(attrs.get(os.getAttributeKey()).size(), 1);
Assert.assertEquals(attrs.get(gpu.getAttributeKey()).size(), 1);
Assert.assertTrue(findHostnameAndValInMapping(node1, "3_0_2",
attrs.get(dist.getAttributeKey())));
Assert.assertTrue(findHostnameAndValInMapping(node2, "3_0_2",
attrs.get(dist.getAttributeKey())));
Assert.assertTrue(findHostnameAndValInMapping(node2, "docker0",
attrs.get(docker.getAttributeKey())));

GetAttributesToNodesRequest request2 =
GetAttributesToNodesRequest.newInstance(ImmutableSet.of(docker));
GetAttributesToNodesRequest request2 = GetAttributesToNodesRequest
.newInstance(ImmutableSet.of(docker.getAttributeKey()));
GetAttributesToNodesResponse response2 =
client.getAttributesToNodes(request2);
Map<NodeAttribute, Set<String>> attrs2 = response2.getAttributesToNodes();
Assert.assertEquals(response2.getAttributesToNodes().size(), 1);
Assert.assertTrue(attrs.get(docker).contains(node2));
Map<NodeAttributeKey, List<NodeToAttributeValue>> attrs2 =
response2.getAttributesToNodes();
Assert.assertEquals(attrs2.size(), 1);
Assert.assertTrue(findHostnameAndValInMapping(node2, "docker0",
attrs2.get(docker.getAttributeKey())));

GetAttributesToNodesRequest request3 =
GetAttributesToNodesRequest.newInstance(ImmutableSet.of(docker, os));
GetAttributesToNodesRequest.newInstance(
ImmutableSet.of(docker.getAttributeKey(), os.getAttributeKey()));
GetAttributesToNodesResponse response3 =
client.getAttributesToNodes(request3);
Map<NodeAttribute, Set<String>> attrs3 = response3.getAttributesToNodes();
Assert.assertEquals(response3.getAttributesToNodes().size(), 2);
Assert.assertTrue(attrs.get(os).contains(node1));
Assert.assertTrue(attrs.get(docker).contains(node2));
Map<NodeAttributeKey, List<NodeToAttributeValue>> attrs3 =
response3.getAttributesToNodes();
Assert.assertEquals(attrs3.size(), 2);
Assert.assertTrue(findHostnameAndValInMapping(node1, "windows64",
attrs3.get(os.getAttributeKey())));
Assert.assertTrue(findHostnameAndValInMapping(node2, "docker0",
attrs3.get(docker.getAttributeKey())));
rpc.stopProxy(client, conf);
rm.close();
}

private boolean findHostnameAndValInMapping(String hostname, String attrVal,
List<NodeToAttributeValue> mappingVals) {
for (NodeToAttributeValue value : mappingVals) {
if (value.getHostname().equals(hostname)) {
return attrVal.equals(value.getAttributeValue());
}
}
return false;
}

@Test(timeout = 120000)
public void testGetNodesToAttributes() throws IOException, YarnException {
MockRM rm = new MockRM() {
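Not part of the patch: a sketch of why the tests above now wrap the expected attributes in NodeAttributeInfo.newInstance(...) before calling contains(). The cluster-attributes call returns NodeAttributeInfo records rather than NodeAttribute, so equality is checked against the info form. The sketch class is invented and the protocol proxy is a placeholder parameter.

// Illustrative sketch, not committed code.
import java.util.Set;
import org.apache.hadoop.yarn.api.ApplicationClientProtocol;
import org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodeAttributesRequest;
import org.apache.hadoop.yarn.api.protocolrecords.GetClusterNodeAttributesResponse;
import org.apache.hadoop.yarn.api.records.NodeAttribute;
import org.apache.hadoop.yarn.api.records.NodeAttributeInfo;

class ClusterAttributesSketch {
  static boolean isReported(ApplicationClientProtocol client,
      NodeAttribute attr) throws Exception {
    GetClusterNodeAttributesResponse resp = client.getClusterNodeAttributes(
        GetClusterNodeAttributesRequest.newInstance());
    Set<NodeAttributeInfo> infos = resp.getNodeAttributes();
    // Compare via the info form (key + type), which is what the RM returns.
    return infos.contains(NodeAttributeInfo.newInstance(attr));
  }
}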
@ -880,7 +880,7 @@ public void testNodeHeartbeatWithNodeAttributes() throws Exception {
.getAttributesForNode(nodeId.getHost());
Assert.assertEquals(1, attrs.size());
NodeAttribute na = attrs.keySet().iterator().next();
Assert.assertEquals("host", na.getAttributeName());
Assert.assertEquals("host", na.getAttributeKey().getAttributeName());
Assert.assertEquals("host2", na.getAttributeValue());
Assert.assertEquals(NodeAttributeType.STRING, na.getAttributeType());

@ -900,7 +900,7 @@ public void testNodeHeartbeatWithNodeAttributes() throws Exception {
attrs = attributeManager.getAttributesForNode(nodeId.getHost());
Assert.assertEquals(1, attrs.size());
na = attrs.keySet().iterator().next();
Assert.assertEquals("host", na.getAttributeName());
Assert.assertEquals("host", na.getAttributeKey().getAttributeName());
Assert.assertEquals("host3", na.getAttributeValue());
Assert.assertEquals(NodeAttributeType.STRING, na.getAttributeType());
}
@ -253,8 +253,10 @@ public void testRecoverFromEditLog() throws Exception {

public void checkNodeAttributeEqual(NodeAttribute atr1, NodeAttribute atr2) {
Assert.assertEquals(atr1.getAttributeType(), atr2.getAttributeType());
Assert.assertEquals(atr1.getAttributeName(), atr2.getAttributeName());
Assert.assertEquals(atr1.getAttributePrefix(), atr2.getAttributePrefix());
Assert.assertEquals(atr1.getAttributeKey().getAttributeName(),
atr2.getAttributeKey().getAttributeName());
Assert.assertEquals(atr1.getAttributeKey().getAttributePrefix(),
atr2.getAttributeKey().getAttributePrefix());
Assert.assertEquals(atr1.getAttributeValue(), atr2.getAttributeValue());
}
}
@ -344,11 +344,12 @@ public void testReplaceNodeAttributes() throws IOException {
clusterAttributes = attributesManager.getClusterNodeAttributes(
Sets.newHashSet(NodeAttribute.PREFIX_DISTRIBUTED));
Assert.assertEquals(1, clusterAttributes.size());
NodeAttribute att = clusterAttributes.iterator().next();
Assert.assertEquals("dist-node-attribute-v2_0", att.getAttributeName());
NodeAttribute attr = clusterAttributes.iterator().next();
Assert.assertEquals("dist-node-attribute-v2_0",
attr.getAttributeKey().getAttributeName());
Assert.assertEquals(NodeAttribute.PREFIX_DISTRIBUTED,
att.getAttributePrefix());
Assert.assertEquals("dist_v3_0", att.getAttributeValue());
attr.getAttributeKey().getAttributePrefix());
Assert.assertEquals("dist_v3_0", attr.getAttributeValue());

// Replace all attributes
toReplaceMap.put(HOSTNAMES[0],
@ -849,6 +849,6 @@ public GetClusterNodeAttributesResponse getClusterNodeAttributes(
@Override
public GetNodesToAttributesResponse getNodesToAttributes(
GetNodesToAttributesRequest request) throws YarnException, IOException {
throw new NotImplementedException();
throw new NotImplementedException("Code is not implemented");
}
}