HADOOP-15007. Stabilize and document Configuration <tag> element. Contributed by Ajay Kumar.
parent d1cd573687
commit 3688e491d5
@@ -83,7 +83,6 @@

import com.google.common.base.Charsets;
import org.apache.commons.collections.map.UnmodifiableMap;
import org.apache.commons.io.FilenameUtils;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
@@ -195,6 +194,30 @@
* parameters and these are suppressible by configuring
* <tt>log4j.logger.org.apache.hadoop.conf.Configuration.deprecation</tt> in
* log4j.properties file.
*
* <h4 id="Tags">Tags</h4>
*
* <p>Optionally we can tag related properties together by using tag
* attributes. System tags are defined by the hadoop.system.tags property. Users
* can define their own custom tags in the hadoop.custom.tags property.
*
* <p>For example, we can tag an existing property as:
* <tt><pre>
* <property>
* <name>dfs.replication</name>
* <value>3</value>
* <tag>HDFS,REQUIRED</tag>
* </property>
*
* <property>
* <name>dfs.data.transfer.protection</name>
* <value>3</value>
* <tag>HDFS,SECURITY</tag>
* </property>
* </pre></tt>
* <p>Properties marked with tags can be retrieved with <tt>conf
* .getAllPropertiesByTag("HDFS")</tt> or <tt>conf.getAllPropertiesByTags
* (Arrays.asList("YARN","SECURITY"))</tt>.</p>
*/
@InterfaceAudience.Public
@InterfaceStability.Stable
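For illustration, a minimal usage sketch of the tag API described in the Javadoc above (the resource name and the lookup key are placeholders; the resource only needs to contain <tag> elements and a hadoop.system.tags/hadoop.custom.tags declaration):

    import java.util.Arrays;
    import java.util.Properties;
    import org.apache.hadoop.conf.Configuration;

    public class TagUsageExample {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        conf.addResource("core-site.xml");   // hypothetical resource with tagged properties
        conf.get("dfs.replication");         // any lookup forces the resources to load

        // All properties carrying the HDFS tag.
        Properties hdfsProps = conf.getAllPropertiesByTag("HDFS");

        // Union of properties carrying either the YARN or the SECURITY tag.
        Properties mixed = conf.getAllPropertiesByTags(Arrays.asList("YARN", "SECURITY"));

        // True only for tags declared via hadoop.system.tags or hadoop.custom.tags.
        boolean declared = conf.isPropertyTag("SECURITY");

        System.out.println(hdfsProps.size() + " " + mixed.size() + " " + declared);
      }
    }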
@@ -206,6 +229,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
private static final Logger LOG_DEPRECATION =
LoggerFactory.getLogger(
"org.apache.hadoop.conf.Configuration.deprecation");
private static final Set<String> TAGS = new HashSet<>();

private boolean quietmode = true;
@@ -296,15 +320,10 @@ private static boolean getRestrictParserDefault(Object resource) {
private static final WeakHashMap<Configuration,Object> REGISTRY =
new WeakHashMap<Configuration,Object>();

/**
* Map to register all classes holding property tag enums.
*/
private static final Map<String, Class>
REGISTERED_TAG_CLASS = new HashMap<>();
/**
* Map to hold properties by their tag groupings.
*/
private final Map<PropertyTag, Properties> propertyTagsMap =
private final Map<String, Properties> propertyTagsMap =
new ConcurrentHashMap<>();

/**
@@ -785,11 +804,6 @@ public Configuration() {
public Configuration(boolean loadDefaults) {
this.loadDefaults = loadDefaults;

// Register all classes holding property tags with
REGISTERED_TAG_CLASS.put("core", CorePropertyTag.class);
REGISTERED_TAG_CLASS.put("hdfs", HDFSPropertyTag.class);
REGISTERED_TAG_CLASS.put("yarn", YarnPropertyTag.class);

synchronized(Configuration.class) {
REGISTRY.put(this, null);
}
@@ -820,7 +834,6 @@ public Configuration(Configuration other) {
this.finalParameters = Collections.newSetFromMap(
new ConcurrentHashMap<String, Boolean>());
this.finalParameters.addAll(other.finalParameters);
this.REGISTERED_TAG_CLASS.putAll(other.REGISTERED_TAG_CLASS);
this.propertyTagsMap.putAll(other.propertyTagsMap);
}
@@ -2919,6 +2932,7 @@ private void loadResources(Properties properties,
resources.set(i, ret);
}
}
this.removeUndeclaredTags(properties);
}

private Resource loadResource(Properties properties,
@@ -3123,7 +3137,7 @@ private Resource loadResource(Properties properties,
break;
}
confSource.add(name);
//Read tags and put them in propertyTagsMap
// Read tags and put them in propertyTagsMap
if (confTag != null) {
readTagFromConfig(confTag, confName, confValue, confSource);
}
@@ -3165,48 +3179,61 @@ private Resource loadResource(Properties properties,
}
}

/**
* Removes undeclared tags and related properties from propertyTagsMap.
* It's required because the ordering of properties in xml config files is not
* guaranteed.
* @param prop
*/
private void removeUndeclaredTags(Properties prop) {
// Get all system tags
if (prop.containsKey(CommonConfigurationKeys.HADOOP_SYSTEM_TAGS)){
String systemTags = prop.getProperty(CommonConfigurationKeys
.HADOOP_SYSTEM_TAGS);
Arrays.stream(systemTags.split(",")).forEach(tag -> TAGS.add(tag));
}
// Get all custom tags
if (prop.containsKey(CommonConfigurationKeys.HADOOP_CUSTOM_TAGS)) {
String customTags = prop.getProperty(CommonConfigurationKeys
.HADOOP_CUSTOM_TAGS);
Arrays.stream(customTags.split(",")).forEach(tag -> TAGS.add(tag));
}

Set undeclaredTags = propertyTagsMap.keySet();
if (undeclaredTags.retainAll(TAGS)) {
LOG.info("Removed undeclared tags:");
}
}

/**
* Read the values passed as tags and store them in a
* map for later retrieval.
* @param attributeValue
* @param confName
* @param confValue
* @param confSource
*/
private void readTagFromConfig(String attributeValue, String confName, String
confValue, List<String> confSource) {
for (String tagStr : attributeValue.split(",")) {
tagStr = tagStr.trim();
try {
if (confSource.size() > 0) {
for (String source : confSource) {
PropertyTag tag1 = this.getPropertyTag(tagStr,
FilenameUtils.getName(source).split("-")[0]);
if (tag1 != null) {
//Handle property with no/null value
if (confValue == null) {
confValue = "";
}
if (propertyTagsMap.containsKey(tag1)) {
propertyTagsMap.get(tag1).setProperty(confName, confValue);
} else {
Properties props = new Properties();
props.setProperty(confName, confValue);
propertyTagsMap.put(tag1, props);
}
}
}
// Handle property with no/null value
if (confValue == null) {
confValue = "";
}
if (propertyTagsMap.containsKey(tagStr)) {
propertyTagsMap.get(tagStr).setProperty(confName, confValue);
} else {
// If no source is set try to find tag in CorePropertyTag
if (propertyTagsMap.containsKey(CorePropertyTag.valueOf(tagStr))) {
propertyTagsMap.get(CorePropertyTag.valueOf(tagStr))
.setProperty(confName, confValue);
} else {
Properties props = new Properties();
props.setProperty(confName, confValue);
propertyTagsMap.put(CorePropertyTag.valueOf(tagStr),
props);
}
Properties props = new Properties();
props.setProperty(confName, confValue);
propertyTagsMap.put(tagStr, props);
}
} catch (Exception ex) {
// Log the invalid tag and continue to parse rest of the properties.
LOG.info("Invalid tag '" + tagStr + "' found for "
+ "property:" + confName + " Source:" + Arrays
.toString(confSource.toArray()), ex);
// Log the exception at trace level.
LOG.trace("Tag '{}' for property:{} Source:{}", tagStr, confName,
Arrays.toString(confSource.toArray()), ex);
}

}
}
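As a rough, self-contained sketch of the bookkeeping implemented above (simplified stand-ins, not the actual Hadoop fields or methods): each comma-separated value of a <tag> attribute is split, trimmed, and used as a bucket key, and buckets whose tags were never declared are pruned afterwards, because the tag declarations may appear later in the file than the properties that use them.

    import java.util.HashMap;
    import java.util.HashSet;
    import java.util.Map;
    import java.util.Properties;
    import java.util.Set;

    class TagGroupingSketch {
      // Stand-ins for Configuration's TAGS set and propertyTagsMap.
      private final Set<String> declaredTags = new HashSet<>();
      private final Map<String, Properties> byTag = new HashMap<>();

      // Mirrors how hadoop.system.tags / hadoop.custom.tags are read.
      void declareTags(String csv) {
        for (String tag : csv.split(",")) {
          declaredTags.add(tag.trim());
        }
      }

      // Mirrors readTagFromConfig: every tag listed on a property gets a bucket.
      void record(String tagCsv, String name, String value) {
        for (String tag : tagCsv.split(",")) {
          byTag.computeIfAbsent(tag.trim(), k -> new Properties())
              .setProperty(name, value == null ? "" : value);
        }
      }

      // Mirrors removeUndeclaredTags: drop buckets for tags never declared.
      void prune() {
        byTag.keySet().retainAll(declaredTags);
      }
    }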
@@ -3690,9 +3717,10 @@ public static boolean hasWarnedDeprecation(String name) {

/**
* Get all properties belonging to tag.
* @return Properties with matching properties
* @param tag tag
* @return Properties with matching tag
*/
public Properties getAllPropertiesByTag(final PropertyTag tag) {
public Properties getAllPropertiesByTag(final String tag) {
Properties props = new Properties();
if (propertyTagsMap.containsKey(tag)) {
props.putAll(propertyTagsMap.get(tag));
@@ -3703,12 +3731,12 @@ public Properties getAllPropertiesByTag(final PropertyTag tag) {
/**
* Get all properties belonging to list of input tags. Calls
* getAllPropertiesByTag internally.
*
* @return Properties with all matching properties
* @param tagList list of input tags
* @return Properties with matching tags
*/
public Properties getAllPropertiesByTags(final List<PropertyTag> tagList) {
public Properties getAllPropertiesByTags(final List<String> tagList) {
Properties prop = new Properties();
for (PropertyTag tag : tagList) {
for (String tag : tagList) {
prop.putAll(this.getAllPropertiesByTag(tag));
}
return prop;
@@ -3718,15 +3746,10 @@ public Properties getAllPropertiesByTags(final List<PropertyTag> tagList) {
* Get Property tag Enum corresponding to given source.
*
* @param tagStr String representation of Enum
* @param group Group to which enum belongs.Ex hdfs,yarn
* @return Properties with all matching properties
* @return true if tagStr is a valid tag
*/
private PropertyTag getPropertyTag(String tagStr, String group) {
PropertyTag tag = null;
if (REGISTERED_TAG_CLASS.containsKey(group)) {
tag = (PropertyTag) Enum.valueOf(REGISTERED_TAG_CLASS.get(group), tagStr);
}
return tag;
public boolean isPropertyTag(String tagStr) {
return this.TAGS.contains(tagStr);
}

private void putIntoUpdatingResource(String key, String[] value) {
@@ -1,37 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
* licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.hadoop.conf;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;

/***************************************************************************
* Enum for tagging hadoop core properties according to there usage.
* CorePropertyTag implements the
* {@link org.apache.hadoop.conf.PropertyTag} interface,
***************************************************************************/
@InterfaceAudience.Private
@InterfaceStability.Evolving
public enum CorePropertyTag implements PropertyTag {
CORE,
REQUIRED,
PERFORMANCE,
CLIENT,
SERVER,
SECURITY,
DEBUG
}
@@ -1,41 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
* licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.hadoop.conf;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;

/***************************************************************************
* Enum for tagging hdfs properties according to there usage or application.
* HDFSPropertyTag implements the
* {@link org.apache.hadoop.conf.PropertyTag} interface,
***************************************************************************/
@InterfaceAudience.Private
@InterfaceStability.Evolving
public enum HDFSPropertyTag implements PropertyTag {
HDFS,
NAMENODE,
DATANODE,
REQUIRED,
SECURITY,
KERBEROS,
PERFORMANCE,
CLIENT,
SERVER,
DEBUG,
DEPRICATED
}
@@ -1,30 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
* licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.hadoop.conf;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;

/***********************************************************
* PropertyTag is used for creating extendable property tag Enums.
* Property tags will group related properties together.
***********************************************************/
@InterfaceAudience.Private
@InterfaceStability.Evolving
public interface PropertyTag {

}
@@ -1,39 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
* licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.hadoop.conf;

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;

/***************************************************************************
* Enum for tagging yarn properties according to there usage or application.
* YarnPropertyTag implements the
* {@link org.apache.hadoop.conf.PropertyTag} interface,
***************************************************************************/
@InterfaceAudience.Private
@InterfaceStability.Evolving
public enum YarnPropertyTag implements PropertyTag {
YARN,
RESOURCEMANAGER,
SECURITY,
KERBEROS,
PERFORMANCE,
CLIENT,
REQUIRED,
SERVER,
DEBUG
}
@@ -860,5 +860,7 @@ public class CommonConfigurationKeysPublic {
"credential$",
"oauth.*token$",
HADOOP_SECURITY_SENSITIVE_CONFIG_KEYS);
public static final String HADOOP_SYSTEM_TAGS = "hadoop.system.tags";
public static final String HADOOP_CUSTOM_TAGS = "hadoop.custom.tags";
}
@@ -2959,4 +2959,12 @@
HADOOP-13805,HADOOP-13558.
</description>
</property>
<property>
<name>hadoop.system.tags</name>
<value>YARN,HDFS,NAMENODE,DATANODE,REQUIRED,SECURITY,KERBEROS,PERFORMANCE,CLIENT
,SERVER,DEBUG,DEPRICATED,COMMON,OPTIONAL</value>
<description>
System tags to group related properties together.
</description>
</property>
</configuration>
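Building on the hadoop.system.tags entry above, a deployment could declare its own tags and reuse them on properties in core-site.xml; the snippet below is a hypothetical sketch (property name and values are made up):

    <property>
      <name>hadoop.custom.tags</name>
      <value>MYCUSTOMTAG</value>
    </property>

    <property>
      <name>my.example.setting</name>
      <value>true</value>
      <tag>MYCUSTOMTAG,DEBUG</tag>
    </property>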
@@ -26,11 +26,14 @@
import java.io.IOException;
import java.io.InputStream;
import java.io.OutputStreamWriter;
import java.io.PrintStream;
import java.io.StringWriter;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.URI;
import java.nio.charset.StandardCharsets;
import java.nio.file.Files;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collection;
@@ -2339,22 +2342,29 @@ public static void main(String[] argv) throws Exception {
@Test
public void testGetAllPropertiesByTags() throws Exception {

out = new BufferedWriter(new FileWriter(CONFIG_CORE));
startConfig();
appendPropertyByTag("dfs.cblock.trace.io", "false", "DEBUG");
appendPropertyByTag("dfs.replication", "1", "PERFORMANCE,REQUIRED");
appendPropertyByTag("dfs.namenode.logging.level", "INFO", "CLIENT,DEBUG");
endConfig();
try{
out = new BufferedWriter(new FileWriter(CONFIG_CORE));
startConfig();
appendProperty("hadoop.system.tags", "YARN,HDFS,NAMENODE");
appendProperty("hadoop.custom.tags", "MYCUSTOMTAG");
appendPropertyByTag("dfs.cblock.trace.io", "false", "YARN");
appendPropertyByTag("dfs.replication", "1", "HDFS");
appendPropertyByTag("dfs.namenode.logging.level", "INFO", "NAMENODE");
appendPropertyByTag("dfs.random.key", "XYZ", "MYCUSTOMTAG");
endConfig();

Path fileResource = new Path(CONFIG_CORE);
conf.addResource(fileResource);
conf.getProps();
Path fileResource = new Path(CONFIG_CORE);
conf.addResource(fileResource);
conf.getProps();

List<PropertyTag> tagList = new ArrayList<>();
tagList.add(CorePropertyTag.REQUIRED);
tagList.add(CorePropertyTag.PERFORMANCE);
tagList.add(CorePropertyTag.DEBUG);
tagList.add(CorePropertyTag.CLIENT);
} finally {
out.close();
}
System.out.println(Files.readAllLines(Paths.get(CONFIG_CORE)));
List<String> tagList = new ArrayList<>();
tagList.add("YARN");
tagList.add("HDFS");
tagList.add("NAMENODE");

Properties properties = conf.getAllPropertiesByTags(tagList);
String[] sources = conf.getPropertySources("dfs.replication");
@@ -2366,58 +2376,45 @@ public void testGetAllPropertiesByTags() throws Exception {
assertEq(true, properties.containsKey("dfs.replication"));
assertEq(true, properties.containsKey("dfs.cblock.trace.io"));
assertEq(false, properties.containsKey("namenode.host"));

properties = conf.getAllPropertiesByTag("DEBUG");
assertEq(0, properties.size());
assertEq(false, properties.containsKey("dfs.namenode.logging.level"));
assertEq(true, conf.isPropertyTag("YARN"));
assertEq(true, conf.isPropertyTag("HDFS"));
assertEq(true, conf.isPropertyTag("NAMENODE"));
assertEq(true, conf.isPropertyTag("MYCUSTOMTAG"));
assertEq(false, conf.isPropertyTag("CMYCUSTOMTAG2"));
}

@Test
public void testGetAllPropertiesWithSourceByTags() throws Exception {
public void testInvalidTags() throws Exception {
PrintStream output = System.out;
try {
ByteArrayOutputStream bytes = new ByteArrayOutputStream();
System.setOut(new PrintStream(bytes));

out = new BufferedWriter(new FileWriter(CONFIG));
startConfig();
appendPropertyByTag("dfs.cblock.trace.io", "false", "DEBUG",
"hdfs-default.xml", "core-site.xml");
appendPropertyByTag("dfs.replication", "1", "PERFORMANCE,HDFS",
"hdfs-default.xml");
appendPropertyByTag("yarn.resourcemanager.work-preserving-recovery"
+ ".enabled", "INFO", "CLIENT,DEBUG", "yarn-default.xml", "yarn-site"
+ ".xml");
endConfig();
out = new BufferedWriter(new FileWriter(CONFIG));
startConfig();
appendPropertyByTag("dfs.cblock.trace.io", "false", "MYOWNTAG,TAG2");
endConfig();

Path fileResource = new Path(CONFIG);
conf.addResource(fileResource);
conf.getProps();
Path fileResource = new Path(CONFIG);
conf.addResource(fileResource);
conf.getProps();

List<PropertyTag> tagList = new ArrayList<>();
tagList.add(CorePropertyTag.REQUIRED);
List<String> tagList = new ArrayList<>();
tagList.add("REQUIRED");
tagList.add("MYOWNTAG");
tagList.add("TAG2");

Properties properties;
properties = conf.getAllPropertiesByTags(tagList);
assertNotEquals(3, properties.size());

tagList.add(HDFSPropertyTag.DEBUG);
tagList.add(YarnPropertyTag.CLIENT);
tagList.add(HDFSPropertyTag.PERFORMANCE);
tagList.add(HDFSPropertyTag.HDFS);
properties = conf.getAllPropertiesByTags(tagList);
assertEq(3, properties.size());

assertEq(true, properties.containsKey("dfs.cblock.trace.io"));
assertEq(true, properties.containsKey("dfs.replication"));
assertEq(true, properties
.containsKey("yarn.resourcemanager.work-preserving-recovery.enabled"));
assertEq(false, properties.containsKey("namenode.host"));

tagList.clear();
tagList.add(HDFSPropertyTag.DEBUG);
properties = conf.getAllPropertiesByTags(tagList);
assertEq(true, properties.containsKey("dfs.cblock.trace.io"));
assertEq(false, properties.containsKey("yarn.resourcemanager"
+ ".work-preserving-recovery"));

tagList.clear();
tagList.add(YarnPropertyTag.DEBUG);
properties = conf.getAllPropertiesByTags(tagList);
assertEq(false, properties.containsKey("dfs.cblock.trace.io"));
assertEq(true, properties.containsKey("yarn.resourcemanager"
+ ".work-preserving-recovery.enabled"));
Properties properties = conf.getAllPropertiesByTags(tagList);
assertEq(0, properties.size());
assertFalse(properties.containsKey("dfs.cblock.trace.io"));
assertFalse(bytes.toString().contains("Invalid tag "));
assertFalse(bytes.toString().contains("Tag"));
} finally {
System.setOut(output);
}
}
}