HDFS-12350. Support meta tags in configs. Contributed by Ajay Kumar.

Anu Engineer 2017-09-07 12:40:09 -07:00
parent 83449abb40
commit a4cd101934
6 changed files with 373 additions and 2 deletions
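At a glance, the change lets a configuration resource attach one or more tags to a property through a new &lt;tag&gt; element, which the XML parser now collects into a per-tag property map. A sketch of such an entry (the property name, value, and source are taken from the tests below; the layout is illustrative, not copied from the patch):

<property>
  <name>dfs.replication</name>
  <value>1</value>
  <source>hdfs-default.xml</source>
  <tag>PERFORMANCE,HDFS</tag>
</property>

The optional <source> element decides which registered tag enum the tag names are resolved against (here the "hdfs" prefix selects HDFSPropertyTag); without a source, the tags fall back to CorePropertyTag.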

Configuration.java

@@ -259,7 +259,18 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
*/
private static final WeakHashMap<Configuration,Object> REGISTRY =
new WeakHashMap<Configuration,Object>();
/**
* Map to register all classes holding property tag enums.
*/
private static final Map<String, Class>
REGISTERED_TAG_CLASS = new HashMap<>();
/**
* Map to hold properties by their tag groupings.
*/
private final Map<PropertyTag, Properties> propertyTagsMap =
new ConcurrentHashMap<>();
/**
* List of default Resources. Resources are loaded in the order of the list
* entries
@@ -738,6 +749,12 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
public Configuration(boolean loadDefaults) {
this.loadDefaults = loadDefaults;
updatingResource = new ConcurrentHashMap<String, String[]>();
// Register all classes holding property tag enums.
REGISTERED_TAG_CLASS.put("core", CorePropertyTag.class);
REGISTERED_TAG_CLASS.put("hdfs", HDFSPropertyTag.class);
REGISTERED_TAG_CLASS.put("yarn", YarnPropertyTag.class);
synchronized(Configuration.class) {
REGISTRY.put(this, null);
}
@@ -765,6 +782,8 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
this.finalParameters = Collections.newSetFromMap(
new ConcurrentHashMap<String, Boolean>());
this.finalParameters.addAll(other.finalParameters);
this.REGISTERED_TAG_CLASS.putAll(other.REGISTERED_TAG_CLASS);
this.propertyTagsMap.putAll(other.propertyTagsMap);
}
synchronized(Configuration.class) {
@@ -2823,6 +2842,10 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
} else if ("source".equals(propertyAttr)) {
confSource.add(StringInterner.weakIntern(
reader.getAttributeValue(i)));
} else if ("tag".equals(propertyAttr)) {
//Read tags and put them in propertyTagsMap
readTagFromConfig(reader.getAttributeValue(i), confName,
confValue, confSource);
}
}
break;
@@ -2830,6 +2853,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
case "value":
case "final":
case "source":
case "tag":
parseToken = true;
token.setLength(0);
break;
@@ -2911,6 +2935,13 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
case "source":
confSource.add(StringInterner.weakIntern(token.toString()));
break;
case "tag":
if (token.length() > 0) {
//Read tags and put them in propertyTagsMap
readTagFromConfig(token.toString(), confName,
confValue, confSource);
}
break;
case "include":
if (fallbackAllowed && !fallbackEntered) {
throw new IOException("Fetch fail on include for '"
@@ -2962,6 +2993,48 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
}
}
private void readTagFromConfig(String attributeValue, String confName, String
confValue, List<String> confSource) {
for (String tagStr : attributeValue.split(",")) {
tagStr = tagStr.trim();
try {
if (confSource.size() > 0) {
for (String source : confSource) {
PropertyTag tag1 = this.getPropertyTag(tagStr,
source.split("-")[0]);
if (propertyTagsMap.containsKey(tag1)) {
propertyTagsMap.get(tag1)
.setProperty(confName, confValue);
} else {
Properties props = new Properties();
props.setProperty(confName, confValue);
propertyTagsMap.put(tag1, props);
}
}
} else {
//If no source is set try to find tag in CorePropertyTag
if (propertyTagsMap
.containsKey(CorePropertyTag.valueOf(tagStr)
)) {
propertyTagsMap.get(CorePropertyTag.valueOf(tagStr))
.setProperty(confName, confValue);
} else {
Properties props = new Properties();
props.setProperty(confName, confValue);
propertyTagsMap.put(CorePropertyTag.valueOf(tagStr),
props);
}
}
} catch (IllegalArgumentException iae) {
//Log the invalid tag and continue to parse rest of the
// properties.
LOG.info("Invalid tag '" + tagStr + "' found for "
+ "property:" + confName, iae);
}
}
}
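A minimal, standalone walk-through (not part of the patch; the class name TagLookupSketch is invented) of the lookup readTagFromConfig performs when a <source> is present: the text before the first '-' in the source name selects the registered tag class, and the tag string is then resolved against that enum, as getPropertyTag does further down in this file.

import org.apache.hadoop.conf.HDFSPropertyTag;
import org.apache.hadoop.conf.PropertyTag;

public class TagLookupSketch {
  public static void main(String[] args) {
    String source = "hdfs-default.xml";
    // The group prefix before the first '-' picks the tag enum ("hdfs" -> HDFSPropertyTag).
    String group = source.split("-")[0];
    // Resolve the tag string against the selected enum, as getPropertyTag does.
    PropertyTag tag = Enum.valueOf(HDFSPropertyTag.class, "PERFORMANCE");
    System.out.println(group + " -> " + tag); // prints: hdfs -> PERFORMANCE
  }
}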
private void overlay(Properties to, Properties from) {
for (Entry<Object, Object> entry: from.entrySet()) {
to.put(entry.getKey(), entry.getValue());
@@ -3438,4 +3511,45 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
}
return false;
}
/**
* Get all properties belonging to the given tag.
*
* @param tag PropertyTag whose properties are requested.
* @return Properties containing all matching properties.
*/
public Properties getAllPropertiesByTag(final PropertyTag tag) {
Properties props = new Properties();
if (propertyTagsMap.containsKey(tag)) {
props.putAll(propertyTagsMap.get(tag));
}
return props;
}
/**
* Get all properties belonging to the list of input tags. Calls
* getAllPropertiesByTag internally.
*
* @param tagList list of PropertyTags to look up.
* @return Properties containing all matching properties.
*/
public Properties getAllPropertiesByTags(final List<PropertyTag> tagList) {
Properties prop = new Properties();
for (PropertyTag tag : tagList) {
prop.putAll(this.getAllPropertiesByTag(tag));
}
return prop;
}
/**
* Get the PropertyTag enum constant corresponding to the given tag string
* and source group.
*
* @param tagStr String representation of the enum constant.
* @param group Group to which the enum belongs, e.g. hdfs, yarn.
* @return Matching PropertyTag, or null if the group is not registered.
*/
private PropertyTag getPropertyTag(String tagStr, String group) {
PropertyTag tag = null;
if (REGISTERED_TAG_CLASS.containsKey(group)) {
tag = (PropertyTag) Enum.valueOf(REGISTERED_TAG_CLASS.get(group), tagStr);
}
return tag;
}
}
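Taken together, the new public methods might be exercised roughly as in the following sketch (a hypothetical illustration; the resource name my-conf.xml and the chosen tags are assumptions, not part of the patch):

import java.util.ArrayList;
import java.util.List;
import java.util.Properties;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.CorePropertyTag;
import org.apache.hadoop.conf.PropertyTag;
import org.apache.hadoop.fs.Path;

public class TaggedConfigSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration(false);
    conf.addResource(new Path("my-conf.xml")); // assumed to contain <tag> elements
    // Force the lazy resource parse before querying tags, mirroring the
    // explicit conf.getProps() call in the tests.
    conf.size();

    // Properties whose <tag> resolved to CorePropertyTag.DEBUG.
    Properties debugProps = conf.getAllPropertiesByTag(CorePropertyTag.DEBUG);

    // Union of properties tagged PERFORMANCE or REQUIRED.
    List<PropertyTag> tags = new ArrayList<>();
    tags.add(CorePropertyTag.PERFORMANCE);
    tags.add(CorePropertyTag.REQUIRED);
    Properties perfProps = conf.getAllPropertiesByTags(tags);

    System.out.println(debugProps);
    System.out.println(perfProps);
  }
}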

CorePropertyTag.java

@@ -0,0 +1,37 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
* licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.hadoop.conf;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/***************************************************************************
* Enum for tagging Hadoop core properties according to their usage.
* CorePropertyTag implements the
* {@link org.apache.hadoop.conf.PropertyTag} interface.
***************************************************************************/
@InterfaceAudience.Private
@InterfaceStability.Evolving
public enum CorePropertyTag implements PropertyTag {
CORE,
REQUIRED,
PERFORMANCE,
CLIENT,
SERVER,
SECURITY,
DEBUG
}

HDFSPropertyTag.java

@@ -0,0 +1,41 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
* licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.hadoop.conf;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/***************************************************************************
* Enum for tagging HDFS properties according to their usage or application.
* HDFSPropertyTag implements the
* {@link org.apache.hadoop.conf.PropertyTag} interface.
***************************************************************************/
@InterfaceAudience.Private
@InterfaceStability.Evolving
public enum HDFSPropertyTag implements PropertyTag {
HDFS,
NAMENODE,
DATANODE,
REQUIRED,
SECURITY,
KERBEROS,
PERFORMANCE,
CLIENT,
SERVER,
DEBUG,
DEPRICATED
}

PropertyTag.java

@@ -0,0 +1,30 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
* licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.hadoop.conf;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/***********************************************************
* PropertyTag is used for creating extensible property tag enums.
* Property tags group related properties together.
***********************************************************/
@InterfaceAudience.Private
@InterfaceStability.Evolving
public interface PropertyTag {
}
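Because PropertyTag is a plain marker interface, other Hadoop components could contribute their own grouping enums in the same way; a hypothetical sketch (OzonePropertyTag and its constants are invented for illustration, not part of this patch):

import org.apache.hadoop.conf.PropertyTag;

public enum OzonePropertyTag implements PropertyTag {
  OZONE,
  REQUIRED,
  SECURITY,
  DEBUG
}

For <source>-based lookup to resolve such an enum, it would also need an entry in REGISTERED_TAG_CLASS, which this patch populates with the core, hdfs, and yarn groups in the Configuration constructor.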

YarnPropertyTag.java

@@ -0,0 +1,39 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
* licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.hadoop.conf;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/***************************************************************************
* Enum for tagging YARN properties according to their usage or application.
* YarnPropertyTag implements the
* {@link org.apache.hadoop.conf.PropertyTag} interface.
***************************************************************************/
@InterfaceAudience.Private
@InterfaceStability.Evolving
public enum YarnPropertyTag implements PropertyTag {
YARN,
RESOURCEMANAGER,
SECURITY,
KERBEROS,
PERFORMANCE,
CLIENT,
REQUIRED,
SERVER,
DEBUG
}

TestConfiguration.java

@@ -38,6 +38,7 @@ import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Random;
import java.util.Set;
import java.util.regex.Pattern;
@@ -53,7 +54,6 @@ import static org.junit.Assert.assertArrayEquals;
import org.apache.commons.lang.StringUtils;
import org.apache.hadoop.conf.Configuration.IntegerRanges;
import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
@@ -135,6 +135,7 @@ public class TestConfiguration {
private void endConfig() throws IOException{
out.write("</configuration>\n");
out.flush();
out.close();
}
@@ -577,6 +578,34 @@ public class TestConfiguration {
out.write("</property>\n");
}
private void appendPropertyByTag(String name, String val, String tags,
String... sources) throws IOException {
appendPropertyByTag(name, val, false, tags, sources);
}
private void appendPropertyByTag(String name, String val, boolean isFinal,
String tag, String... sources) throws IOException {
out.write("<property>");
out.write("<name>");
out.write(name);
out.write("</name>");
out.write("<value>");
out.write(val);
out.write("</value>");
if (isFinal) {
out.write("<final>true</final>");
}
for (String s : sources) {
out.write("<source>");
out.write(s);
out.write("</source>");
}
out.write("<tag>");
out.write(tag);
out.write("</tag>");
out.write("</property>\n");
}
void appendCompactFormatProperty(String name, String val) throws IOException {
appendCompactFormatProperty(name, val, false);
}
@@ -2215,4 +2244,85 @@ public class TestConfiguration {
TestConfiguration.class.getName()
});
}
@Test
public void testGetAllPropertiesByTags() throws Exception {
out = new BufferedWriter(new FileWriter(CONFIG));
startConfig();
appendPropertyByTag("dfs.cblock.trace.io", "false", "DEBUG");
appendPropertyByTag("dfs.replication", "1", "PERFORMANCE,REQUIRED");
appendPropertyByTag("dfs.namenode.logging.level", "INFO", "CLIENT,DEBUG");
endConfig();
Path fileResource = new Path(CONFIG);
conf.addResource(fileResource);
conf.getProps();
List<PropertyTag> tagList = new ArrayList<>();
tagList.add(CorePropertyTag.REQUIRED);
tagList.add(CorePropertyTag.PERFORMANCE);
tagList.add(CorePropertyTag.DEBUG);
tagList.add(CorePropertyTag.CLIENT);
Properties properties = conf.getAllPropertiesByTags(tagList);
assertEq(3, properties.size());
assertEq(true, properties.containsKey("dfs.namenode.logging.level"));
assertEq(true, properties.containsKey("dfs.replication"));
assertEq(true, properties.containsKey("dfs.cblock.trace.io"));
assertEq(false, properties.containsKey("namenode.host"));
}
@Test
public void testGetAllPropertiesWithSourceByTags() throws Exception {
out = new BufferedWriter(new FileWriter(CONFIG));
startConfig();
appendPropertyByTag("dfs.cblock.trace.io", "false", "DEBUG",
"hdfs-default.xml", "core-site.xml");
appendPropertyByTag("dfs.replication", "1", "PERFORMANCE,HDFS",
"hdfs-default.xml");
appendPropertyByTag("yarn.resourcemanager.work-preserving-recovery"
+ ".enabled", "INFO", "CLIENT,DEBUG", "yarn-default.xml", "yarn-site"
+ ".xml");
endConfig();
Path fileResource = new Path(CONFIG);
conf.addResource(fileResource);
conf.getProps();
List<PropertyTag> tagList = new ArrayList<>();
tagList.add(CorePropertyTag.REQUIRED);
Properties properties;
properties = conf.getAllPropertiesByTags(tagList);
assertNotEquals(3, properties.size());
tagList.add(HDFSPropertyTag.DEBUG);
tagList.add(YarnPropertyTag.CLIENT);
tagList.add(HDFSPropertyTag.PERFORMANCE);
tagList.add(HDFSPropertyTag.HDFS);
properties = conf.getAllPropertiesByTags(tagList);
assertEq(3, properties.size());
assertEq(true, properties.containsKey("dfs.cblock.trace.io"));
assertEq(true, properties.containsKey("dfs.replication"));
assertEq(true, properties
.containsKey("yarn.resourcemanager.work-preserving-recovery.enabled"));
assertEq(false, properties.containsKey("namenode.host"));
tagList.clear();
tagList.add(HDFSPropertyTag.DEBUG);
properties = conf.getAllPropertiesByTags(tagList);
assertEq(true, properties.containsKey("dfs.cblock.trace.io"));
assertEq(false, properties.containsKey("yarn.resourcemanager"
+ ".work-preserving-recovery"));
tagList.clear();
tagList.add(YarnPropertyTag.DEBUG);
properties = conf.getAllPropertiesByTags(tagList);
assertEq(false, properties.containsKey("dfs.cblock.trace.io"));
assertEq(true, properties.containsKey("yarn.resourcemanager"
+ ".work-preserving-recovery.enabled"));
}
}