diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
index a339dac6fe1..28908530ac8 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
@@ -81,6 +81,7 @@ import javax.xml.transform.stream.StreamResult;
 
 import com.google.common.base.Charsets;
 import org.apache.commons.collections.map.UnmodifiableMap;
+import org.apache.commons.io.FilenameUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
@@ -2811,6 +2812,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
       String confName = null;
       String confValue = null;
       String confInclude = null;
+      String confTag = null;
       boolean confFinal = false;
       boolean fallbackAllowed = false;
       boolean fallbackEntered = false;
@@ -2825,6 +2827,7 @@
           confName = null;
           confValue = null;
           confFinal = false;
+          confTag = null;
           confSource.clear();
 
           // First test for short format configuration
@@ -2843,9 +2846,8 @@
                confSource.add(StringInterner.weakIntern(
                    reader.getAttributeValue(i)));
              } else if ("tag".equals(propertyAttr)) {
-               //Read tags and put them in propertyTagsMap
-               readTagFromConfig(reader.getAttributeValue(i), confName,
-                   confValue, confSource);
+               confTag = StringInterner
+                   .weakIntern(reader.getAttributeValue(i));
              }
            }
            break;
@@ -2937,9 +2939,7 @@
            break;
          case "tag":
            if (token.length() > 0) {
-             //Read tags and put them in propertyTagsMap
-             readTagFromConfig(token.toString(), confName,
-                 confValue, confSource);
+             confTag = StringInterner.weakIntern(token.toString());
            }
            break;
          case "include":
@@ -2956,6 +2956,11 @@
              break;
            }
            confSource.add(name);
+           //Read tags and put them in propertyTagsMap
+           if (confTag != null) {
+             readTagFromConfig(confTag, confName, confValue, confSource);
+           }
+
            DeprecatedKeyInfo keyInfo =
                deprecations.getDeprecatedKeyMap().get(confName);
            if (keyInfo != null) {
@@ -3001,21 +3006,24 @@
        if (confSource.size() > 0) {
          for (String source : confSource) {
            PropertyTag tag1 = this.getPropertyTag(tagStr,
-               source.split("-")[0]);
-           if (propertyTagsMap.containsKey(tag1)) {
-             propertyTagsMap.get(tag1)
-                 .setProperty(confName, confValue);
-           } else {
-             Properties props = new Properties();
-             props.setProperty(confName, confValue);
-             propertyTagsMap.put(tag1, props);
+               FilenameUtils.getName(source).split("-")[0]);
+           if (tag1 != null) {
+             //Handle property with no/null value
+             if (confValue == null) {
+               confValue = "";
+             }
+             if (propertyTagsMap.containsKey(tag1)) {
+               propertyTagsMap.get(tag1).setProperty(confName, confValue);
+             } else {
+               Properties props = new Properties();
+               props.setProperty(confName, confValue);
+               propertyTagsMap.put(tag1, props);
+             }
            }
          }
        } else {
-         //If no source is set try to find tag in CorePropertyTag
-         if (propertyTagsMap
-             .containsKey(CorePropertyTag.valueOf(tagStr)
-             )) {
+         // If no source is set try to find tag in CorePropertyTag
+         if (propertyTagsMap.containsKey(CorePropertyTag.valueOf(tagStr))) {
            propertyTagsMap.get(CorePropertyTag.valueOf(tagStr))
                .setProperty(confName, confValue);
          } else {
@@ -3025,11 +3033,11 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
                props);
          }
        }
-     } catch (IllegalArgumentException iae) {
-       //Log the invalid tag and continue to parse rest of the
-       // properties.
+     } catch (Exception ex) {
+       // Log the invalid tag and continue to parse rest of the properties.
        LOG.info("Invalid tag '" + tagStr + "' found for "
-           + "property:" + confName, iae);
+           + "property:" + confName + " Source:" + Arrays
+           .toString(confSource.toArray()), ex);
      }
    }
  }
diff --git a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java
index 4cd1666eca2..a806b8cb670 100644
--- a/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java
+++ b/hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/conf/TestConfiguration.java
@@ -77,6 +77,8 @@ public class TestConfiguration {
   private Configuration conf;
   final static String CONFIG = new File("./test-config-TestConfiguration.xml").getAbsolutePath();
   final static String CONFIG2 = new File("./test-config2-TestConfiguration.xml").getAbsolutePath();
+  final static String CONFIG_CORE = new File("./core-site.xml")
+      .getAbsolutePath();
   final static String CONFIG_FOR_ENUM = new File("./test-config-enum-TestConfiguration.xml").getAbsolutePath();
   final static String CONFIG_FOR_URI = "file://" +
       new File("./test-config-uri-TestConfiguration.xml").getAbsolutePath();
@@ -114,6 +116,7 @@ public class TestConfiguration {
     new File(new URI(CONFIG_FOR_URI)).delete();
     new File(CONFIG_MULTI_BYTE).delete();
     new File(CONFIG_MULTI_BYTE_SAVED).delete();
+    new File(CONFIG_CORE).delete();
   }
 
   private void startConfig() throws IOException{
@@ -2248,14 +2251,14 @@
 
   @Test
   public void testGetAllPropertiesByTags() throws Exception {
 
-    out = new BufferedWriter(new FileWriter(CONFIG));
+    out = new BufferedWriter(new FileWriter(CONFIG_CORE));
     startConfig();
     appendPropertyByTag("dfs.cblock.trace.io", "false", "DEBUG");
     appendPropertyByTag("dfs.replication", "1", "PERFORMANCE,REQUIRED");
     appendPropertyByTag("dfs.namenode.logging.level", "INFO", "CLIENT,DEBUG");
     endConfig();
 
-    Path fileResource = new Path(CONFIG);
+    Path fileResource = new Path(CONFIG_CORE);
     conf.addResource(fileResource);
     conf.getProps();
@@ -2266,6 +2269,10 @@
     tagList.add(CorePropertyTag.CLIENT);
 
     Properties properties = conf.getAllPropertiesByTags(tagList);
+    String[] sources = conf.getPropertySources("dfs.replication");
+    assertTrue(sources.length == 1);
+    assertTrue(Arrays.toString(sources).contains("core-site.xml"));
+
     assertEq(3, properties.size());
     assertEq(true, properties.containsKey("dfs.namenode.logging.level"));
     assertEq(true, properties.containsKey("dfs.replication"));
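
A minimal usage sketch of the tag lookup that the new test exercises. It is not part of the patch; the resource path is hypothetical, and it assumes PropertyTag and CorePropertyTag live in org.apache.hadoop.conf, as the unqualified references in TestConfiguration suggest.

// Usage sketch only (not part of the patch). Tagged properties, as written by
// appendPropertyByTag() in the test, look like:
//   <property>
//     <name>dfs.replication</name>
//     <value>1</value>
//     <tag>PERFORMANCE,REQUIRED</tag>
//   </property>
import java.util.ArrayList;
import java.util.List;
import java.util.Properties;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.CorePropertyTag; // assumed package, per the unqualified use in the test
import org.apache.hadoop.conf.PropertyTag;     // assumed package
import org.apache.hadoop.fs.Path;

public class TagLookupSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Hypothetical resource path; the patched readTagFromConfig() derives the tag
    // family from the file name (FilenameUtils.getName(source).split("-")[0]),
    // so a core-site.xml should resolve to CorePropertyTag.
    conf.addResource(new Path("/etc/hadoop/conf/core-site.xml"));

    // Force the resource to load; the test does this with conf.getProps(),
    // which is protected and only callable from the org.apache.hadoop.conf package.
    conf.get("dfs.replication");

    List<PropertyTag> tags = new ArrayList<>();
    tags.add(CorePropertyTag.REQUIRED);
    tags.add(CorePropertyTag.PERFORMANCE);

    // Collects every property whose tag list contains one of the requested tags,
    // as exercised by testGetAllPropertiesByTags() above.
    Properties tagged = conf.getAllPropertiesByTags(tags);
    tagged.forEach((k, v) -> System.out.println(k + " = " + v));
  }
}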