HADOOP-13660. Upgrade commons-configuration version. Contributed by Sean Mackrory.

This commit is contained in:
Wei-Chiu Chuang 2016-11-17 22:48:35 -06:00
parent 09520cb439
commit c0b1a44f6c
28 changed files with 94 additions and 68 deletions

View File

@ -410,4 +410,10 @@
<Field name="done"/>
<Bug pattern="JLM_JSR166_UTILCONCURRENT_MONITORENTER"/>
</Match>
<Match>
<Class name="org.apache.hadoop.metrics2.impl.MetricsConfig"/>
<Method name="toString"/>
<Bug pattern="DM_DEFAULT_ENCODING"/>
</Match>
</FindBugsFilter>

View File

@ -173,8 +173,13 @@
<scope>compile</scope>
</dependency>
<dependency>
<groupId>commons-configuration</groupId>
<artifactId>commons-configuration</artifactId>
<groupId>commons-beanutils</groupId>
<artifactId>commons-beanutils</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-configuration2</artifactId>
<scope>compile</scope>
</dependency>
<dependency>

View File

@ -18,7 +18,7 @@
package org.apache.hadoop.metrics2;
import org.apache.commons.configuration.SubsetConfiguration;
import org.apache.commons.configuration2.SubsetConfiguration;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;

View File

@ -21,7 +21,7 @@
import java.util.Map;
import com.google.common.collect.Maps;
import org.apache.commons.configuration.SubsetConfiguration;
import org.apache.commons.configuration2.SubsetConfiguration;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.metrics2.MetricsException;

View File

@ -19,13 +19,12 @@
package org.apache.hadoop.metrics2.impl;
import java.io.ByteArrayOutputStream;
import java.io.PrintStream;
import java.io.PrintWriter;
import java.net.URL;
import java.net.URLClassLoader;
import static java.security.AccessController.*;
import java.security.PrivilegedAction;
import java.util.Iterator;
import java.util.Locale;
import java.util.Map;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
@ -35,10 +34,13 @@
import com.google.common.collect.Iterables;
import com.google.common.collect.Maps;
import org.apache.commons.configuration.Configuration;
import org.apache.commons.configuration.ConfigurationException;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.commons.configuration.SubsetConfiguration;
import org.apache.commons.configuration2.Configuration;
import org.apache.commons.configuration2.PropertiesConfiguration;
import org.apache.commons.configuration2.SubsetConfiguration;
import org.apache.commons.configuration2.builder.fluent.Configurations;
import org.apache.commons.configuration2.builder.fluent.Parameters;
import org.apache.commons.configuration2.convert.DefaultListDelimiterHandler;
import org.apache.commons.configuration2.ex.ConfigurationException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.metrics2.MetricsFilter;
@ -110,15 +112,20 @@ static MetricsConfig create(String prefix, String... fileNames) {
static MetricsConfig loadFirst(String prefix, String... fileNames) {
for (String fname : fileNames) {
try {
Configuration cf = new PropertiesConfiguration(fname)
.interpolatedConfiguration();
Configuration cf = new Configurations().propertiesBuilder(fname)
.configure(new Parameters().properties()
.setFileName(fname)
.setListDelimiterHandler(new DefaultListDelimiterHandler(',')))
.getConfiguration()
.interpolatedConfiguration();
LOG.info("loaded properties from "+ fname);
LOG.debug(toString(cf));
MetricsConfig mc = new MetricsConfig(cf, prefix);
LOG.debug(mc);
return mc;
} catch (ConfigurationException e) {
if (e.getMessage().startsWith("Cannot locate configuration")) {
// Commons Configuration defines the message text when file not found
if (e.getMessage().startsWith("Could not locate")) {
continue;
}
throw new MetricsConfigException(e);
@ -175,8 +182,8 @@ public Iterator<String> iterator() {
* @return the value or null
*/
@Override
public Object getProperty(String key) {
Object value = super.getProperty(key);
public Object getPropertyInternal(String key) {
Object value = super.getPropertyInternal(key);
if (value == null) {
if (LOG.isDebugEnabled()) {
LOG.debug("poking parent '"+ getParent().getClass().getSimpleName() +
@ -249,11 +256,6 @@ ClassLoader getPluginLoader() {
return defaultLoader;
}
@Override public void clear() {
super.clear();
// pluginLoader.close(); // jdk7 is saner
}
MetricsFilter getFilter(String prefix) {
// don't create filter instances without options
MetricsConfig conf = subset(prefix);
@ -274,10 +276,10 @@ public String toString() {
static String toString(Configuration c) {
ByteArrayOutputStream buffer = new ByteArrayOutputStream();
try {
PrintStream ps = new PrintStream(buffer, false, "UTF-8");
PrintWriter pw = new PrintWriter(buffer, false);
PropertiesConfiguration tmp = new PropertiesConfiguration();
tmp.copy(c);
tmp.save(ps);
tmp.write(pw);
return buffer.toString("UTF-8");
} catch (Exception e) {
throw new MetricsConfigException(e);

View File

@ -36,7 +36,7 @@
import java.util.Locale;
import static com.google.common.base.Preconditions.*;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.commons.configuration2.PropertiesConfiguration;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.commons.math3.util.ArithmeticUtils;
@ -347,7 +347,7 @@ public synchronized String currentConfig() {
PropertiesConfiguration saver = new PropertiesConfiguration();
StringWriter writer = new StringWriter();
saver.copy(config);
try { saver.save(writer); }
try { saver.write(writer); }
catch (Exception e) {
throw new MetricsConfigException("Error stringify config", e);
}

View File

@ -24,7 +24,7 @@
import java.io.IOException;
import java.io.PrintStream;
import org.apache.commons.configuration.SubsetConfiguration;
import org.apache.commons.configuration2.SubsetConfiguration;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.metrics2.AbstractMetric;

View File

@ -18,7 +18,7 @@
package org.apache.hadoop.metrics2.sink;
import org.apache.commons.configuration.SubsetConfiguration;
import org.apache.commons.configuration2.SubsetConfiguration;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;

View File

@ -36,7 +36,7 @@
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.configuration.SubsetConfiguration;
import org.apache.commons.configuration2.SubsetConfiguration;
import org.apache.commons.lang.time.FastDateFormat;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;

View File

@ -25,7 +25,7 @@
import java.net.InetSocketAddress;
import java.nio.charset.StandardCharsets;
import org.apache.commons.configuration.SubsetConfiguration;
import org.apache.commons.configuration2.SubsetConfiguration;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.metrics2.AbstractMetric;

View File

@ -25,7 +25,7 @@
import java.util.List;
import java.util.Map;
import org.apache.commons.configuration.SubsetConfiguration;
import org.apache.commons.configuration2.SubsetConfiguration;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.metrics2.MetricsSink;

View File

@ -26,7 +26,8 @@
import java.util.Map;
import java.util.Set;
import org.apache.commons.configuration.SubsetConfiguration;
import org.apache.commons.configuration2.SubsetConfiguration;
import org.apache.commons.configuration2.convert.DefaultListDelimiterHandler;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
@ -58,7 +59,7 @@ public class GangliaSink30 extends AbstractGangliaSink {
public void init(SubsetConfiguration conf) {
super.init(conf);
conf.setListDelimiter(',');
conf.setListDelimiterHandler(new DefaultListDelimiterHandler(','));
Iterator<String> it = (Iterator<String>) conf.getKeys();
while (it.hasNext()) {
String propertyName = it.next();
@ -66,20 +67,17 @@ public void init(SubsetConfiguration conf) {
String contextName = propertyName.substring(TAGS_FOR_PREFIX_PROPERTY_PREFIX.length());
String[] tags = conf.getStringArray(propertyName);
boolean useAllTags = false;
Set<String> set = null;
if (tags.length > 0) {
set = new HashSet<String>();
for (String tag : tags) {
tag = tag.trim();
useAllTags |= tag.equals("*");
if (tag.length() > 0) {
set.add(tag);
}
}
if (useAllTags) {
set = null;
Set<String> set = new HashSet<>();
for (String tag : tags) {
tag = tag.trim();
useAllTags |= tag.equals("*");
if (tag.length() > 0) {
set.add(tag);
}
}
if (useAllTags) {
set = null;
}
useTagsMap.put(contextName, set);
}
}

View File

@ -21,7 +21,7 @@
import java.util.Arrays;
import java.util.List;
import org.apache.commons.configuration.SubsetConfiguration;
import org.apache.commons.configuration2.SubsetConfiguration;
import org.junit.Test;
import static org.junit.Assert.*;

View File

@ -18,8 +18,11 @@
package org.apache.hadoop.metrics2.impl;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.commons.configuration.SubsetConfiguration;
import org.apache.commons.configuration2.PropertiesConfiguration;
import org.apache.commons.configuration2.SubsetConfiguration;
import org.apache.commons.configuration2.convert.DefaultListDelimiterHandler;
import java.io.FileWriter;
/**
* Helper class for building configs, mostly used in tests
@ -33,6 +36,7 @@ public class ConfigBuilder {
*/
public ConfigBuilder() {
config = new PropertiesConfiguration();
config.setListDelimiterHandler(new DefaultListDelimiterHandler(','));
}
/**
@ -54,7 +58,8 @@ public ConfigBuilder add(String key, Object value) {
*/
public ConfigBuilder save(String filename) {
try {
config.save(filename);
FileWriter fw = new FileWriter(filename);
config.write(fw);
}
catch (Exception e) {
throw new RuntimeException("Error saving config", e);

View File

@ -18,12 +18,12 @@
package org.apache.hadoop.metrics2.impl;
import java.io.PrintStream;
import java.io.PrintWriter;
import java.util.Iterator;
import static org.junit.Assert.*;
import org.apache.commons.configuration.Configuration;
import org.apache.commons.configuration.PropertiesConfiguration;
import org.apache.commons.configuration2.Configuration;
import org.apache.commons.configuration2.PropertiesConfiguration;
/**
* Helpers for config tests and debugging
@ -31,20 +31,20 @@
class ConfigUtil {
static void dump(Configuration c) {
dump(null, c, System.out);
dump(null, c, new PrintWriter(System.out));
}
static void dump(String header, Configuration c) {
dump(header, c, System.out);
dump(header, c, new PrintWriter(System.out));
}
static void dump(String header, Configuration c, PrintStream out) {
static void dump(String header, Configuration c, PrintWriter out) {
PropertiesConfiguration p = new PropertiesConfiguration();
p.copy(c);
if (header != null) {
out.println(header);
}
try { p.save(out); }
try { p.write(out); }
catch (Exception e) {
throw new RuntimeException("Error saving config", e);
}

View File

@ -21,7 +21,7 @@
import org.junit.Test;
import static org.junit.Assert.*;
import org.apache.commons.configuration.SubsetConfiguration;
import org.apache.commons.configuration2.SubsetConfiguration;
import static org.apache.hadoop.metrics2.filter.TestPatternFilter.*;
import static org.apache.hadoop.metrics2.lib.Interns.*;

View File

@ -23,7 +23,7 @@
import org.junit.Test;
import static org.junit.Assert.*;
import org.apache.commons.configuration.Configuration;
import org.apache.commons.configuration2.Configuration;
import org.apache.commons.logging.LogFactory;
import org.apache.commons.logging.Log;
import static org.apache.hadoop.metrics2.impl.ConfigUtil.*;

View File

@ -41,7 +41,7 @@
import com.google.common.base.Predicate;
import com.google.common.collect.Iterables;
import org.apache.commons.configuration.SubsetConfiguration;
import org.apache.commons.configuration2.SubsetConfiguration;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.metrics2.MetricsException;

View File

@ -32,7 +32,7 @@
import java.util.Date;
import java.util.TimeZone;
import java.util.regex.Pattern;
import org.apache.commons.configuration.SubsetConfiguration;
import org.apache.commons.configuration2.SubsetConfiguration;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;

View File

@ -19,7 +19,7 @@
package org.apache.hadoop.metrics2.sink;
import java.util.Calendar;
import org.apache.commons.configuration.SubsetConfiguration;
import org.apache.commons.configuration2.SubsetConfiguration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.metrics2.MetricsException;
import org.apache.hadoop.metrics2.impl.ConfigBuilder;

View File

@ -18,7 +18,7 @@
package org.apache.hadoop.metrics2.sink.ganglia;
import org.apache.commons.configuration.SubsetConfiguration;
import org.apache.commons.configuration2.SubsetConfiguration;
import org.apache.hadoop.metrics2.impl.ConfigBuilder;
import org.junit.Test;

View File

@ -18,7 +18,6 @@
package org.apache.hadoop.hdfs;
import com.google.common.base.Preconditions;
import org.apache.commons.configuration.SystemConfiguration;
import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
import org.apache.hadoop.hdfs.protocol.LocatedBlock;
import org.apache.hadoop.hdfs.util.StripedBlockUtil;

View File

@ -17,7 +17,7 @@
*/
package org.apache.hadoop.hdfs.server.datanode;
import org.apache.commons.configuration.SubsetConfiguration;
import org.apache.commons.configuration2.SubsetConfiguration;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.metrics2.AbstractMetric;

View File

@ -801,9 +801,14 @@
<version>3.2.2</version>
</dependency>
<dependency>
<groupId>commons-configuration</groupId>
<artifactId>commons-configuration</artifactId>
<version>1.6</version>
<groupId>commons-beanutils</groupId>
<artifactId>commons-beanutils</artifactId>
<version>1.9.3</version>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-configuration2</artifactId>
<version>2.1</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>

View File

@ -163,6 +163,12 @@
<groupId>com.microsoft.azure</groupId>
<artifactId>azure-storage</artifactId>
<scope>compile</scope>
<exclusions>
<exclusion>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>

View File

@ -29,7 +29,7 @@
import java.util.TimeZone;
import java.util.concurrent.ConcurrentLinkedQueue;
import org.apache.commons.configuration.SubsetConfiguration;
import org.apache.commons.configuration2.SubsetConfiguration;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.azure.metrics.AzureFileSystemInstrumentation;

View File

@ -21,7 +21,7 @@
import com.google.common.base.Strings;
import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.commons.configuration.SubsetConfiguration;
import org.apache.commons.configuration2.SubsetConfiguration;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.metrics2.AbstractMetric;

View File

@ -20,7 +20,7 @@
import com.google.common.base.Objects;
import com.google.common.collect.Lists;
import org.apache.commons.configuration.SubsetConfiguration;
import org.apache.commons.configuration2.SubsetConfiguration;
import org.apache.hadoop.metrics2.AbstractMetric;
import org.apache.hadoop.metrics2.MetricType;
import org.apache.hadoop.metrics2.MetricsInfo;