HADOOP-13660. Upgrade commons-configuration version. Contributed by Sean Mackrory.

This commit is contained in:
Wei-Chiu Chuang 2016-11-17 22:48:35 -06:00
parent 09520cb439
commit c0b1a44f6c
28 changed files with 94 additions and 68 deletions

View File

@ -410,4 +410,10 @@
<Field name="done"/> <Field name="done"/>
<Bug pattern="JLM_JSR166_UTILCONCURRENT_MONITORENTER"/> <Bug pattern="JLM_JSR166_UTILCONCURRENT_MONITORENTER"/>
</Match> </Match>
<Match>
<Class name="org.apache.hadoop.metrics2.impl.MetricsConfig"/>
<Method name="toString"/>
<Bug pattern="DM_DEFAULT_ENCODING"/>
</Match>
</FindBugsFilter> </FindBugsFilter>

View File

@ -173,8 +173,13 @@
<scope>compile</scope> <scope>compile</scope>
</dependency> </dependency>
<dependency> <dependency>
<groupId>commons-configuration</groupId> <groupId>commons-beanutils</groupId>
<artifactId>commons-configuration</artifactId> <artifactId>commons-beanutils</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-configuration2</artifactId>
<scope>compile</scope> <scope>compile</scope>
</dependency> </dependency>
<dependency> <dependency>

View File

@ -18,7 +18,7 @@
package org.apache.hadoop.metrics2; package org.apache.hadoop.metrics2;
import org.apache.commons.configuration.SubsetConfiguration; import org.apache.commons.configuration2.SubsetConfiguration;
import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.classification.InterfaceStability;

View File

@ -21,7 +21,7 @@ package org.apache.hadoop.metrics2.filter;
import java.util.Map; import java.util.Map;
import com.google.common.collect.Maps; import com.google.common.collect.Maps;
import org.apache.commons.configuration.SubsetConfiguration; import org.apache.commons.configuration2.SubsetConfiguration;
import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.metrics2.MetricsException; import org.apache.hadoop.metrics2.MetricsException;

View File

@ -19,13 +19,12 @@
package org.apache.hadoop.metrics2.impl; package org.apache.hadoop.metrics2.impl;
import java.io.ByteArrayOutputStream; import java.io.ByteArrayOutputStream;
import java.io.PrintStream; import java.io.PrintWriter;
import java.net.URL; import java.net.URL;
import java.net.URLClassLoader; import java.net.URLClassLoader;
import static java.security.AccessController.*; import static java.security.AccessController.*;
import java.security.PrivilegedAction; import java.security.PrivilegedAction;
import java.util.Iterator; import java.util.Iterator;
import java.util.Locale;
import java.util.Map; import java.util.Map;
import java.util.regex.Matcher; import java.util.regex.Matcher;
import java.util.regex.Pattern; import java.util.regex.Pattern;
@ -35,10 +34,13 @@ import com.google.common.base.Splitter;
import com.google.common.collect.Iterables; import com.google.common.collect.Iterables;
import com.google.common.collect.Maps; import com.google.common.collect.Maps;
import org.apache.commons.configuration.Configuration; import org.apache.commons.configuration2.Configuration;
import org.apache.commons.configuration.ConfigurationException; import org.apache.commons.configuration2.PropertiesConfiguration;
import org.apache.commons.configuration.PropertiesConfiguration; import org.apache.commons.configuration2.SubsetConfiguration;
import org.apache.commons.configuration.SubsetConfiguration; import org.apache.commons.configuration2.builder.fluent.Configurations;
import org.apache.commons.configuration2.builder.fluent.Parameters;
import org.apache.commons.configuration2.convert.DefaultListDelimiterHandler;
import org.apache.commons.configuration2.ex.ConfigurationException;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.metrics2.MetricsFilter; import org.apache.hadoop.metrics2.MetricsFilter;
@ -110,15 +112,20 @@ class MetricsConfig extends SubsetConfiguration {
static MetricsConfig loadFirst(String prefix, String... fileNames) { static MetricsConfig loadFirst(String prefix, String... fileNames) {
for (String fname : fileNames) { for (String fname : fileNames) {
try { try {
Configuration cf = new PropertiesConfiguration(fname) Configuration cf = new Configurations().propertiesBuilder(fname)
.interpolatedConfiguration(); .configure(new Parameters().properties()
.setFileName(fname)
.setListDelimiterHandler(new DefaultListDelimiterHandler(',')))
.getConfiguration()
.interpolatedConfiguration();
LOG.info("loaded properties from "+ fname); LOG.info("loaded properties from "+ fname);
LOG.debug(toString(cf)); LOG.debug(toString(cf));
MetricsConfig mc = new MetricsConfig(cf, prefix); MetricsConfig mc = new MetricsConfig(cf, prefix);
LOG.debug(mc); LOG.debug(mc);
return mc; return mc;
} catch (ConfigurationException e) { } catch (ConfigurationException e) {
if (e.getMessage().startsWith("Cannot locate configuration")) { // Commons Configuration defines the message text when file not found
if (e.getMessage().startsWith("Could not locate")) {
continue; continue;
} }
throw new MetricsConfigException(e); throw new MetricsConfigException(e);
@ -175,8 +182,8 @@ class MetricsConfig extends SubsetConfiguration {
* @return the value or null * @return the value or null
*/ */
@Override @Override
public Object getProperty(String key) { public Object getPropertyInternal(String key) {
Object value = super.getProperty(key); Object value = super.getPropertyInternal(key);
if (value == null) { if (value == null) {
if (LOG.isDebugEnabled()) { if (LOG.isDebugEnabled()) {
LOG.debug("poking parent '"+ getParent().getClass().getSimpleName() + LOG.debug("poking parent '"+ getParent().getClass().getSimpleName() +
@ -249,11 +256,6 @@ class MetricsConfig extends SubsetConfiguration {
return defaultLoader; return defaultLoader;
} }
@Override public void clear() {
super.clear();
// pluginLoader.close(); // jdk7 is saner
}
MetricsFilter getFilter(String prefix) { MetricsFilter getFilter(String prefix) {
// don't create filter instances without out options // don't create filter instances without out options
MetricsConfig conf = subset(prefix); MetricsConfig conf = subset(prefix);
@ -274,10 +276,10 @@ class MetricsConfig extends SubsetConfiguration {
static String toString(Configuration c) { static String toString(Configuration c) {
ByteArrayOutputStream buffer = new ByteArrayOutputStream(); ByteArrayOutputStream buffer = new ByteArrayOutputStream();
try { try {
PrintStream ps = new PrintStream(buffer, false, "UTF-8"); PrintWriter pw = new PrintWriter(buffer, false);
PropertiesConfiguration tmp = new PropertiesConfiguration(); PropertiesConfiguration tmp = new PropertiesConfiguration();
tmp.copy(c); tmp.copy(c);
tmp.save(ps); tmp.write(pw);
return buffer.toString("UTF-8"); return buffer.toString("UTF-8");
} catch (Exception e) { } catch (Exception e) {
throw new MetricsConfigException(e); throw new MetricsConfigException(e);

View File

@ -36,7 +36,7 @@ import com.google.common.annotations.VisibleForTesting;
import java.util.Locale; import java.util.Locale;
import static com.google.common.base.Preconditions.*; import static com.google.common.base.Preconditions.*;
import org.apache.commons.configuration.PropertiesConfiguration; import org.apache.commons.configuration2.PropertiesConfiguration;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.commons.math3.util.ArithmeticUtils; import org.apache.commons.math3.util.ArithmeticUtils;
@ -347,7 +347,7 @@ public class MetricsSystemImpl extends MetricsSystem implements MetricsSource {
PropertiesConfiguration saver = new PropertiesConfiguration(); PropertiesConfiguration saver = new PropertiesConfiguration();
StringWriter writer = new StringWriter(); StringWriter writer = new StringWriter();
saver.copy(config); saver.copy(config);
try { saver.save(writer); } try { saver.write(writer); }
catch (Exception e) { catch (Exception e) {
throw new MetricsConfigException("Error stringify config", e); throw new MetricsConfigException("Error stringify config", e);
} }

View File

@ -24,7 +24,7 @@ import java.io.FileOutputStream;
import java.io.IOException; import java.io.IOException;
import java.io.PrintStream; import java.io.PrintStream;
import org.apache.commons.configuration.SubsetConfiguration; import org.apache.commons.configuration2.SubsetConfiguration;
import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.metrics2.AbstractMetric; import org.apache.hadoop.metrics2.AbstractMetric;

View File

@ -18,7 +18,7 @@
package org.apache.hadoop.metrics2.sink; package org.apache.hadoop.metrics2.sink;
import org.apache.commons.configuration.SubsetConfiguration; import org.apache.commons.configuration2.SubsetConfiguration;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience;

View File

@ -36,7 +36,7 @@ import java.util.concurrent.TimeUnit;
import java.util.regex.Matcher; import java.util.regex.Matcher;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import org.apache.commons.configuration.SubsetConfiguration; import org.apache.commons.configuration2.SubsetConfiguration;
import org.apache.commons.lang.time.FastDateFormat; import org.apache.commons.lang.time.FastDateFormat;
import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.classification.InterfaceStability;

View File

@ -25,7 +25,7 @@ import java.net.DatagramSocket;
import java.net.InetSocketAddress; import java.net.InetSocketAddress;
import java.nio.charset.StandardCharsets; import java.nio.charset.StandardCharsets;
import org.apache.commons.configuration.SubsetConfiguration; import org.apache.commons.configuration2.SubsetConfiguration;
import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.metrics2.AbstractMetric; import org.apache.hadoop.metrics2.AbstractMetric;

View File

@ -25,7 +25,7 @@ import java.util.HashMap;
import java.util.List; import java.util.List;
import java.util.Map; import java.util.Map;
import org.apache.commons.configuration.SubsetConfiguration; import org.apache.commons.configuration2.SubsetConfiguration;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.metrics2.MetricsSink; import org.apache.hadoop.metrics2.MetricsSink;

View File

@ -26,7 +26,8 @@ import java.util.Iterator;
import java.util.Map; import java.util.Map;
import java.util.Set; import java.util.Set;
import org.apache.commons.configuration.SubsetConfiguration; import org.apache.commons.configuration2.SubsetConfiguration;
import org.apache.commons.configuration2.convert.DefaultListDelimiterHandler;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience;
@ -58,7 +59,7 @@ public class GangliaSink30 extends AbstractGangliaSink {
public void init(SubsetConfiguration conf) { public void init(SubsetConfiguration conf) {
super.init(conf); super.init(conf);
conf.setListDelimiter(','); conf.setListDelimiterHandler(new DefaultListDelimiterHandler(','));
Iterator<String> it = (Iterator<String>) conf.getKeys(); Iterator<String> it = (Iterator<String>) conf.getKeys();
while (it.hasNext()) { while (it.hasNext()) {
String propertyName = it.next(); String propertyName = it.next();
@ -66,20 +67,17 @@ public class GangliaSink30 extends AbstractGangliaSink {
String contextName = propertyName.substring(TAGS_FOR_PREFIX_PROPERTY_PREFIX.length()); String contextName = propertyName.substring(TAGS_FOR_PREFIX_PROPERTY_PREFIX.length());
String[] tags = conf.getStringArray(propertyName); String[] tags = conf.getStringArray(propertyName);
boolean useAllTags = false; boolean useAllTags = false;
Set<String> set = null; Set<String> set = new HashSet<>();
if (tags.length > 0) { for (String tag : tags) {
set = new HashSet<String>(); tag = tag.trim();
for (String tag : tags) { useAllTags |= tag.equals("*");
tag = tag.trim(); if (tag.length() > 0) {
useAllTags |= tag.equals("*"); set.add(tag);
if (tag.length() > 0) {
set.add(tag);
}
}
if (useAllTags) {
set = null;
} }
} }
if (useAllTags) {
set = null;
}
useTagsMap.put(contextName, set); useTagsMap.put(contextName, set);
} }
} }

View File

@ -21,7 +21,7 @@ package org.apache.hadoop.metrics2.filter;
import java.util.Arrays; import java.util.Arrays;
import java.util.List; import java.util.List;
import org.apache.commons.configuration.SubsetConfiguration; import org.apache.commons.configuration2.SubsetConfiguration;
import org.junit.Test; import org.junit.Test;
import static org.junit.Assert.*; import static org.junit.Assert.*;

View File

@ -18,8 +18,11 @@
package org.apache.hadoop.metrics2.impl; package org.apache.hadoop.metrics2.impl;
import org.apache.commons.configuration.PropertiesConfiguration; import org.apache.commons.configuration2.PropertiesConfiguration;
import org.apache.commons.configuration.SubsetConfiguration; import org.apache.commons.configuration2.SubsetConfiguration;
import org.apache.commons.configuration2.convert.DefaultListDelimiterHandler;
import java.io.FileWriter;
/** /**
* Helper class for building configs, mostly used in tests * Helper class for building configs, mostly used in tests
@ -33,6 +36,7 @@ public class ConfigBuilder {
*/ */
public ConfigBuilder() { public ConfigBuilder() {
config = new PropertiesConfiguration(); config = new PropertiesConfiguration();
config.setListDelimiterHandler(new DefaultListDelimiterHandler(','));
} }
/** /**
@ -54,7 +58,8 @@ public class ConfigBuilder {
*/ */
public ConfigBuilder save(String filename) { public ConfigBuilder save(String filename) {
try { try {
config.save(filename); FileWriter fw = new FileWriter(filename);
config.write(fw);
} }
catch (Exception e) { catch (Exception e) {
throw new RuntimeException("Error saving config", e); throw new RuntimeException("Error saving config", e);

View File

@ -18,12 +18,12 @@
package org.apache.hadoop.metrics2.impl; package org.apache.hadoop.metrics2.impl;
import java.io.PrintStream; import java.io.PrintWriter;
import java.util.Iterator; import java.util.Iterator;
import static org.junit.Assert.*; import static org.junit.Assert.*;
import org.apache.commons.configuration.Configuration; import org.apache.commons.configuration2.Configuration;
import org.apache.commons.configuration.PropertiesConfiguration; import org.apache.commons.configuration2.PropertiesConfiguration;
/** /**
* Helpers for config tests and debugging * Helpers for config tests and debugging
@ -31,20 +31,20 @@ import org.apache.commons.configuration.PropertiesConfiguration;
class ConfigUtil { class ConfigUtil {
static void dump(Configuration c) { static void dump(Configuration c) {
dump(null, c, System.out); dump(null, c, new PrintWriter(System.out));
} }
static void dump(String header, Configuration c) { static void dump(String header, Configuration c) {
dump(header, c, System.out); dump(header, c, new PrintWriter(System.out));
} }
static void dump(String header, Configuration c, PrintStream out) { static void dump(String header, Configuration c, PrintWriter out) {
PropertiesConfiguration p = new PropertiesConfiguration(); PropertiesConfiguration p = new PropertiesConfiguration();
p.copy(c); p.copy(c);
if (header != null) { if (header != null) {
out.println(header); out.println(header);
} }
try { p.save(out); } try { p.write(out); }
catch (Exception e) { catch (Exception e) {
throw new RuntimeException("Error saving config", e); throw new RuntimeException("Error saving config", e);
} }

View File

@ -21,7 +21,7 @@ package org.apache.hadoop.metrics2.impl;
import org.junit.Test; import org.junit.Test;
import static org.junit.Assert.*; import static org.junit.Assert.*;
import org.apache.commons.configuration.SubsetConfiguration; import org.apache.commons.configuration2.SubsetConfiguration;
import static org.apache.hadoop.metrics2.filter.TestPatternFilter.*; import static org.apache.hadoop.metrics2.filter.TestPatternFilter.*;
import static org.apache.hadoop.metrics2.lib.Interns.*; import static org.apache.hadoop.metrics2.lib.Interns.*;

View File

@ -23,7 +23,7 @@ import java.util.Map;
import org.junit.Test; import org.junit.Test;
import static org.junit.Assert.*; import static org.junit.Assert.*;
import org.apache.commons.configuration.Configuration; import org.apache.commons.configuration2.Configuration;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import static org.apache.hadoop.metrics2.impl.ConfigUtil.*; import static org.apache.hadoop.metrics2.impl.ConfigUtil.*;

View File

@ -41,7 +41,7 @@ import static org.mockito.Mockito.*;
import com.google.common.base.Predicate; import com.google.common.base.Predicate;
import com.google.common.collect.Iterables; import com.google.common.collect.Iterables;
import org.apache.commons.configuration.SubsetConfiguration; import org.apache.commons.configuration2.SubsetConfiguration;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.metrics2.MetricsException; import org.apache.hadoop.metrics2.MetricsException;

View File

@ -32,7 +32,7 @@ import java.util.Calendar;
import java.util.Date; import java.util.Date;
import java.util.TimeZone; import java.util.TimeZone;
import java.util.regex.Pattern; import java.util.regex.Pattern;
import org.apache.commons.configuration.SubsetConfiguration; import org.apache.commons.configuration2.SubsetConfiguration;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FSDataOutputStream;

View File

@ -19,7 +19,7 @@
package org.apache.hadoop.metrics2.sink; package org.apache.hadoop.metrics2.sink;
import java.util.Calendar; import java.util.Calendar;
import org.apache.commons.configuration.SubsetConfiguration; import org.apache.commons.configuration2.SubsetConfiguration;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.apache.hadoop.metrics2.MetricsException; import org.apache.hadoop.metrics2.MetricsException;
import org.apache.hadoop.metrics2.impl.ConfigBuilder; import org.apache.hadoop.metrics2.impl.ConfigBuilder;

View File

@ -18,7 +18,7 @@
package org.apache.hadoop.metrics2.sink.ganglia; package org.apache.hadoop.metrics2.sink.ganglia;
import org.apache.commons.configuration.SubsetConfiguration; import org.apache.commons.configuration2.SubsetConfiguration;
import org.apache.hadoop.metrics2.impl.ConfigBuilder; import org.apache.hadoop.metrics2.impl.ConfigBuilder;
import org.junit.Test; import org.junit.Test;

View File

@ -18,7 +18,6 @@
package org.apache.hadoop.hdfs; package org.apache.hadoop.hdfs;
import com.google.common.base.Preconditions; import com.google.common.base.Preconditions;
import org.apache.commons.configuration.SystemConfiguration;
import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy; import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
import org.apache.hadoop.hdfs.protocol.LocatedBlock; import org.apache.hadoop.hdfs.protocol.LocatedBlock;
import org.apache.hadoop.hdfs.util.StripedBlockUtil; import org.apache.hadoop.hdfs.util.StripedBlockUtil;

View File

@ -17,7 +17,7 @@
*/ */
package org.apache.hadoop.hdfs.server.datanode; package org.apache.hadoop.hdfs.server.datanode;
import org.apache.commons.configuration.SubsetConfiguration; import org.apache.commons.configuration2.SubsetConfiguration;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.HdfsConfiguration; import org.apache.hadoop.hdfs.HdfsConfiguration;
import org.apache.hadoop.metrics2.AbstractMetric; import org.apache.hadoop.metrics2.AbstractMetric;

View File

@ -801,9 +801,14 @@
<version>3.2.2</version> <version>3.2.2</version>
</dependency> </dependency>
<dependency> <dependency>
<groupId>commons-configuration</groupId> <groupId>commons-beanutils</groupId>
<artifactId>commons-configuration</artifactId> <artifactId>commons-beanutils</artifactId>
<version>1.6</version> <version>1.9.3</version>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-configuration2</artifactId>
<version>2.1</version>
</dependency> </dependency>
<dependency> <dependency>
<groupId>org.slf4j</groupId> <groupId>org.slf4j</groupId>

View File

@ -163,6 +163,12 @@
<groupId>com.microsoft.azure</groupId> <groupId>com.microsoft.azure</groupId>
<artifactId>azure-storage</artifactId> <artifactId>azure-storage</artifactId>
<scope>compile</scope> <scope>compile</scope>
<exclusions>
<exclusion>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
</exclusion>
</exclusions>
</dependency> </dependency>
<dependency> <dependency>

View File

@ -29,7 +29,7 @@ import java.util.GregorianCalendar;
import java.util.TimeZone; import java.util.TimeZone;
import java.util.concurrent.ConcurrentLinkedQueue; import java.util.concurrent.ConcurrentLinkedQueue;
import org.apache.commons.configuration.SubsetConfiguration; import org.apache.commons.configuration2.SubsetConfiguration;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.azure.metrics.AzureFileSystemInstrumentation; import org.apache.hadoop.fs.azure.metrics.AzureFileSystemInstrumentation;

View File

@ -21,7 +21,7 @@ package org.apache.hadoop.metrics2.sink;
import com.google.common.base.Strings; import com.google.common.base.Strings;
import org.apache.kafka.clients.producer.Producer; import org.apache.kafka.clients.producer.Producer;
import org.apache.kafka.clients.producer.KafkaProducer; import org.apache.kafka.clients.producer.KafkaProducer;
import org.apache.commons.configuration.SubsetConfiguration; import org.apache.commons.configuration2.SubsetConfiguration;
import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.metrics2.AbstractMetric; import org.apache.hadoop.metrics2.AbstractMetric;

View File

@ -20,7 +20,7 @@ package org.apache.hadoop.metrics2.impl;
import com.google.common.base.Objects; import com.google.common.base.Objects;
import com.google.common.collect.Lists; import com.google.common.collect.Lists;
import org.apache.commons.configuration.SubsetConfiguration; import org.apache.commons.configuration2.SubsetConfiguration;
import org.apache.hadoop.metrics2.AbstractMetric; import org.apache.hadoop.metrics2.AbstractMetric;
import org.apache.hadoop.metrics2.MetricType; import org.apache.hadoop.metrics2.MetricType;
import org.apache.hadoop.metrics2.MetricsInfo; import org.apache.hadoop.metrics2.MetricsInfo;