HADOOP-7266. Deprecate metrics v1. Contributed by Akira AJISAKA.

Committed by Tsuyoshi Ozawa on 2015-10-23 23:47:51 +09:00
parent 124a412a37
commit 35a303dfbe
34 changed files with 83 additions and 4 deletions

@@ -905,6 +905,8 @@ Release 2.8.0 - UNRELEASED
 HADOOP-9692. Improving log message when SequenceFile reader throws
 EOFException on zero-length file. (Zhe Zhang and Chu Tong via ozawa)
+HADOOP-7266. Deprecate metrics v1. (Akira AJISAKA via ozawa)
 OPTIMIZATIONS
 HADOOP-11785. Reduce the number of listStatus operation in distcp

@@ -541,7 +541,9 @@ public final class HttpServer2 implements FilterContainer {
 /**
 * Add default servlets.
+* Note: /metrics servlet will be removed in 3.X release.
 */
+@SuppressWarnings("deprecation")
 protected void addDefaultServlets() {
 // set up default servlets
 addServlet("stacks", "/stacks", StackServlet.class);

@@ -36,7 +36,10 @@ import org.apache.hadoop.metrics.spi.NullContext;
 /**
 * Factory class for creating MetricsContext objects. To obtain an instance
 * of this class, use the static <code>getFactory()</code> method.
+*
+* @deprecated Use org.apache.hadoop.metrics2 package instead.
 */
+@Deprecated
 @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
 @InterfaceStability.Evolving
 public class ContextFactory {

@@ -30,7 +30,10 @@ import org.apache.hadoop.metrics.spi.OutputRecord;
 /**
 * The main interface to the metrics package.
+*
+* @deprecated Use org.apache.hadoop.metrics2 package instead.
 */
+@Deprecated
 @InterfaceAudience.Private
 @InterfaceStability.Evolving
 public interface MetricsContext {

@@ -25,7 +25,9 @@ import org.apache.hadoop.classification.InterfaceStability;
 /**
 * General-purpose, unchecked metrics exception.
+* @deprecated Use {@link org.apache.hadoop.metrics2.MetricsException} instead.
 */
+@Deprecated
 @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
 @InterfaceStability.Evolving
 public class MetricsException extends RuntimeException {

@@ -67,7 +67,10 @@ import org.apache.hadoop.classification.InterfaceStability;
 * it is OK for different threads to call <code>update()</code> on MetricsRecord instances
 * with the same set of tag names and tag values. Different threads should
 * <b>not</b> use the same MetricsRecord instance at the same time.
+*
+* @deprecated Use {@link org.apache.hadoop.metrics2.MetricsRecord} instead.
 */
+@Deprecated
 @InterfaceAudience.Private
 @InterfaceStability.Evolving
 public interface MetricsRecord {

@@ -43,7 +43,10 @@ import org.mortbay.util.ajax.JSON.Output;
 * A servlet to print out metrics data. By default, the servlet returns a
 * textual representation (no promises are made for parseability), and
 * users can use "?format=json" for parseable output.
+*
+* @deprecated Use org.apache.hadoop.metrics2 package instead.
 */
+@Deprecated
 @InterfaceAudience.Private
 @InterfaceStability.Evolving
 public class MetricsServlet extends HttpServlet {
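As the javadoc above notes, the deprecated servlet can still be queried for machine-readable output, e.g. http://<host>:<port>/metrics?format=json (host and port are placeholders); after the 3.X removal the same information is expected to come from the configured metrics2 sinks and the /jmx servlet instead.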

@@ -32,7 +32,9 @@ import org.apache.hadoop.classification.InterfaceStability;
 * @see org.apache.hadoop.metrics.MetricsRecord
 * @see org.apache.hadoop.metrics.MetricsContext
 * @see org.apache.hadoop.metrics.ContextFactory
+* @deprecated Use org.apache.hadoop.metrics2 package instead.
 */
+@Deprecated
 @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
 @InterfaceStability.Evolving
 public class MetricsUtil {

@@ -25,7 +25,10 @@ import org.apache.hadoop.classification.InterfaceStability;
 /**
 * Call-back interface. See <code>MetricsContext.registerUpdater()</code>.
+*
+* @deprecated Use org.apache.hadoop.metrics2 package instead.
 */
+@Deprecated
 @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
 @InterfaceStability.Evolving
 public interface Updater {

@@ -39,8 +39,11 @@ import org.apache.hadoop.metrics.spi.Util;
 /**
 * Context for sending metrics to Ganglia.
 *
+* @deprecated Use {@link org.apache.hadoop.metrics2.sink.ganglia.GangliaSink30}
+* instead.
 */
+@Deprecated
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
 public class GangliaContext extends AbstractMetricsContext {

@@ -35,7 +35,11 @@ import org.apache.hadoop.net.DNS;
 * Context for sending metrics to Ganglia version 3.1.x.
 *
 * 3.1.1 has a slightly different wire portal compared to 3.0.x.
+*
+* @deprecated Use {@link org.apache.hadoop.metrics2.sink.ganglia.GangliaSink31}
+* instead.
 */
+@Deprecated
 public class GangliaContext31 extends GangliaContext {
 String hostName = "UNKNOWN.example.com";
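For reference, the metrics2 Ganglia sinks named in the @deprecated tags above are wired through configuration rather than code. A sketch of a hadoop-metrics2.properties fragment, assuming the standard sink property names; the daemon prefix "namenode" and the gmetad address are placeholders:

    *.sink.ganglia.class=org.apache.hadoop.metrics2.sink.ganglia.GangliaSink31
    *.sink.ganglia.period=10
    namenode.sink.ganglia.servers=gmetad.example.com:8649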

@@ -20,7 +20,10 @@ package org.apache.hadoop.metrics.jvm;
 /**
 * A log4J Appender that simply counts logging events in three levels:
 * fatal, error and warn.
+*
+* @deprecated Use org.apache.hadoop.metrics2 package instead.
 */
+@Deprecated
 public class EventCounter extends org.apache.hadoop.log.metrics.EventCounter {
 static {

@@ -40,7 +40,10 @@ import org.apache.commons.logging.LogFactory;
 * Singleton class which reports Java Virtual Machine metrics to the metrics API.
 * Any application can create an instance of this class in order to emit
 * Java VM metrics.
+*
+* @deprecated Use {@link org.apache.hadoop.metrics2.source.JvmMetrics} instead.
 */
+@Deprecated
 @InterfaceAudience.Private
 @InterfaceStability.Evolving
 public class JvmMetrics implements Updater {
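For reference, a minimal migration sketch toward the metrics2 JvmMetrics named above (not part of this commit; the daemon prefix and session id are illustrative):

    import org.apache.hadoop.metrics2.MetricsSystem;
    import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
    import org.apache.hadoop.metrics2.source.JvmMetrics;

    public class JvmMetricsMigration {
      public static void main(String[] args) {
        // Initialize the metrics2 system once per daemon, then register the JVM source;
        // this takes the place of the metrics v1 JvmMetrics.init(processName, sessionId) call.
        MetricsSystem ms = DefaultMetricsSystem.initialize("MyDaemon");
        JvmMetrics.create("MyDaemon", "session-1", ms);
      }
    }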

@@ -51,7 +51,10 @@ import org.apache.hadoop.metrics.Updater;
 * on which data is to be sent to the metrics system. Subclasses must
 * override the abstract <code>emitRecord</code> method in order to transmit
 * the data. <p/>
+*
+* @deprecated Use org.apache.hadoop.metrics2 package instead.
 */
+@Deprecated
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
 public abstract class AbstractMetricsContext implements MetricsContext {

@@ -34,6 +34,10 @@ import org.apache.hadoop.metrics.MetricsRecord;
 import org.apache.hadoop.metrics.MetricsUtil;
 import org.apache.hadoop.metrics.Updater;
+/**
+* @deprecated Use org.apache.hadoop.metrics2 package instead.
+*/
+@Deprecated
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
 public class CompositeContext extends AbstractMetricsContext {

@@ -26,6 +26,7 @@ import org.apache.hadoop.classification.InterfaceStability;
 /**
 * A Number that is either an absolute or an incremental amount.
 */
+@Deprecated
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
 public class MetricValue {

@@ -33,7 +33,11 @@ import org.apache.hadoop.metrics.spi.AbstractMetricsContext.TagMap;
 * An implementation of MetricsRecord. Keeps a back-pointer to the context
 * from which it was created, and delegates back to it on <code>update</code>
 * and <code>remove()</code>.
+*
+* @deprecated Use {@link org.apache.hadoop.metrics2.impl.MetricsRecordImpl}
+* instead.
 */
+@Deprecated
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
 public class MetricsRecordImpl implements MetricsRecord {

@@ -28,7 +28,10 @@ import org.apache.hadoop.metrics.MetricsServlet;
 *
 * This is useful if you want to support {@link MetricsServlet}, but
 * not emit metrics in any other way.
+*
+* @deprecated Use org.apache.hadoop.metrics2 package instead.
 */
+@Deprecated
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
 public class NoEmitMetricsContext extends AbstractMetricsContext {

@@ -27,7 +27,9 @@ import org.apache.hadoop.classification.InterfaceStability;
 * default context, so that no performance data is emitted if no configuration
 * data is found.
 *
+* @deprecated Use org.apache.hadoop.metrics2 package instead.
 */
+@Deprecated
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
 public class NullContext extends AbstractMetricsContext {

@@ -33,7 +33,9 @@ import org.apache.hadoop.metrics.ContextFactory;
 * The default impl of start and stop monitoring:
 * is the AbstractMetricsContext is good enough.
 *
+* @deprecated Use org.apache.hadoop.metrics2 package instead.
 */
+@Deprecated
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
 public class NullContextWithUpdateThread extends AbstractMetricsContext {

@@ -29,7 +29,10 @@ import org.apache.hadoop.metrics.spi.AbstractMetricsContext.TagMap;
 /**
 * Represents a record of metric data to be sent to a metrics system.
+*
+* @deprecated Use org.apache.hadoop.metrics2 package instead.
 */
+@Deprecated
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
 public class OutputRecord {

@@ -31,7 +31,10 @@ import org.apache.hadoop.net.NetUtils;
 /**
 * Static utility methods
+*
+* @deprecated Use org.apache.hadoop.metrics2 package instead.
 */
+@Deprecated
 @InterfaceAudience.Public
 @InterfaceStability.Evolving
 public class Util {

@@ -33,7 +33,9 @@ import org.apache.hadoop.classification.InterfaceAudience;
 * our standard naming convention as described in the doc
 * for {link {@link #registerMBean(String, String, Object)}
 *
+* @deprecated Use {@link org.apache.hadoop.metrics2.util.MBeans} instead.
 */
+@Deprecated
 @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
 public class MBeanUtil {
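For reference, the metrics2 replacement named above exposes a near drop-in API; a sketch (not part of this commit; the service and MBean names are illustrative):

    import javax.management.ObjectName;
    import org.apache.hadoop.metrics2.util.MBeans;

    public class MBeansMigration {
      // theMBean must still be a valid MBean implementation, exactly as with MBeanUtil.
      static ObjectName publish(Object theMBean) {
        // metrics2 counterpart of MBeanUtil.registerMBean("ServiceFoo", "TestStatistics", theMBean)
        return MBeans.register("ServiceFoo", "TestStatistics", theMBean);
      }

      static void retire(ObjectName name) {
        // metrics2 counterpart of MBeanUtil.unregisterMBean(name)
        MBeans.unregister(name);
      }
    }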

@@ -24,7 +24,9 @@ import org.apache.hadoop.metrics.MetricsRecord;
 *
 * This is base class for all metrics
 *
+* @deprecated Use org.apache.hadoop.metrics2 package instead.
 */
+@Deprecated
 @InterfaceAudience.Private
 public abstract class MetricsBase {
 public static final String NO_DESCRIPTION = "NoDescription";

@@ -51,8 +51,9 @@ import org.apache.hadoop.metrics.MetricsUtil;
 * ObjectName mbeanName = MBeanUtil.registerMBean("ServiceFoo",
 * "TestStatistics", theMBean);
 *
-*
+* @deprecated Use org.apache.hadoop.metrics2 package instead.
 */
+@Deprecated
 @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
 public abstract class MetricsDynamicMBeanBase implements DynamicMBean {
 private final static String AVG_TIME = "AvgTime";

@@ -28,7 +28,9 @@ import org.apache.commons.logging.LogFactory;
 * Each time its value is set, it is published only *once* at the next update
 * call.
 *
+* @deprecated Use org.apache.hadoop.metrics2 package instead.
 */
+@Deprecated
 @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
 public class MetricsIntValue extends MetricsBase {

@@ -27,7 +27,9 @@ import org.apache.hadoop.metrics.MetricsRecord;
 * Each time its value is set, it is published only *once* at the next update
 * call.
 *
+* @deprecated Use org.apache.hadoop.metrics2 package instead.
 */
+@Deprecated
 @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
 public class MetricsLongValue extends MetricsBase{
 private long value;

@@ -28,7 +28,9 @@ import org.apache.hadoop.classification.InterfaceAudience;
 * Related set of metrics should be declared in a holding class and registered
 * in a registry for those metrics which is also stored in the the holding class.
 *
+* @deprecated Use org.apache.hadoop.metrics2 package instead.
 */
+@Deprecated
 @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
 public class MetricsRegistry {
 private ConcurrentHashMap<String, MetricsBase> metricsList =
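For reference, the metrics2 equivalent of a v1 holding class plus MetricsRegistry is an annotated source with mutable metrics; a sketch (not part of this commit; all names and descriptions are illustrative):

    import org.apache.hadoop.metrics2.annotation.Metric;
    import org.apache.hadoop.metrics2.annotation.Metrics;
    import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
    import org.apache.hadoop.metrics2.lib.MutableCounterLong;
    import org.apache.hadoop.metrics2.lib.MutableRate;

    // Mutable metrics replace MetricsTimeVaryingInt/Long/Rate and friends;
    // the @Metrics/@Metric annotations replace explicit registry bookkeeping.
    @Metrics(name = "ExampleMetrics", about = "Sample metrics source", context = "example")
    public class ExampleMetrics {
      @Metric("Number of requests served") MutableCounterLong requests;
      @Metric("Request processing time") MutableRate requestTime;

      public static ExampleMetrics create() {
        return DefaultMetricsSystem.instance()
            .register("ExampleMetrics", "Sample metrics source", new ExampleMetrics());
      }
    }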

@@ -31,8 +31,9 @@ import org.apache.commons.logging.LogFactory;
 *
 * Note if one wants a time associated with the metric then use
 * @see org.apache.hadoop.metrics.util.MetricsTimeVaryingRate
-*
+* @deprecated Use org.apache.hadoop.metrics2 package instead.
 */
+@Deprecated
 @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
 public class MetricsTimeVaryingInt extends MetricsBase {

@@ -32,8 +32,9 @@ import org.apache.commons.logging.LogFactory;
 *
 * Note if one wants a time associated with the metric then use
 * @see org.apache.hadoop.metrics.util.MetricsTimeVaryingRate
-*
+* @deprecated Use org.apache.hadoop.metrics2 package instead.
 */
+@Deprecated
 @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
 public class MetricsTimeVaryingLong extends MetricsBase{

@@ -30,7 +30,9 @@ import org.apache.commons.logging.LogFactory;
 * This class also keeps track of the min and max rates along with
 * a method to reset the min-max.
 *
+* @deprecated Use org.apache.hadoop.metrics2 package instead.
 */
+@Deprecated
 @InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
 public class MetricsTimeVaryingRate extends MetricsBase {

@@ -32,6 +32,7 @@ import org.apache.hadoop.metrics.spi.NoEmitMetricsContext;
 import org.apache.hadoop.metrics.spi.OutputRecord;
 import org.mortbay.util.ajax.JSON;
+@Deprecated
 public class TestMetricsServlet extends TestCase {
 MetricsContext nc1;
 MetricsContext nc2;

@@ -32,6 +32,7 @@ import org.apache.hadoop.metrics.spi.AbstractMetricsContext;
 import java.net.MulticastSocket;
+@Deprecated
 public class TestGangliaContext {
 @Test
 public void testShouldCreateDatagramSocketByDefault() throws Exception {

@@ -22,6 +22,7 @@ import org.apache.hadoop.metrics.spi.AbstractMetricsContext.TagMap;
 import junit.framework.TestCase;
+@Deprecated
 public class TestOutputRecord extends TestCase {
 public void testCopy() {
 TagMap tags = new TagMap();