HBASE-9579 Sanity check visibility and audience for server-side modules

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1525055 13f79535-47bb-0310-9956-ffa450edef68
Jonathan Hsieh 2013-09-20 17:09:39 +00:00
parent 015df40389
commit d2ddf0986a
86 changed files with 232 additions and 123 deletions

View File

@@ -19,15 +19,13 @@
 package org.apache.hadoop.hbase.client;
 import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
 /**
  * A Get, Put or Delete associated with it's region. Used internally by
  * {@link HTable#batch} to associate the action with it's region and maintain
  * the index from the original request.
  */
-@InterfaceAudience.Public
-@InterfaceStability.Stable
+@InterfaceAudience.Private
 public class Action<R> implements Comparable<R> {
   // TODO: This class should not be visible outside of the client package.
   private Row action;

View File

@@ -0,0 +1,30 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+/**
+ * This class defines constants for different classes of hbase limited private apis
+ */
+@InterfaceAudience.Public
+@InterfaceStability.Evolving
+public class HBaseInterfaceAudience {
+  public static final String COPROC = "Coprocesssor";
+}
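Not part of the commit itself, but for orientation: the constant above is consumed by the @InterfaceAudience.LimitedPrivate annotation that the coprocessor hunks below switch to. A minimal, hypothetical sketch of a server-side extension point annotated this way (the interface name and method are illustrative only):

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;

// Hypothetical example: visible to coprocessor implementors (the COPROC
// audience) but not part of the public client API.
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.COPROC)
@InterfaceStability.Evolving
public interface ExampleCoprocessorHook {
  void preSomeServerOperation();
}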

View File

@@ -21,7 +21,7 @@ package org.apache.hadoop.hbase.thrift;
 import org.apache.hadoop.hbase.metrics.BaseSource;
 /**
- * Inteface of a class that will export metrics about Thrift to hadoop's metrics2.
+ * Interface of a class that will export metrics about Thrift to hadoop's metrics2.
  */
 public interface MetricsThriftServerSource extends BaseSource {

View File

@@ -35,8 +35,7 @@ import java.util.concurrent.TimeUnit;
  * low error bounds. This is particularly useful for accurate high-percentile (e.g. 95th, 99th)
  * latency metrics.
  */
-@InterfaceAudience.Public
-@InterfaceStability.Evolving
+@InterfaceAudience.Private
 public class MetricMutableQuantiles extends MetricMutable implements MetricHistogram {
   static final MetricQuantile[] quantiles = {new MetricQuantile(0.50, 0.050),

View File

@@ -21,6 +21,9 @@ package org.apache.hadoop.hbase.ipc;
 import java.util.HashMap;
+import org.apache.hadoop.classification.InterfaceAudience;
+@InterfaceAudience.Private
 public class MetricsHBaseServerSourceFactoryImpl extends MetricsHBaseServerSourceFactory {
   private static enum SourceStorage {
     INSTANCE;

View File

@@ -19,6 +19,7 @@
 package org.apache.hadoop.hbase.ipc;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.metrics.BaseSourceImpl;
 import org.apache.hadoop.metrics2.MetricsCollector;
 import org.apache.hadoop.metrics2.MetricsRecordBuilder;
@@ -26,6 +27,7 @@ import org.apache.hadoop.metrics2.lib.Interns;
 import org.apache.hadoop.metrics2.lib.MutableCounterLong;
 import org.apache.hadoop.metrics2.lib.MutableHistogram;
+@InterfaceAudience.Private
 public class MetricsHBaseServerSourceImpl extends BaseSourceImpl
     implements MetricsHBaseServerSource {

View File

@@ -18,11 +18,12 @@
 package org.apache.hadoop.hbase.master;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.metrics.BaseSourceImpl;
 import org.apache.hadoop.metrics2.lib.MutableGaugeLong;
 import org.apache.hadoop.metrics2.lib.MutableHistogram;
+@InterfaceAudience.Private
 public class MetricsAssignmentManagerSourceImpl extends BaseSourceImpl implements MetricsAssignmentManagerSource {
   private MutableGaugeLong ritGauge;

View File

@@ -18,9 +18,11 @@
 package org.apache.hadoop.hbase.master;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.metrics.BaseSourceImpl;
 import org.apache.hadoop.metrics2.lib.MutableHistogram;
+@InterfaceAudience.Private
 public class MetricsMasterFilesystemSourceImpl extends BaseSourceImpl implements MetricsMasterFileSystemSource {
   private MutableHistogram splitSizeHisto;

View File

@@ -18,9 +18,12 @@
 package org.apache.hadoop.hbase.master;
+import org.apache.hadoop.classification.InterfaceAudience;
 /**
  * Factory to create MetricsMasterSource when given a MetricsMasterWrapper
  */
+@InterfaceAudience.Private
 public class MetricsMasterSourceFactoryImpl implements MetricsMasterSourceFactory {
   private static enum FactoryStorage {
     INSTANCE;

View File

@@ -18,20 +18,19 @@
 package org.apache.hadoop.hbase.master;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.metrics.BaseSourceImpl;
 import org.apache.hadoop.metrics2.MetricsCollector;
 import org.apache.hadoop.metrics2.MetricsRecordBuilder;
 import org.apache.hadoop.metrics2.lib.Interns;
 import org.apache.hadoop.metrics2.lib.MutableCounterLong;
-import org.apache.hadoop.metrics2.lib.MutableGaugeLong;
-import org.apache.hadoop.metrics2.lib.MutableHistogram;
-import org.apache.hadoop.metrics2.lib.MutableStat;
 /**
  * Hadoop2 implementation of MetricsMasterSource.
  *
  * Implements BaseSource through BaseSourceImpl, following the pattern
  */
+@InterfaceAudience.Private
 public class MetricsMasterSourceImpl
     extends BaseSourceImpl implements MetricsMasterSource {

View File

@@ -18,9 +18,11 @@
 package org.apache.hadoop.hbase.master;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.metrics.BaseSourceImpl;
 import org.apache.hadoop.metrics2.lib.MutableHistogram;
+@InterfaceAudience.Private
 public class MetricsSnapshotSourceImpl extends BaseSourceImpl implements MetricsSnapshotSource {
   private MutableHistogram snapshotTimeHisto;

View File

@@ -18,10 +18,12 @@
 package org.apache.hadoop.hbase.master.balancer;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.metrics.BaseSourceImpl;
 import org.apache.hadoop.metrics2.lib.MutableCounterLong;
 import org.apache.hadoop.metrics2.lib.MutableHistogram;
+@InterfaceAudience.Private
 public class MetricsBalancerSourceImpl extends BaseSourceImpl implements MetricsBalancerSource{
   private MutableHistogram blanceClusterHisto;

View File

@@ -18,6 +18,7 @@
 package org.apache.hadoop.hbase.metrics;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.metrics2.MetricsCollector;
 import org.apache.hadoop.metrics2.MetricsSource;
 import org.apache.hadoop.metrics2.impl.JmxCacheBuster;
@@ -35,6 +36,7 @@ import org.apache.hadoop.metrics2.source.JvmMetrics;
  *
  * All MetricsSource's in hbase-hadoop2-compat should derive from this class.
  */
+@InterfaceAudience.Private
 public class BaseSourceImpl implements BaseSource, MetricsSource {
   private static enum DefaultMetricsSystemInitializer {

View File

@@ -18,13 +18,15 @@
 package org.apache.hadoop.hbase.metrics;
-import org.apache.hadoop.metrics2.util.MBeans;
 import javax.management.ObjectName;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.metrics2.util.MBeans;
 /**
  * Hadoop2 metrics2 implementation of an object that registers MBeans.
  */
+@InterfaceAudience.Private
 public class MBeanSourceImpl implements MBeanSource {
   /**

View File

@@ -18,13 +18,15 @@
 package org.apache.hadoop.hbase.regionserver;
+import java.util.TreeSet;
+import java.util.concurrent.locks.ReentrantReadWriteLock;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.metrics.BaseSourceImpl;
 import org.apache.hadoop.metrics2.MetricsCollector;
 import org.apache.hadoop.metrics2.MetricsRecordBuilder;
-import java.util.TreeSet;
-import java.util.concurrent.locks.ReentrantReadWriteLock;
+@InterfaceAudience.Private
 public class MetricsRegionAggregateSourceImpl extends BaseSourceImpl
     implements MetricsRegionAggregateSource {

View File

@@ -18,9 +18,12 @@
 package org.apache.hadoop.hbase.regionserver;
+import org.apache.hadoop.classification.InterfaceAudience;
 /**
  * Factory to create MetricsRegionServerSource when given a MetricsRegionServerWrapper
  */
+@InterfaceAudience.Private
 public class MetricsRegionServerSourceFactoryImpl implements MetricsRegionServerSourceFactory {
   public static enum FactoryStorage {
     INSTANCE;

View File

@@ -18,6 +18,7 @@
 package org.apache.hadoop.hbase.regionserver;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.metrics.BaseSourceImpl;
 import org.apache.hadoop.metrics2.MetricHistogram;
 import org.apache.hadoop.metrics2.MetricsCollector;
@@ -30,6 +31,7 @@ import org.apache.hadoop.metrics2.lib.MutableCounterLong;
  *
  * Implements BaseSource through BaseSourceImpl, following the pattern
  */
+@InterfaceAudience.Private
 public class MetricsRegionServerSourceImpl
     extends BaseSourceImpl implements MetricsRegionServerSource {

View File

@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.regionserver;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.metrics2.MetricsRecordBuilder;
 import org.apache.hadoop.metrics2.impl.JmxCacheBuster;
 import org.apache.hadoop.metrics2.lib.DynamicMetricsRegistry;
@@ -27,6 +28,7 @@ import org.apache.hadoop.metrics2.lib.Interns;
 import org.apache.hadoop.metrics2.lib.MutableCounterLong;
 import org.apache.hadoop.metrics2.lib.MutableStat;
+@InterfaceAudience.Private
 public class MetricsRegionSourceImpl implements MetricsRegionSource {
   private final MetricsRegionWrapper regionWrapper;

View File

@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.regionserver.wal;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.metrics.BaseSourceImpl;
 import org.apache.hadoop.metrics2.MetricHistogram;
@@ -27,6 +28,7 @@ import org.apache.hadoop.metrics2.MetricHistogram;
  * Hadoop1 implementation of MetricsMasterSource. Implements BaseSource through BaseSourceImpl,
  * following the pattern
  */
+@InterfaceAudience.Private
 public class MetricsEditsReplaySourceImpl extends BaseSourceImpl implements
     MetricsEditsReplaySource {

View File

@@ -18,6 +18,7 @@
 package org.apache.hadoop.hbase.regionserver.wal;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.metrics.BaseSourceImpl;
 import org.apache.hadoop.metrics2.MetricHistogram;
 import org.apache.hadoop.metrics2.lib.MutableCounterLong;
@@ -28,6 +29,7 @@ import org.apache.hadoop.metrics2.lib.MutableCounterLong;
  *
  * Implements BaseSource through BaseSourceImpl, following the pattern.
  */
+@InterfaceAudience.Private
 public class MetricsWALSourceImpl extends BaseSourceImpl implements MetricsWALSource {
   private final MetricHistogram appendSizeHisto;

View File

@@ -18,6 +18,7 @@
 package org.apache.hadoop.hbase.replication.regionserver;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.metrics.BaseSourceImpl;
 /**
@@ -26,6 +27,7 @@ import org.apache.hadoop.hbase.metrics.BaseSourceImpl;
  *
  * Implements BaseSource through BaseSourceImpl, following the pattern
  */
+@InterfaceAudience.Private
 public class MetricsReplicationSourceImpl extends BaseSourceImpl implements
     MetricsReplicationSource {

View File

@@ -18,6 +18,7 @@
 package org.apache.hadoop.hbase.rest;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.metrics.BaseSourceImpl;
 import org.apache.hadoop.metrics2.lib.MutableCounterLong;
@@ -27,6 +28,7 @@ import org.apache.hadoop.metrics2.lib.MutableCounterLong;
  *
  * Implements BaseSource through BaseSourceImpl, following the pattern
  */
+@InterfaceAudience.Private
 public class MetricsRESTSourceImpl extends BaseSourceImpl implements MetricsRESTSource {
   private MutableCounterLong request;

View File

@@ -18,9 +18,12 @@
 package org.apache.hadoop.hbase.thrift;
+import org.apache.hadoop.classification.InterfaceAudience;
 /**
  * Class used to create metrics sources for Thrift and Thrift2 servers.
  */
+@InterfaceAudience.Private
 public class MetricsThriftServerSourceFactoryImpl implements MetricsThriftServerSourceFactory {
   /**

View File

@@ -18,6 +18,7 @@
 package org.apache.hadoop.hbase.thrift;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.metrics.BaseSourceImpl;
 import org.apache.hadoop.metrics2.lib.MutableGaugeLong;
 import org.apache.hadoop.metrics2.lib.MutableStat;
@@ -27,6 +28,7 @@ import org.apache.hadoop.metrics2.lib.MutableStat;
  *
  * Implements BaseSource through BaseSourceImpl, following the pattern
  */
+@InterfaceAudience.Private
 public class MetricsThriftServerSourceImpl extends BaseSourceImpl implements
     MetricsThriftServerSource {

View File

@@ -18,15 +18,16 @@
 package org.apache.hadoop.metrics2.impl;
+import java.util.concurrent.ScheduledFuture;
+import java.util.concurrent.TimeUnit;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.metrics2.MetricsExecutor;
 import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
 import org.apache.hadoop.metrics2.lib.MetricsExecutorImpl;
-import java.util.concurrent.ScheduledFuture;
-import java.util.concurrent.TimeUnit;
 /**
  * JMX caches the beans that have been exported; even after the values are removed from hadoop's
  * metrics system the keys and old values will still remain. This class stops and restarts the
@@ -38,6 +39,7 @@ import java.util.concurrent.TimeUnit;
 @edu.umd.cs.findbugs.annotations.SuppressWarnings(
     value="LI_LAZY_INIT_STATIC",
     justification="Yeah, its weird but its what we want")
+@InterfaceAudience.Private
 public class JmxCacheBuster {
   private static final Log LOG = LogFactory.getLog(JmxCacheBuster.class);
   private static Object lock = new Object();

View File

@@ -21,16 +21,16 @@ package org.apache.hadoop.metrics2.lib;
 import java.util.Collection;
 import java.util.concurrent.ConcurrentMap;
-import com.google.common.base.Objects;
-import com.google.common.collect.Maps;
 import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.metrics2.MetricsException;
 import org.apache.hadoop.metrics2.MetricsInfo;
 import org.apache.hadoop.metrics2.MetricsRecordBuilder;
 import org.apache.hadoop.metrics2.MetricsTag;
 import org.apache.hadoop.metrics2.impl.MsInfo;
+import com.google.common.base.Objects;
+import com.google.common.collect.Maps;
 /**
  * An optional metrics registry class for creating and maintaining a
  * collection of MetricsMutables, making writing metrics source easier.
@@ -42,8 +42,7 @@ import org.apache.hadoop.metrics2.impl.MsInfo;
  * Another difference is that metricsMap implementation is substituted with
  * thread-safe map, as we allow dynamic metrics additions/removals.
  */
-@InterfaceAudience.Public
-@InterfaceStability.Evolving
+@InterfaceAudience.Private
 public class DynamicMetricsRegistry {
   private final ConcurrentMap<String, MutableMetric> metricsMap =
       Maps.newConcurrentMap();

View File

@@ -18,10 +18,14 @@
 package org.apache.hadoop.metrics2.lib;
-import com.google.common.annotations.VisibleForTesting;
+import static org.apache.hadoop.metrics2.lib.Interns.info;
+import java.io.IOException;
+import java.util.Map;
+import java.util.concurrent.TimeUnit;
 import org.apache.commons.lang.StringUtils;
 import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.metrics2.MetricHistogram;
 import org.apache.hadoop.metrics2.MetricsExecutor;
 import org.apache.hadoop.metrics2.MetricsInfo;
@@ -29,19 +33,14 @@ import org.apache.hadoop.metrics2.MetricsRecordBuilder;
 import org.apache.hadoop.metrics2.util.MetricQuantile;
 import org.apache.hadoop.metrics2.util.MetricSampleQuantiles;
-import java.io.IOException;
-import java.util.Map;
-import java.util.concurrent.TimeUnit;
-import static org.apache.hadoop.metrics2.lib.Interns.info;
+import com.google.common.annotations.VisibleForTesting;
 /**
  * Watches a stream of long values, maintaining online estimates of specific quantiles with provably
  * low error bounds. This is particularly useful for accurate high-percentile (e.g. 95th, 99th)
  * latency metrics.
  */
-@InterfaceAudience.Public
-@InterfaceStability.Evolving
+@InterfaceAudience.Private
 public class MetricMutableQuantiles extends MutableMetric implements MetricHistogram {
   static final MetricQuantile[] quantiles = {new MetricQuantile(0.50, 0.050),

View File

@@ -18,16 +18,18 @@
 package org.apache.hadoop.metrics2.lib;
-import org.apache.hadoop.metrics2.MetricsExecutor;
 import java.util.concurrent.ScheduledExecutorService;
 import java.util.concurrent.ScheduledThreadPoolExecutor;
 import java.util.concurrent.ThreadFactory;
 import java.util.concurrent.atomic.AtomicInteger;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.metrics2.MetricsExecutor;
 /**
  * Class to handle the ScheduledExecutorService{@link ScheduledExecutorService} used by MetricMutableQuantiles{@link MetricMutableQuantiles}
  */
+@InterfaceAudience.Private
 public class MetricsExecutorImpl implements MetricsExecutor {
   @Override

View File

@@ -18,19 +18,22 @@
 package org.apache.hadoop.metrics2.lib;
-import com.yammer.metrics.stats.ExponentiallyDecayingSample;
-import com.yammer.metrics.stats.Sample;
-import com.yammer.metrics.stats.Snapshot;
+import java.util.concurrent.atomic.AtomicLong;
 import org.apache.commons.lang.StringUtils;
+import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.metrics2.MetricHistogram;
 import org.apache.hadoop.metrics2.MetricsInfo;
 import org.apache.hadoop.metrics2.MetricsRecordBuilder;
-import java.util.concurrent.atomic.AtomicLong;
+import com.yammer.metrics.stats.ExponentiallyDecayingSample;
+import com.yammer.metrics.stats.Sample;
+import com.yammer.metrics.stats.Snapshot;
 /**
  * A histogram implementation that runs in constant space, and exports to hadoop2's metrics2 system.
  */
+@InterfaceAudience.Private
 public class MutableHistogram extends MutableMetric implements MetricHistogram {
   private static final int DEFAULT_SAMPLE_SIZE = 2046;

View File

@@ -19,7 +19,6 @@
 package org.apache.hadoop.hbase.codec.prefixtree.scanner;
 import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
 /**
  * An indicator of the state of the scanner after an operation such as nextCell() or
@@ -33,8 +32,7 @@ import org.apache.hadoop.classification.InterfaceStability;
  * next region.</li>
  * </ul>
  */
-@InterfaceAudience.Public
-@InterfaceStability.Evolving
+@InterfaceAudience.Private
 public enum CellScannerPosition {
   /**

View File

@@ -27,8 +27,7 @@ import org.apache.hadoop.classification.InterfaceStability;
  * Get, remove and modify table descriptors.
  * Used by servers to host descriptors.
  */
-@InterfaceAudience.Public
-@InterfaceStability.Evolving
+@InterfaceAudience.Private
 public interface TableDescriptors {
   /**
    * @param tableName

View File

@@ -31,7 +31,7 @@ import org.apache.zookeeper.KeeperException;
 /**
  * Example class for how to use the table archiving coordinated via zookeeper
  */
-@InterfaceAudience.Public
+@InterfaceAudience.Private
 public class ZKTableArchiveClient extends Configured {
   /** Configuration key for the archive node. */

View File

@@ -22,6 +22,7 @@ import java.io.IOException;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.hbase.HBaseInterfaceAudience;
 import org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessRequest;
 import org.apache.hadoop.hbase.regionserver.RowProcessor;
@@ -31,7 +32,7 @@ import com.google.protobuf.Message;
  * For example usage, refer TestRowProcessorEndpoint
  *
  */
-@InterfaceAudience.Public
+@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.COPROC)
 @InterfaceStability.Evolving
 public class RowProcessorClient {
   public static <S extends Message, T extends Message>

View File

@@ -36,8 +36,7 @@ import org.apache.hadoop.classification.InterfaceStability;
  * Codec that just writes out Cell as a protobuf Cell Message. Does not write the mvcc stamp.
  * Use a different codec if you want that in the stream.
  */
-@InterfaceAudience.Public
-@InterfaceStability.Evolving
+@InterfaceAudience.Private
 public class MessageCodec implements Codec {
   static class MessageEncoder extends BaseEncoder {
     MessageEncoder(final OutputStream out) {

View File

@@ -27,7 +27,6 @@ import java.util.NavigableSet;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.Coprocessor;
 import org.apache.hadoop.hbase.CoprocessorEnvironment;
@@ -58,8 +57,7 @@ import com.google.protobuf.Service;
 * @param <Q> PB message that is used to transport Cell (<T>) instance
 * @param <R> PB message that is used to transport Promoted (<S>) instance
 */
-@InterfaceAudience.Public
-@InterfaceStability.Evolving
+@InterfaceAudience.Private
 public class AggregateImplementation<T, S, P extends Message, Q extends Message, R extends Message>
     extends AggregateService implements CoprocessorService, Coprocessor {
   protected static final Log log = LogFactory.getLog(AggregateImplementation.class);

View File

@@ -21,6 +21,7 @@ package org.apache.hadoop.hbase.coprocessor;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.hbase.HBaseInterfaceAudience;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HRegionInfo;
@@ -34,7 +35,7 @@ import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptio
 import java.io.IOException;
 import java.util.List;
-@InterfaceAudience.Public
+@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.COPROC)
 @InterfaceStability.Evolving
 public class BaseMasterObserver implements MasterObserver {
   @Override

View File

@@ -26,6 +26,7 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CoprocessorEnvironment;
+import org.apache.hadoop.hbase.HBaseInterfaceAudience;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValueUtil;
@@ -66,7 +67,7 @@ import com.google.common.collect.ImmutableList;
 * By extending it, you can create your own region observer without
 * overriding all abstract methods of RegionObserver.
 */
-@InterfaceAudience.Public
+@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.COPROC)
 @InterfaceStability.Evolving
 public abstract class BaseRegionObserver implements RegionObserver {
   @Override

View File

@@ -25,6 +25,7 @@ import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.Coprocessor;
 import org.apache.hadoop.hbase.CoprocessorEnvironment;
+import org.apache.hadoop.hbase.HBaseInterfaceAudience;
 import org.apache.hadoop.hbase.protobuf.ResponseConverter;
 import org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessRequest;
 import org.apache.hadoop.hbase.protobuf.generated.RowProcessorProtos.ProcessResponse;
@@ -42,7 +43,7 @@ import com.google.protobuf.Service;
 * This class demonstrates how to implement atomic read-modify-writes
 * using {@link HRegion#processRowsWithLocks} and Coprocessor endpoints.
 */
-@InterfaceAudience.Public
+@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.COPROC)
 @InterfaceStability.Evolving
 public abstract class BaseRowProcessorEndpoint<S extends Message, T extends Message>
     extends RowProcessorService implements CoprocessorService, Coprocessor {

View File

@@ -40,6 +40,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Coprocessor;
 import org.apache.hadoop.hbase.CoprocessorEnvironment;
 import org.apache.hadoop.hbase.DoNotRetryIOException;
+import org.apache.hadoop.hbase.HBaseInterfaceAudience;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.Server;
 import org.apache.hadoop.hbase.TableName;
@@ -75,7 +76,7 @@ import com.google.protobuf.ServiceException;
 * @param <E> the specific environment extension that a concrete implementation
 * provides
 */
-@InterfaceAudience.Public
+@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.COPROC)
 @InterfaceStability.Evolving
 public abstract class CoprocessorHost<E extends CoprocessorEnvironment> {
   public static final String REGION_COPROCESSOR_CONF_KEY =

View File

@@ -21,12 +21,13 @@ package org.apache.hadoop.hbase.coprocessor;
 import com.google.protobuf.Service;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.hbase.HBaseInterfaceAudience;
 /**
  * Coprocessor endpoints providing protobuf services should implement this
  * interface and return the {@link Service} instance via {@link #getService()}.
  */
-@InterfaceAudience.Public
+@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.COPROC)
 @InterfaceStability.Evolving
 public interface CoprocessorService {
   Service getService();

View File

@@ -22,9 +22,10 @@ package org.apache.hadoop.hbase.coprocessor;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.CoprocessorEnvironment;
+import org.apache.hadoop.hbase.HBaseInterfaceAudience;
 import org.apache.hadoop.hbase.master.MasterServices;
-@InterfaceAudience.Public
+@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.COPROC)
 @InterfaceStability.Evolving
 public interface MasterCoprocessorEnvironment extends CoprocessorEnvironment {
   /** @return reference to the HMaster services */

View File

@@ -25,6 +25,7 @@ import java.util.List;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.Coprocessor;
+import org.apache.hadoop.hbase.HBaseInterfaceAudience;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HRegionInfo;
@@ -38,7 +39,7 @@ import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.SnapshotDescriptio
 * Defines coprocessor hooks for interacting with operations on the
 * {@link org.apache.hadoop.hbase.master.HMaster} process.
 */
-@InterfaceAudience.Public
+@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.COPROC)
 @InterfaceStability.Evolving
 public interface MasterObserver extends Coprocessor {

View File

@@ -27,6 +27,7 @@ import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.Coprocessor;
 import org.apache.hadoop.hbase.CoprocessorEnvironment;
+import org.apache.hadoop.hbase.HBaseInterfaceAudience;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.client.Mutation;
 import org.apache.hadoop.hbase.regionserver.HRegion;
@@ -73,7 +74,7 @@ import com.google.protobuf.Service;
 * service.mutateRows(null, mrm);
 * </pre></code>
 */
-@InterfaceAudience.Public
+@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.COPROC)
 @InterfaceStability.Evolving
 public class MultiRowMutationEndpoint extends MultiRowMutationService implements
     CoprocessorService, Coprocessor {

View File

@@ -22,6 +22,7 @@ package org.apache.hadoop.hbase.coprocessor;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.CoprocessorEnvironment;
+import org.apache.hadoop.hbase.HBaseInterfaceAudience;
 /**
  * Carries the execution state for a given invocation of an Observer coprocessor
@@ -33,7 +34,7 @@ import org.apache.hadoop.hbase.CoprocessorEnvironment;
 * @param <E> The {@link CoprocessorEnvironment} subclass applicable to the
 * revelant Observer interface.
 */
-@InterfaceAudience.Public
+@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.COPROC)
 @InterfaceStability.Evolving
 public class ObserverContext<E extends CoprocessorEnvironment> {
   private E env;

View File

@@ -24,10 +24,11 @@ import java.util.concurrent.ConcurrentMap;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.CoprocessorEnvironment;
+import org.apache.hadoop.hbase.HBaseInterfaceAudience;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.RegionServerServices;
-@InterfaceAudience.Public
+@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.COPROC)
 @InterfaceStability.Evolving
 public interface RegionCoprocessorEnvironment extends CoprocessorEnvironment {
   /** @return the region associated with this coprocessor */

View File

@@ -25,6 +25,7 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.Coprocessor;
+import org.apache.hadoop.hbase.HBaseInterfaceAudience;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.client.Append;
@@ -62,7 +63,7 @@ import org.apache.hadoop.hbase.util.Pair;
 * Coprocessors implement this interface to observe and mediate client actions
 * on the region.
 */
-@InterfaceAudience.Public
+@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.COPROC)
 @InterfaceStability.Evolving
 public interface RegionObserver extends Coprocessor {

View File

@@ -22,9 +22,10 @@ package org.apache.hadoop.hbase.coprocessor;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.CoprocessorEnvironment;
+import org.apache.hadoop.hbase.HBaseInterfaceAudience;
 import org.apache.hadoop.hbase.regionserver.wal.HLog;
-@InterfaceAudience.Public
+@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.COPROC)
 @InterfaceStability.Evolving
 public interface WALCoprocessorEnvironment extends CoprocessorEnvironment {
   /** @return reference to the region server services */

View File

@@ -22,6 +22,7 @@ package org.apache.hadoop.hbase.coprocessor;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.Coprocessor;
+import org.apache.hadoop.hbase.HBaseInterfaceAudience;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.regionserver.wal.HLogKey;
 import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
@@ -38,7 +39,7 @@ import java.io.IOException;
 * Defines coprocessor hooks for interacting with operations on the
 * {@link org.apache.hadoop.hbase.regionserver.wal.HLog}.
 */
-@InterfaceAudience.Public
+@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.COPROC)
 @InterfaceStability.Evolving
 public interface WALObserver extends Coprocessor {

View File

@@ -23,7 +23,6 @@ import java.util.List;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
 /**
  * The dispatcher acts as the state holding entity for foreign error handling. The first
@@ -39,8 +38,7 @@ import org.apache.hadoop.classification.InterfaceStability;
 * This is thread-safe and must be because this is expected to be used to propagate exceptions
 * from foreign threads.
 */
-@InterfaceAudience.Public
-@InterfaceStability.Evolving
+@InterfaceAudience.Private
 public class ForeignExceptionDispatcher implements ForeignExceptionListener, ForeignExceptionSnare {
   public static final Log LOG = LogFactory.getLog(ForeignExceptionDispatcher.class);
   protected final String name;

View File

@@ -36,8 +36,7 @@ import org.apache.hadoop.classification.InterfaceStability;
 * the snare. The snare is referenced and checked by multiple threads which enables exception
 * notification in all the involved threads/processes.
 */
-@InterfaceAudience.Public
-@InterfaceStability.Evolving
+@InterfaceAudience.Private
 public interface ForeignExceptionSnare {
   /**

View File

@@ -33,8 +33,7 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 * The timer won't start tracking time until calling {@link #start()}. If {@link #complete()} or
 * {@link #trigger()} is called before {@link #start()}, calls to {@link #start()} will fail.
 */
-@InterfaceAudience.Public
-@InterfaceStability.Evolving
+@InterfaceAudience.Private
 public class TimeoutExceptionInjector {
   private static final Log LOG = LogFactory.getLog(TimeoutExceptionInjector.class);

View File

@@ -28,7 +28,7 @@ import org.apache.hadoop.classification.InterfaceAudience;
 * Map from type T to int and vice-versa. Used for reducing bit field item
 * counts.
 */
-@InterfaceAudience.Public
+@InterfaceAudience.Private
 public final class UniqueIndexMap<T> implements Serializable {
   private static final long serialVersionUID = -1145635738654002342L;

View File

@@ -18,6 +18,8 @@
  */
 package org.apache.hadoop.hbase.mapred;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.util.ProgramDriver;
 /**
@@ -25,6 +27,8 @@ import org.apache.hadoop.util.ProgramDriver;
 * name of job to this main.
 */
 @Deprecated
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class Driver {
   /**
    * @param args

View File

@@ -21,6 +21,8 @@ package org.apache.hadoop.hbase.mapred;
 import java.io.IOException;
 import java.util.ArrayList;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.KeyValue;
@@ -37,6 +39,8 @@ import org.apache.hadoop.mapred.Reporter;
 * Extract grouping columns from input record
 */
 @Deprecated
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class GroupingTableMap
     extends MapReduceBase
     implements TableMap<ImmutableBytesWritable,Result> {

View File

@@ -22,6 +22,8 @@ import java.io.IOException;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
@@ -39,6 +41,8 @@ import org.apache.hadoop.mapred.Partitioner;
 * @param <V2>
 */
 @Deprecated
+@InterfaceAudience.Public
+@InterfaceStability.Stable
 public class HRegionPartitioner<K2,V2>
     implements Partitioner<ImmutableBytesWritable, V2> {
   private final Log LOG = LogFactory.getLog(TableInputFormat.class);

View File

@ -20,6 +20,8 @@ package org.apache.hadoop.hbase.mapred;
import java.io.IOException; import java.io.IOException;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable; import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.mapred.JobConf;
@ -31,6 +33,8 @@ import org.apache.hadoop.mapred.Reporter;
* Pass the given key and record as-is to reduce * Pass the given key and record as-is to reduce
*/ */
@Deprecated @Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class IdentityTableMap public class IdentityTableMap
extends MapReduceBase extends MapReduceBase
implements TableMap<ImmutableBytesWritable, Result> { implements TableMap<ImmutableBytesWritable, Result> {

View File

@ -23,6 +23,8 @@ import java.util.Iterator;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable; import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.mapred.MapReduceBase; import org.apache.hadoop.mapred.MapReduceBase;
@ -33,6 +35,8 @@ import org.apache.hadoop.mapred.Reporter;
* Write each (key, record) pair to the table * Write each (key, record) pair to the table
*/ */
@Deprecated @Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class IdentityTableReduce public class IdentityTableReduce
extends MapReduceBase extends MapReduceBase
implements TableReduce<ImmutableBytesWritable, Put> { implements TableReduce<ImmutableBytesWritable, Put> {

View File

@ -19,14 +19,14 @@
package org.apache.hadoop.hbase.mapred; package org.apache.hadoop.hbase.mapred;
import java.io.IOException; import java.io.IOException;
import java.util.Map;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configured; import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.mapred.FileOutputFormat; import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient; import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf; import org.apache.hadoop.mapred.JobConf;
@ -42,6 +42,8 @@ import org.apache.hadoop.util.ToolRunner;
* Uses an {@link IdentityReducer} * Uses an {@link IdentityReducer}
*/ */
@Deprecated @Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class RowCounter extends Configured implements Tool { public class RowCounter extends Configured implements Tool {
// Name of this 'program' // Name of this 'program'
static final String NAME = "rowcounter"; static final String NAME = "rowcounter";

View File

@ -22,6 +22,8 @@ import java.io.IOException;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.client.HTable;
@ -35,6 +37,8 @@ import org.apache.hadoop.util.StringUtils;
* Convert HBase tabular data into a format that is consumable by Map/Reduce. * Convert HBase tabular data into a format that is consumable by Map/Reduce.
*/ */
@Deprecated @Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class TableInputFormat extends TableInputFormatBase implements public class TableInputFormat extends TableInputFormatBase implements
JobConfigurable { JobConfigurable {
private final Log LOG = LogFactory.getLog(TableInputFormat.class); private final Log LOG = LogFactory.getLog(TableInputFormat.class);

View File

@ -22,6 +22,8 @@ import java.io.IOException;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.Result;
@ -64,6 +66,8 @@ import org.apache.hadoop.mapred.Reporter;
*/ */
@Deprecated @Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public abstract class TableInputFormatBase public abstract class TableInputFormatBase
implements InputFormat<ImmutableBytesWritable, Result> { implements InputFormat<ImmutableBytesWritable, Result> {
final Log LOG = LogFactory.getLog(TableInputFormatBase.class); final Log LOG = LogFactory.getLog(TableInputFormatBase.class);

View File

@ -18,9 +18,10 @@
*/ */
package org.apache.hadoop.hbase.mapred; package org.apache.hadoop.hbase.mapred;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable; import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.io.Writable; import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.io.WritableComparable; import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapred.Mapper; import org.apache.hadoop.mapred.Mapper;
@ -32,6 +33,8 @@ import org.apache.hadoop.mapred.Mapper;
* @param <V> Writable value class * @param <V> Writable value class
*/ */
@Deprecated @Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public interface TableMap<K extends WritableComparable<? super K>, V> public interface TableMap<K extends WritableComparable<? super K>, V>
extends Mapper<ImmutableBytesWritable, Result, K, V> { extends Mapper<ImmutableBytesWritable, Result, K, V> {
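TableMap stays Public and Stable despite deprecation because user mappers implement it directly. A sketch of such a mapper, assuming the old mapred Mapper contract (OutputCollector plus Reporter); the class name is hypothetical and not part of this commit:

    import java.io.IOException;

    import org.apache.hadoop.hbase.client.Result;
    import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
    import org.apache.hadoop.hbase.mapred.TableMap;
    import org.apache.hadoop.mapred.JobConf;
    import org.apache.hadoop.mapred.MapReduceBase;
    import org.apache.hadoop.mapred.OutputCollector;
    import org.apache.hadoop.mapred.Reporter;

    /** Hypothetical user mapper in the style of IdentityTableMap; not part of this commit. */
    public class ExampleRowMapper extends MapReduceBase
        implements TableMap<ImmutableBytesWritable, Result> {

      @Override
      public void configure(JobConf job) {
        // no per-job configuration needed for this sketch
      }

      @Override
      public void map(ImmutableBytesWritable rowKey, Result columns,
          OutputCollector<ImmutableBytesWritable, Result> output, Reporter reporter)
          throws IOException {
        // pass every row through unchanged
        output.collect(rowKey, columns);
      }
    }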

View File

@ -20,6 +20,8 @@ package org.apache.hadoop.hbase.mapred;
import java.io.IOException; import java.io.IOException;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.catalog.MetaReader; import org.apache.hadoop.hbase.catalog.MetaReader;
import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Put;
@ -39,6 +41,8 @@ import org.apache.hadoop.mapred.jobcontrol.Job;
* Utility for {@link TableMap} and {@link TableReduce} * Utility for {@link TableMap} and {@link TableReduce}
*/ */
@Deprecated @Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
public class TableMapReduceUtil { public class TableMapReduceUtil {

View File

@ -22,6 +22,8 @@ import java.io.IOException;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.client.HTable;
@ -39,6 +41,8 @@ import org.apache.hadoop.util.Progressable;
* Convert Map/Reduce output and write it to an HBase table * Convert Map/Reduce output and write it to an HBase table
*/ */
@Deprecated @Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class TableOutputFormat extends public class TableOutputFormat extends
FileOutputFormat<ImmutableBytesWritable, Put> { FileOutputFormat<ImmutableBytesWritable, Put> {

View File

@ -20,6 +20,8 @@ package org.apache.hadoop.hbase.mapred;
import java.io.IOException; import java.io.IOException;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.filter.Filter; import org.apache.hadoop.hbase.filter.Filter;
@ -30,6 +32,9 @@ import org.apache.hadoop.mapred.RecordReader;
/** /**
* Iterate over HBase table data, returning (Text, RowResult) pairs * Iterate over HBase table data, returning (Text, RowResult) pairs
*/ */
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class TableRecordReader public class TableRecordReader
implements RecordReader<ImmutableBytesWritable, Result> { implements RecordReader<ImmutableBytesWritable, Result> {

View File

@ -22,6 +22,8 @@ import java.io.IOException;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.Result;
@ -39,6 +41,9 @@ import static org.apache.hadoop.hbase.mapreduce.TableRecordReaderImpl.LOG_PER_RO
/** /**
* Iterate over HBase table data, returning (Text, RowResult) pairs * Iterate over HBase table data, returning (Text, RowResult) pairs
*/ */
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class TableRecordReaderImpl { public class TableRecordReaderImpl {
static final Log LOG = LogFactory.getLog(TableRecordReaderImpl.class); static final Log LOG = LogFactory.getLog(TableRecordReaderImpl.class);

View File

@ -18,9 +18,10 @@
*/ */
package org.apache.hadoop.hbase.mapred; package org.apache.hadoop.hbase.mapred;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable; import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable; import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.mapred.Reducer; import org.apache.hadoop.mapred.Reducer;
@ -31,6 +32,8 @@ import org.apache.hadoop.mapred.Reducer;
* @param <V> value class * @param <V> value class
*/ */
@Deprecated @Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
public interface TableReduce<K extends WritableComparable, V> public interface TableReduce<K extends WritableComparable, V>
extends Reducer<K, V, ImmutableBytesWritable, Put> { extends Reducer<K, V, ImmutableBytesWritable, Put> {

View File

@ -22,6 +22,8 @@ import java.io.DataInput;
import java.io.DataOutput; import java.io.DataOutput;
import java.io.IOException; import java.io.IOException;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
@ -31,6 +33,8 @@ import org.apache.hadoop.mapred.InputSplit;
* A table split corresponds to a key range [low, high) * A table split corresponds to a key range [low, high)
*/ */
@Deprecated @Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class TableSplit implements InputSplit, Comparable<TableSplit> { public class TableSplit implements InputSplit, Comparable<TableSplit> {
private TableName m_tableName; private TableName m_tableName;
private byte [] m_startRow; private byte [] m_startRow;

View File

@ -45,7 +45,7 @@ import org.apache.hadoop.hbase.Stoppable;
* *
* <p>This class produces plans for the {@link AssignmentManager} to execute. * <p>This class produces plans for the {@link AssignmentManager} to execute.
*/ */
@InterfaceAudience.Public @InterfaceAudience.Private
public interface LoadBalancer extends Configurable, Stoppable { public interface LoadBalancer extends Configurable, Stoppable {
/** /**

View File

@ -27,7 +27,6 @@ import java.util.concurrent.TimeUnit;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.errorhandling.ForeignException; import org.apache.hadoop.hbase.errorhandling.ForeignException;
import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher; import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher;
import org.apache.hadoop.hbase.errorhandling.ForeignExceptionListener; import org.apache.hadoop.hbase.errorhandling.ForeignExceptionListener;
@ -65,8 +64,7 @@ import com.google.common.collect.Lists;
* for them implicitly via {@link ProcedureCoordinator#startProcedure(ForeignExceptionDispatcher, * for them implicitly via {@link ProcedureCoordinator#startProcedure(ForeignExceptionDispatcher,
* String, byte[], List)}} * String, byte[], List)}}
*/ */
@InterfaceAudience.Public @InterfaceAudience.Private
@InterfaceStability.Evolving
public class Procedure implements Callable<Void>, ForeignExceptionListener { public class Procedure implements Callable<Void>, ForeignExceptionListener {
private static final Log LOG = LogFactory.getLog(Procedure.class); private static final Log LOG = LogFactory.getLog(Procedure.class);

View File

@ -33,7 +33,6 @@ import java.util.concurrent.TimeUnit;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.DaemonThreadFactory; import org.apache.hadoop.hbase.DaemonThreadFactory;
import org.apache.hadoop.hbase.errorhandling.ForeignException; import org.apache.hadoop.hbase.errorhandling.ForeignException;
import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher; import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher;
@ -46,8 +45,7 @@ import com.google.common.collect.MapMaker;
* The {@link Procedure} is generic and subclassing or customization shouldn't be * The {@link Procedure} is generic and subclassing or customization shouldn't be
* necessary -- any customization should happen just in {@link Subprocedure}s. * necessary -- any customization should happen just in {@link Subprocedure}s.
*/ */
@InterfaceAudience.Public @InterfaceAudience.Private
@InterfaceStability.Evolving
public class ProcedureCoordinator { public class ProcedureCoordinator {
private static final Log LOG = LogFactory.getLog(ProcedureCoordinator.class); private static final Log LOG = LogFactory.getLog(ProcedureCoordinator.class);

View File

@ -22,7 +22,6 @@ import java.io.IOException;
import java.util.List; import java.util.List;
import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.errorhandling.ForeignException; import org.apache.hadoop.hbase.errorhandling.ForeignException;
/** /**
@ -30,8 +29,7 @@ import org.apache.hadoop.hbase.errorhandling.ForeignException;
* distributed members. * distributed members.
* @see ProcedureCoordinator * @see ProcedureCoordinator
*/ */
@InterfaceAudience.Public @InterfaceAudience.Private
@InterfaceStability.Evolving
public interface ProcedureCoordinatorRpcs extends Closeable { public interface ProcedureCoordinatorRpcs extends Closeable {
/** /**

View File

@ -31,7 +31,6 @@ import java.util.concurrent.TimeUnit;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.DaemonThreadFactory; import org.apache.hadoop.hbase.DaemonThreadFactory;
import org.apache.hadoop.hbase.errorhandling.ForeignException; import org.apache.hadoop.hbase.errorhandling.ForeignException;
@ -46,8 +45,7 @@ import com.google.common.collect.MapMaker;
* currently running subprocedures are notified as failed since there is no longer a way to reach any * currently running subprocedures are notified as failed since there is no longer a way to reach any
* other members or coordinators since the rpcs are down. * other members or coordinators since the rpcs are down.
*/ */
@InterfaceAudience.Public @InterfaceAudience.Private
@InterfaceStability.Evolving
public class ProcedureMember implements Closeable { public class ProcedureMember implements Closeable {
private static final Log LOG = LogFactory.getLog(ProcedureMember.class); private static final Log LOG = LogFactory.getLog(ProcedureMember.class);

View File

@ -28,8 +28,7 @@ import org.apache.hadoop.hbase.errorhandling.ForeignException;
* This is the notification interface for Procedures that encapsulates message passing from * This is the notification interface for Procedures that encapsulates message passing from
* members to a coordinator. Each of these calls should send a message to the coordinator. * members to a coordinator. Each of these calls should send a message to the coordinator.
*/ */
@InterfaceAudience.Public @InterfaceAudience.Private
@InterfaceStability.Evolving
public interface ProcedureMemberRpcs extends Closeable { public interface ProcedureMemberRpcs extends Closeable {
/** /**

View File

@ -18,13 +18,11 @@
package org.apache.hadoop.hbase.procedure; package org.apache.hadoop.hbase.procedure;
import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/** /**
* Task builder to build instances of a {@link ProcedureMember}'s {@link Subprocedure}s. * Task builder to build instances of a {@link ProcedureMember}'s {@link Subprocedure}s.
*/ */
@InterfaceAudience.Public @InterfaceAudience.Private
@InterfaceStability.Evolving
public interface SubprocedureFactory { public interface SubprocedureFactory {
/** /**

View File

@ -36,8 +36,7 @@ import com.google.protobuf.InvalidProtocolBufferException;
/** /**
* ZooKeeper based {@link ProcedureCoordinatorRpcs} for a {@link ProcedureCoordinator} * ZooKeeper based {@link ProcedureCoordinatorRpcs} for a {@link ProcedureCoordinator}
*/ */
@InterfaceAudience.Public @InterfaceAudience.Private
@InterfaceStability.Evolving
public class ZKProcedureCoordinatorRpcs implements ProcedureCoordinatorRpcs { public class ZKProcedureCoordinatorRpcs implements ProcedureCoordinatorRpcs {
public static final Log LOG = LogFactory.getLog(ZKProcedureCoordinatorRpcs.class); public static final Log LOG = LogFactory.getLog(ZKProcedureCoordinatorRpcs.class);
private ZKProcedureUtil zkProc = null; private ZKProcedureUtil zkProc = null;

View File

@ -24,7 +24,6 @@ import java.util.List;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.errorhandling.ForeignException; import org.apache.hadoop.hbase.errorhandling.ForeignException;
import org.apache.hadoop.hbase.protobuf.ProtobufUtil; import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
@ -52,8 +51,7 @@ import com.google.protobuf.InvalidProtocolBufferException;
* its re-initialization will delete the znodes and require all in progress subprocedures to start * its re-initialization will delete the znodes and require all in progress subprocedures to start
* anew. * anew.
*/ */
@InterfaceAudience.Public @InterfaceAudience.Private
@InterfaceStability.Evolving
public class ZKProcedureMemberRpcs implements ProcedureMemberRpcs { public class ZKProcedureMemberRpcs implements ProcedureMemberRpcs {
private static final Log LOG = LogFactory.getLog(ZKProcedureMemberRpcs.class); private static final Log LOG = LogFactory.getLog(ZKProcedureMemberRpcs.class);

View File

@ -24,7 +24,6 @@ import java.util.List;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.zookeeper.ZKUtil; import org.apache.hadoop.hbase.zookeeper.ZKUtil;
import org.apache.hadoop.hbase.zookeeper.ZooKeeperListener; import org.apache.hadoop.hbase.zookeeper.ZooKeeperListener;
import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher; import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
@ -48,8 +47,7 @@ import org.apache.zookeeper.KeeperException;
* *
* Assumption here that procedure names are unique * Assumption here that procedure names are unique
*/ */
@InterfaceAudience.Public @InterfaceAudience.Private
@InterfaceStability.Evolving
public abstract class ZKProcedureUtil public abstract class ZKProcedureUtil
extends ZooKeeperListener implements Closeable { extends ZooKeeperListener implements Closeable {

View File

@ -18,7 +18,7 @@
package org.apache.hadoop.hbase.regionserver; package org.apache.hadoop.hbase.regionserver;
import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.coprocessor.RegionObserver; import org.apache.hadoop.hbase.coprocessor.RegionObserver;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit; import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
@ -29,8 +29,7 @@ import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
* @see RegionObserver#postBatchMutate(ObserverContext, MiniBatchOperationInProgress) * @see RegionObserver#postBatchMutate(ObserverContext, MiniBatchOperationInProgress)
* @param <T> Pair<Mutation, Integer> pair of Mutations and associated rowlock ids. * @param <T> Pair<Mutation, Integer> pair of Mutations and associated rowlock ids.
*/ */
@InterfaceAudience.Public @InterfaceAudience.Private
@InterfaceStability.Evolving
public class MiniBatchOperationInProgress<T> { public class MiniBatchOperationInProgress<T> {
private final T[] operations; private final T[] operations;
private final OperationStatus[] retCodeDetails; private final OperationStatus[] retCodeDetails;

View File

@ -23,15 +23,13 @@ import java.util.List;
import java.util.UUID; import java.util.UUID;
import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.client.Durability; import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.regionserver.wal.WALEdit; import org.apache.hadoop.hbase.regionserver.wal.WALEdit;
import com.google.protobuf.Message; import com.google.protobuf.Message;
@InterfaceAudience.Public @InterfaceAudience.Private
@InterfaceStability.Evolving
/** /**
* Defines the procedure to atomically perform multiple scans and mutations * Defines the procedure to atomically perform multiple scans and mutations

View File

@ -22,9 +22,8 @@ import java.util.Collection;
import java.util.Map; import java.util.Map;
import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.security.User;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
@ -32,8 +31,7 @@ import org.apache.hadoop.hbase.util.Bytes;
* Represents the result of an authorization check for logging and error * Represents the result of an authorization check for logging and error
* reporting. * reporting.
*/ */
@InterfaceAudience.Public @InterfaceAudience.Private
@InterfaceStability.Evolving
public class AuthResult { public class AuthResult {
private final boolean allowed; private final boolean allowed;
private final String namespace; private final String namespace;

View File

@ -31,7 +31,7 @@ import org.apache.hadoop.classification.InterfaceStability;
import com.google.common.base.Preconditions; import com.google.common.base.Preconditions;
@InterfaceAudience.Public @InterfaceAudience.Private
@InterfaceStability.Evolving @InterfaceStability.Evolving
public class DirectMemoryUtils { public class DirectMemoryUtils {
/** /**

View File

@ -85,8 +85,7 @@ import com.google.protobuf.InvalidProtocolBufferException;
/** /**
* Utility methods for interacting with the underlying file system. * Utility methods for interacting with the underlying file system.
*/ */
@InterfaceAudience.Public @InterfaceAudience.Private
@InterfaceStability.Evolving
public abstract class FSUtils { public abstract class FSUtils {
private static final Log LOG = LogFactory.getLog(FSUtils.class); private static final Log LOG = LogFactory.getLog(FSUtils.class);

View File

@ -25,7 +25,6 @@ import java.util.Map;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HRegionInfo;
@ -48,8 +47,7 @@ import org.apache.zookeeper.KeeperException;
* This class contains helper methods that repair parts of hbase's filesystem * This class contains helper methods that repair parts of hbase's filesystem
* contents. * contents.
*/ */
@InterfaceAudience.Public @InterfaceAudience.Private
@InterfaceStability.Evolving
public class HBaseFsckRepair { public class HBaseFsckRepair {
public static final Log LOG = LogFactory.getLog(HBaseFsckRepair.class); public static final Log LOG = LogFactory.getLog(HBaseFsckRepair.class);

View File

@ -27,7 +27,7 @@ import org.apache.hadoop.classification.InterfaceStability;
* This class encapsulates a byte array and overrides hashCode and equals so * This class encapsulates a byte array and overrides hashCode and equals so
* that its identity is based on the data rather than the array instance. * that its identity is based on the data rather than the array instance.
*/ */
@InterfaceAudience.Public @InterfaceAudience.Private
@InterfaceStability.Stable @InterfaceStability.Stable
public class HashedBytes { public class HashedBytes {
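The javadoc above is the whole story for HashedBytes: a raw byte[] compares by reference, so it cannot serve as a map key for row bytes. A minimal sketch of the wrapper idea (content-based equals and hashCode), not the actual HBase implementation:

    import java.util.Arrays;

    /** Sketch only: two wrappers around equal byte arrays collide in a HashMap as expected. */
    public final class BytesKey {
      private final byte[] bytes;

      public BytesKey(byte[] bytes) {
        this.bytes = bytes;
      }

      @Override
      public int hashCode() {
        return Arrays.hashCode(bytes);
      }

      @Override
      public boolean equals(Object other) {
        return other instanceof BytesKey && Arrays.equals(bytes, ((BytesKey) other).bytes);
      }
    }

With such a wrapper, a lookup under an equal but distinct array still finds the entry, which a bare byte[] key would miss.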

View File

@ -29,7 +29,7 @@ import org.apache.hadoop.classification.InterfaceStability;
* class has a utility function to determine whether the current JVM * class has a utility function to determine whether the current JVM
* is known to be unstable. * is known to be unstable.
*/ */
@InterfaceAudience.Public @InterfaceAudience.Private
@InterfaceStability.Stable @InterfaceStability.Stable
public abstract class JvmVersion { public abstract class JvmVersion {
private static Set<String> BAD_JVM_VERSIONS = new HashSet<String>(); private static Set<String> BAD_JVM_VERSIONS = new HashSet<String>();
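JvmVersion keeps a set of version strings known to be unstable and checks the running JVM against it. A rough sketch of how such a check can be written; the property read and the sample entry are assumptions, not necessarily what the HBase class uses:

    import java.util.HashSet;
    import java.util.Set;

    public final class JvmVersionCheckSketch {
      // Hypothetical entry; the real class maintains its own list of unstable JVMs.
      private static final Set<String> BAD_JVM_VERSIONS = new HashSet<String>();
      static {
        BAD_JVM_VERSIONS.add("1.6.0_18");
      }

      private JvmVersionCheckSketch() {
      }

      /** Returns true unless the running JVM matches a known-bad version string. */
      public static boolean isGoodVersion() {
        return !BAD_JVM_VERSIONS.contains(System.getProperty("java.version"));
      }
    }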

View File

@ -45,8 +45,7 @@ import org.apache.hadoop.classification.InterfaceStability;
* @param <E> A class implementing {@link java.lang.Comparable} or able to be * @param <E> A class implementing {@link java.lang.Comparable} or able to be
* compared by a provided comparator. * compared by a provided comparator.
*/ */
@InterfaceAudience.Public @InterfaceAudience.Private
@InterfaceStability.Stable
public class SortedCopyOnWriteSet<E> implements SortedSet<E> { public class SortedCopyOnWriteSet<E> implements SortedSet<E> {
private volatile SortedSet<E> internalSet; private volatile SortedSet<E> internalSet;
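SortedCopyOnWriteSet keeps its elements behind a volatile SortedSet reference; the copy-on-write idea is that writers clone the backing TreeSet, mutate the clone, and swap the reference, so readers iterate a stable snapshot without locking. A minimal sketch of the add path under those assumptions, not the full SortedSet contract:

    import java.util.SortedSet;
    import java.util.TreeSet;

    /** Sketch of the copy-on-write idea, not the actual SortedCopyOnWriteSet implementation. */
    public class CopyOnWriteSortedSetSketch<E> {
      private volatile SortedSet<E> internalSet = new TreeSet<E>();

      public synchronized boolean add(E element) {
        SortedSet<E> copy = new TreeSet<E>(internalSet);
        boolean changed = copy.add(element);
        internalSet = copy;   // readers see either the old or the new snapshot
        return changed;
      }

      public SortedSet<E> snapshot() {
        return internalSet;   // safe to iterate without locking
      }
    }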