HBASE-20212 Make all Public classes have InterfaceAudience category

Signed-off-by: tedyu <yuzhihong@gmail.com>
Signed-off-by: Michael Stack <stack@apache.org>
Chia-Ping Tsai 2018-03-22 09:24:14 +08:00
parent 0a94c9c250
commit dd9e46bbf5
188 changed files with 611 additions and 737 deletions
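Across the 188 files, the change follows one repeating pattern: import the Yetus annotation and tag the class with an explicit audience. A representative sketch (the class name is illustrative, not taken from the diff):

import org.apache.yetus.audience.InterfaceAudience;

// Every public class now declares its audience; internal and example classes get Private.
@InterfaceAudience.Private
public class SomeExampleTool {
  // ... class body unchanged by this commit ...
}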

View File

@@ -47,6 +47,10 @@
         <failOnViolation>true</failOnViolation>
       </configuration>
     </plugin>
+    <plugin>
+      <groupId>net.revelc.code</groupId>
+      <artifactId>warbucks-maven-plugin</artifactId>
+    </plugin>
   </plugins>
 </build>
 </project>
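The plugin invocation above carries no inline configuration, so the actual rule set presumably lives under pluginManagement in the parent pom, which this excerpt does not show. A sketch of what such a rule could look like; the rule element names are assumptions based on recollection of the warbucks-maven-plugin schema, not taken from this commit:

<plugin>
  <groupId>net.revelc.code</groupId>
  <artifactId>warbucks-maven-plugin</artifactId>
  <configuration>
    <rules>
      <rule>
        <!-- assumption: check main (non-test) HBase classes... -->
        <classPattern>org\.apache\.hadoop\.hbase\..*</classPattern>
        <includeTestClasses>false</includeTestClasses>
        <includePublicClasses>true</includePublicClasses>
        <!-- ...and require some InterfaceAudience annotation on each of them -->
        <classAnnotationPattern>org[.]apache[.]yetus[.]audience[.]InterfaceAudience.*</classAnnotationPattern>
      </rule>
    </rules>
  </configuration>
</plugin>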

View File

@@ -47,6 +47,10 @@
       <groupId>org.apache.maven.plugins</groupId>
       <artifactId>maven-source-plugin</artifactId>
     </plugin>
+    <plugin>
+      <groupId>net.revelc.code</groupId>
+      <artifactId>warbucks-maven-plugin</artifactId>
+    </plugin>
   </plugins>
   <pluginManagement>
     <plugins>

View File

@@ -1,540 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase;
import java.io.IOException;
import java.lang.annotation.Annotation;
import java.lang.reflect.Constructor;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import java.util.Set;
import org.apache.hadoop.hbase.ClassFinder.And;
import org.apache.hadoop.hbase.ClassFinder.FileNameFilter;
import org.apache.hadoop.hbase.ClassFinder.Not;
import org.apache.hadoop.hbase.ClassTestFinder.TestClassFilter;
import org.apache.hadoop.hbase.ClassTestFinder.TestFileNameFilter;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.hadoop.hbase.util.Triple;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
import org.junit.Assert;
import org.junit.ClassRule;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Test cases for ensuring our client visible classes have annotations for
* {@link InterfaceAudience}.
* <p>
* All classes in the hbase-client and hbase-common modules MUST have InterfaceAudience annotations.
* Think twice before marking something InterfaceAudience.Public. Make sure that it is an
* interface, not a class (in most cases), and that clients will actually depend on it. Once
* something is marked Public, we cannot change its signatures within a major release. NOT
* everything in the hbase-client module, nor every public Java class, has to be marked
* InterfaceAudience.Public; ONLY the classes that an HBase application will use directly (Table,
* Get, etc., versus ProtobufUtil). Also, classes annotated with InterfaceAudience.Public MUST NOT
* have InterfaceStability annotations, because their stability depends only on versioning.
* <p>
* All classes which are marked as InterfaceAudience.LimitedPrivate MUST also have
* InterfaceStability annotations. The only exception is HBaseInterfaceAudience.CONFIG. It is used
* to indicate that the class name will be exposed in user-facing configuration files.
* <p>
* Also note that HBase has its own annotations in the hbase-annotations module, with the same
* names as in Hadoop. You should use the HBase classes.
* <p>
* See
* https://hadoop.apache.org/docs/current/hadoop-project-dist/hadoop-common/InterfaceClassification.html
* and https://issues.apache.org/jira/browse/HBASE-10462.
*/
@Category(SmallTests.class)
public class TestInterfaceAudienceAnnotations {
@ClassRule
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestInterfaceAudienceAnnotations.class);
private static final String HBASE_PROTOBUF = "org.apache.hadoop.hbase.protobuf.generated";
private static final Logger LOG = LoggerFactory.getLogger(TestInterfaceAudienceAnnotations.class);
/** Selects classes with "generated" in their package name. */
static class GeneratedClassFilter implements ClassFinder.ClassFilter {
@Override
public boolean isCandidateClass(Class<?> c) {
return c.getPackage().getName().contains("generated");
}
}
static class ShadedProtobufClassFilter implements ClassFinder.ClassFilter {
@Override
public boolean isCandidateClass(Class<?> c) {
return c.getPackage().getName()
    .contains("org.apache.hbase.thirdparty.com.google.protobuf");
}
}
/** Selects classes with one of the {@link InterfaceAudience} annotations in their class
* declaration.
*/
class InterfaceAudienceAnnotatedClassFilter implements ClassFinder.ClassFilter {
@Override
public boolean isCandidateClass(Class<?> c) {
if (getAnnotation(c) != null) {
// class itself has a declared annotation.
return true;
}
// If this is an internal class, look for the encapsulating class to see whether it has
// annotation. All inner classes of private classes are considered annotated.
return isAnnotatedPrivate(c.getEnclosingClass());
}
private boolean isAnnotatedPrivate(Class<?> c) {
if (c == null) {
return false;
}
Annotation ann = getAnnotation(c);
if (ann != null && !InterfaceAudience.Public.class.equals(ann.annotationType())) {
return true;
}
return isAnnotatedPrivate(c.getEnclosingClass());
}
protected Annotation getAnnotation(Class<?> c) {
// we should get only declared annotations, not inherited ones
Annotation[] anns = c.getDeclaredAnnotations();
for (Annotation ann : anns) {
// Hadoop clearly got it wrong by not making the annotation values (private, public, ..)
// an enum; instead we have three independent annotations!
Class<?> type = ann.annotationType();
if (isInterfaceAudienceClass(type)) {
return ann;
}
}
return null;
}
}
/** Selects classes with one of the {@link InterfaceStability} annotations in their class
* declaration.
*/
class InterfaceStabilityAnnotatedClassFilter implements ClassFinder.ClassFilter {
@Override
public boolean isCandidateClass(Class<?> c) {
if (getAnnotation(c) != null) {
// class itself has a declared annotation.
return true;
}
return false;
}
protected Class<?> getAnnotation(Class<?> c) {
// we should get only declared annotations, not inherited ones
Annotation[] anns = c.getDeclaredAnnotations();
for (Annotation ann : anns) {
// Hadoop clearly got it wrong by not making the annotation values (private, public, ..)
// an enum; instead we have three independent annotations!
Class<?> type = ann.annotationType();
if (isInterfaceStabilityClass(type)) {
return type;
}
}
return null;
}
}
/**
* Selects classes with the {@link InterfaceAudience.Public} annotation in their class
* declaration.
*/
class InterfaceAudiencePublicAnnotatedClassFilter extends InterfaceAudienceAnnotatedClassFilter {
@Override
public boolean isCandidateClass(Class<?> c) {
Annotation ann = getAnnotation(c);
return ann != null && InterfaceAudience.Public.class.equals(ann.annotationType());
}
}
/**
* Selects classes with the {@link InterfaceAudience.LimitedPrivate} annotation in their
* class declaration.
*/
class InterfaceAudienceLimitedPrivateAnnotatedNotConfigClassFilter
extends InterfaceAudienceAnnotatedClassFilter {
@Override
public boolean isCandidateClass(Class<?> c) {
Annotation ann = getAnnotation(c);
if (ann == null || !InterfaceAudience.LimitedPrivate.class.equals(ann.annotationType())) {
return false;
}
InterfaceAudience.LimitedPrivate iaAnn = (InterfaceAudience.LimitedPrivate) ann;
return iaAnn.value().length == 0 || !HBaseInterfaceAudience.CONFIG.equals(iaAnn.value()[0]);
}
}
/**
* Selects InterfaceAudience or InterfaceStability classes. Don't go meta!!!
*/
class IsInterfaceStabilityClassFilter implements ClassFinder.ClassFilter {
@Override
public boolean isCandidateClass(Class<?> c) {
return
isInterfaceAudienceClass(c) ||
isInterfaceStabilityClass(c);
}
}
private boolean isInterfaceAudienceClass(Class<?> c) {
return
c.equals(InterfaceAudience.Public.class) ||
c.equals(InterfaceAudience.Private.class) ||
c.equals(InterfaceAudience.LimitedPrivate.class);
}
private boolean isInterfaceStabilityClass(Class<?> c) {
return
c.equals(InterfaceStability.Stable.class) ||
c.equals(InterfaceStability.Unstable.class) ||
c.equals(InterfaceStability.Evolving.class);
}
private boolean isInterfacePrivateMethod(Method m) {
if(m.getDeclaredAnnotations().length > 0) {
for(Annotation ann : m.getDeclaredAnnotations()) {
if(ann.annotationType().equals(InterfaceAudience.Private.class)) {
return true;
}
}
}
return false;
}
private boolean isInterfacePrivateConstructor(Constructor<?> c) {
if(c.getDeclaredAnnotations().length > 0) {
for(Annotation ann : c.getDeclaredAnnotations()) {
if(ann.annotationType().equals(InterfaceAudience.Private.class)) {
return true;
}
}
}
return false;
}
/** Selects classes that are declared public */
static class PublicClassFilter implements ClassFinder.ClassFilter {
@Override
public boolean isCandidateClass(Class<?> c) {
int mod = c.getModifiers();
return Modifier.isPublic(mod);
}
}
/** Selects paths (jars and class dirs) only from the main code, not test classes */
static class MainCodeResourcePathFilter implements ClassFinder.ResourcePathFilter {
@Override
public boolean isCandidatePath(String resourcePath, boolean isJar) {
return !resourcePath.contains("test-classes") &&
!resourcePath.contains("tests.jar");
}
}
/**
* Selects classes that appear to be source instrumentation from Clover.
* Clover generates instrumented code in order to calculate coverage. Part of the
* generated source is a static inner class on each source class. A candidate class:
*
* - has an enclosing class
* - enclosing class is not an interface
* - name starts with "__CLR"
*/
static class CloverInstrumentationFilter implements ClassFinder.ClassFilter {
@Override
public boolean isCandidateClass(Class<?> clazz) {
boolean clover = false;
final Class<?> enclosing = clazz.getEnclosingClass();
if (enclosing != null) {
if (!(enclosing.isInterface())) {
clover = clazz.getSimpleName().startsWith("__CLR");
}
}
return clover;
}
}
/**
* Checks whether all the classes in client and common modules contain
* {@link InterfaceAudience} annotations.
*/
@Ignore @Test
public void testInterfaceAudienceAnnotation()
throws ClassNotFoundException, IOException, LinkageError {
// find classes that are:
// In the main jar
// AND are not in a hadoop-compat module
// AND are public
// AND NOT test classes
// AND NOT generated classes
// AND are NOT annotated with InterfaceAudience
// AND are NOT from Clover rewriting sources
ClassFinder classFinder = new ClassFinder(
new And(new MainCodeResourcePathFilter(),
new TestFileNameFilter()),
new Not((FileNameFilter)new TestFileNameFilter()),
new And(new PublicClassFilter(),
new Not(new TestClassFilter()),
new Not(new GeneratedClassFilter()),
new Not(new ShadedProtobufClassFilter()),
new Not(new IsInterfaceStabilityClassFilter()),
new Not(new InterfaceAudienceAnnotatedClassFilter()),
new Not(new CloverInstrumentationFilter()))
);
Set<Class<?>> classes = classFinder.findClasses(false);
if (!classes.isEmpty()) {
LOG.info("These are the classes that DO NOT have @InterfaceAudience annotation:");
for (Class<?> clazz : classes) {
LOG.info(Objects.toString(clazz));
}
}
Assert.assertEquals("All classes should have @InterfaceAudience annotation",
0, classes.size());
}
/**
* Checks whether all the classes in client and common modules that are marked
* InterfaceAudience.Public do not have {@link InterfaceStability} annotations.
*/
@Ignore @Test
public void testNoInterfaceStabilityAnnotationForPublicAPI()
throws ClassNotFoundException, IOException, LinkageError {
// find classes that are:
// In the main jar
// AND are not in a hadoop-compat module
// AND are public
// AND NOT test classes
// AND NOT generated classes
// AND are annotated with InterfaceAudience.Public
// AND annotated with InterfaceStability
ClassFinder classFinder = new ClassFinder(
new And(new MainCodeResourcePathFilter(),
new TestFileNameFilter()),
new Not((FileNameFilter)new TestFileNameFilter()),
new And(new PublicClassFilter(),
new Not(new TestClassFilter()),
new Not(new GeneratedClassFilter()),
new Not(new ShadedProtobufClassFilter()),
new InterfaceAudiencePublicAnnotatedClassFilter(),
new Not(new IsInterfaceStabilityClassFilter()),
new InterfaceStabilityAnnotatedClassFilter())
);
Set<Class<?>> classes = classFinder.findClasses(false);
if (!classes.isEmpty()) {
LOG.info("These are the @InterfaceAudience.Public classes that have @InterfaceStability " +
"annotation:");
for (Class<?> clazz : classes) {
LOG.info(Objects.toString(clazz));
}
}
Assert.assertEquals("All classes that are marked with @InterfaceAudience.Public should not "
+ "have @InterfaceStability annotation",
0, classes.size());
}
/**
* Checks whether all the classes in client and common modules that are marked
* InterfaceAudience.LimitedPrivate (except CONFIG) do have {@link InterfaceStability} annotations.
*/
@Ignore
@Test
public void testInterfaceStabilityAnnotationForLimitedAPI()
throws ClassNotFoundException, IOException, LinkageError {
// find classes that are:
// In the main jar
// AND are not in a hadoop-compat module
// AND are public
// AND NOT test classes
// AND NOT generated classes
// AND are annotated with InterfaceAudience.LimitedPrivate
// AND NOT annotated with InterfaceStability
ClassFinder classFinder = new ClassFinder(
new And(new MainCodeResourcePathFilter(),
new TestFileNameFilter()),
new Not((FileNameFilter)new TestFileNameFilter()),
new And(new PublicClassFilter(),
new Not(new TestClassFilter()),
new Not(new GeneratedClassFilter()),
new Not(new ShadedProtobufClassFilter()),
new InterfaceAudienceLimitedPrivateAnnotatedNotConfigClassFilter(),
new Not(new IsInterfaceStabilityClassFilter()),
new Not(new InterfaceStabilityAnnotatedClassFilter()))
);
Set<Class<?>> classes = classFinder.findClasses(false);
if (!classes.isEmpty()) {
LOG.info("These are the @InterfaceAudience.LimitedPrivate classes that DO NOT " +
"have @InterfaceStability annotation:");
for (Class<?> clazz : classes) {
LOG.info(Objects.toString(clazz));
}
}
Assert.assertEquals("All classes that are marked with @InterfaceAudience.LimitedPrivate " +
"should have @InterfaceStability annotation",
0, classes.size());
}
@Ignore @Test
public void testProtosInReturnTypes() throws ClassNotFoundException, IOException, LinkageError {
Set<Class<?>> classes = findPublicClasses();
List<Pair<Class<?>, Method>> protosReturnType = new ArrayList<>();
for (Class<?> clazz : classes) {
findProtoInReturnType(clazz, protosReturnType);
}
if (protosReturnType.size() != 0) {
LOG.info("These are the methods that have Protos as the return type");
for (Pair<Class<?>, Method> pair : protosReturnType) {
LOG.info(pair.getFirst().getName() + " " + pair.getSecond().getName() + " "
+ pair.getSecond().getReturnType().getName());
}
}
Assert.assertEquals("Public exposed methods should not have protos in return type", 0,
protosReturnType.size());
}
private Set<Class<?>> findPublicClasses()
throws ClassNotFoundException, IOException, LinkageError {
ClassFinder classFinder =
new ClassFinder(new And(new MainCodeResourcePathFilter(), new TestFileNameFilter()),
new Not((FileNameFilter) new TestFileNameFilter()),
new And(new PublicClassFilter(), new Not(new TestClassFilter()),
new Not(new GeneratedClassFilter()),
new Not(new ShadedProtobufClassFilter()),
new InterfaceAudiencePublicAnnotatedClassFilter()));
Set<Class<?>> classes = classFinder.findClasses(false);
return classes;
}
@Ignore @Test
public void testProtosInParamTypes() throws ClassNotFoundException, IOException, LinkageError {
Set<Class<?>> classes = findPublicClasses();
List<Triple<Class<?>, Method, Class<?>>> protosParamType = new ArrayList<>();
for (Class<?> clazz : classes) {
findProtoInParamType(clazz, protosParamType);
}
if (protosParamType.size() != 0) {
LOG.info("These are the methods that have Protos as the param type");
for (Triple<Class<?>, Method, Class<?>> pair : protosParamType) {
LOG.info(pair.getFirst().getName() + " " + pair.getSecond().getName() + " "
+ pair.getThird().getName());
}
}
Assert.assertEquals("Public exposed methods should not have protos in param type", 0,
protosParamType.size());
}
@Ignore @Test
public void testProtosInConstructors() throws ClassNotFoundException, IOException, LinkageError {
Set<Class<?>> classes = findPublicClasses();
List<Class<?>> classList = new ArrayList<>();
for (Class<?> clazz : classes) {
Constructor<?>[] constructors = clazz.getConstructors();
for (Constructor<?> cons : constructors) {
if (!isInterfacePrivateConstructor(cons)) {
Class<?>[] parameterTypes = cons.getParameterTypes();
for (Class<?> param : parameterTypes) {
if (param.getName().contains(HBASE_PROTOBUF)) {
classList.add(clazz);
break;
}
}
}
}
}
if (classList.size() != 0) {
LOG.info("These are the classes that have Protos in the constructor");
for (Class<?> clazz : classList) {
LOG.info(clazz.getName());
}
}
Assert.assertEquals("Public exposed classes should not have protos in constructors", 0,
classList.size());
}
private void findProtoInReturnType(Class<?> clazz,
    List<Pair<Class<?>, Method>> protosReturnType) {
  Method[] methods = clazz.getMethods();
  for (Method method : methods) {
    // Modifier.isPublic handles methods that are also static/final/etc.; a straight
    // equality check against Modifier.PUBLIC would miss those.
    if (clazz.isInterface() || Modifier.isPublic(method.getModifiers())) {
      if (!isInterfacePrivateMethod(method)) {
        Class<?> returnType = method.getReturnType();
        if (returnType.getName().contains(HBASE_PROTOBUF)) {
          // Create a fresh Pair per match; reusing one instance across matches would
          // leave every list entry pointing at the last matching method.
          protosReturnType.add(new Pair<>(clazz, method));
        }
      }
    }
  }
}
private void findProtoInParamType(Class<?> clazz,
    List<Triple<Class<?>, Method, Class<?>>> protosParamType) {
  Method[] methods = clazz.getMethods();
  for (Method method : methods) {
    if (clazz.isInterface() || Modifier.isPublic(method.getModifiers())) {
      if (!isInterfacePrivateMethod(method)) {
        Class<?>[] parameters = method.getParameterTypes();
        for (Class<?> param : parameters) {
          if (param.getName().contains(HBASE_PROTOBUF)) {
            // One fresh Triple per offending method, for the same reason as above.
            protosParamType.add(new Triple<>(clazz, method, param));
            break;
          }
        }
      }
    }
  }
}
}
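The policy this deleted test enforced, and which the warbucks plugin now checks at build time, can be restated in three short examples. The classes below are invented for illustration and do not appear in the commit:

import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;

// Public API: an InterfaceStability annotation is forbidden here, since
// compatibility for Public classes is governed purely by semantic versioning.
@InterfaceAudience.Public
interface ExampleClientFacingApi {
}

// LimitedPrivate (other than CONFIG) must also declare an explicit stability contract.
@InterfaceAudience.LimitedPrivate("Coprocessor")
@InterfaceStability.Evolving
interface ExampleCoprocessorHook {
}

// Internal code: the audience annotation alone is enough.
@InterfaceAudience.Private
final class ExampleInternalUtil {
}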

View File

@@ -132,6 +132,10 @@
         </excludes>
       </configuration>
     </plugin>
+    <plugin>
+      <groupId>net.revelc.code</groupId>
+      <artifactId>warbucks-maven-plugin</artifactId>
+    </plugin>
   </plugins>
   <pluginManagement>
     <plugins>

View File

@@ -24,10 +24,12 @@ import org.apache.htrace.core.Span;
 import org.apache.htrace.core.SpanReceiver;
 import org.apache.htrace.core.TraceScope;
 import org.apache.htrace.core.Tracer;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * This wrapper class provides functions for accessing htrace 4+ functionality in a simplified way.
  */
+@InterfaceAudience.Private
 public final class TraceUtil {
 
   private static HTraceConfiguration conf;
   private static Tracer tracer;

View File

@@ -16,6 +16,9 @@
  */
 package org.apache.hadoop.hbase.util;
 
+import com.fasterxml.jackson.core.JsonFactory;
+import com.fasterxml.jackson.core.JsonGenerationException;
+import com.fasterxml.jackson.core.JsonGenerator;
 import java.io.Closeable;
 import java.io.IOException;
 import java.io.OutputStreamWriter;
@@ -25,7 +28,6 @@ import java.lang.reflect.Array;
 import java.nio.charset.StandardCharsets;
 import java.util.Iterator;
 import java.util.Set;
-
 import javax.management.AttributeNotFoundException;
 import javax.management.InstanceNotFoundException;
 import javax.management.IntrospectionException;
@@ -41,17 +43,14 @@ import javax.management.RuntimeMBeanException;
 import javax.management.openmbean.CompositeData;
 import javax.management.openmbean.CompositeType;
 import javax.management.openmbean.TabularData;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-
-import com.fasterxml.jackson.core.JsonFactory;
-import com.fasterxml.jackson.core.JsonGenerationException;
-import com.fasterxml.jackson.core.JsonGenerator;
 
 /**
  * Utility for doing JSON and MBeans.
  */
+@InterfaceAudience.Private
 public class JSONBean {
   private static final Logger LOG = LoggerFactory.getLogger(JSONBean.class);
   private final JsonFactory jsonFactory;

View File

@@ -17,6 +17,9 @@
  * */
 package org.apache.hadoop.hbase.util;
 
+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.JsonNode;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import java.beans.IntrospectionException;
 import java.io.IOException;
 import java.io.PrintWriter;
@@ -28,7 +31,6 @@ import java.lang.management.RuntimeMXBean;
 import java.util.Hashtable;
 import java.util.List;
 import java.util.Set;
-
 import javax.management.InstanceNotFoundException;
 import javax.management.MBeanAttributeInfo;
 import javax.management.MBeanInfo;
@@ -37,14 +39,11 @@ import javax.management.MalformedObjectNameException;
 import javax.management.ObjectName;
 import javax.management.ReflectionException;
 import javax.management.openmbean.CompositeData;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-
-import com.fasterxml.jackson.core.JsonProcessingException;
-import com.fasterxml.jackson.databind.JsonNode;
-import com.fasterxml.jackson.databind.ObjectMapper;
 
+@InterfaceAudience.Private
 public final class JSONMetricUtil {
   private static final Logger LOG = LoggerFactory.getLogger(JSONMetricUtil.class);

View File

@@ -65,6 +65,10 @@
         </execution>
       </executions>
     </plugin>
+    <plugin>
+      <groupId>net.revelc.code</groupId>
+      <artifactId>warbucks-maven-plugin</artifactId>
+    </plugin>
   </plugins>
   <pluginManagement>
     <plugins>

View File

@@ -65,6 +65,10 @@
         </execution>
       </executions>
     </plugin>
+    <plugin>
+      <groupId>net.revelc.code</groupId>
+      <artifactId>warbucks-maven-plugin</artifactId>
+    </plugin>
   </plugins>
   <pluginManagement>
     <plugins>

View File

@@ -23,7 +23,6 @@ import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Executors;
 import java.util.concurrent.atomic.AtomicReference;
 import java.util.stream.IntStream;
-
 import org.apache.commons.io.IOUtils;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.hbase.TableName;
@@ -36,12 +35,14 @@ import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.Threads;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 /**
  * A simple example showing how to use the asynchronous client.
  */
+@InterfaceAudience.Private
 public class AsyncClientExample extends Configured implements Tool {
 
   private static final Logger LOG = LoggerFactory.getLogger(AsyncClientExample.class);

View File

@@ -28,7 +28,6 @@ import java.util.concurrent.Executors;
 import java.util.concurrent.Future;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.TimeoutException;
-
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.BufferedMutator;
@@ -40,12 +39,14 @@ import org.apache.hadoop.hbase.client.RetriesExhaustedWithDetailsException;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 /**
  * An example of using the {@link BufferedMutator} interface.
  */
+@InterfaceAudience.Private
 public class BufferedMutatorExample extends Configured implements Tool {
 
   private static final Logger LOG = LoggerFactory.getLogger(BufferedMutatorExample.class);

View File

@@ -17,6 +17,9 @@
  */
 package org.apache.hadoop.hbase.client.example;
 
+import java.util.ArrayList;
+import java.util.List;
+import java.util.Map;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -28,14 +31,11 @@ import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.Table;
-import org.apache.hadoop.hbase.coprocessor.Export;
 import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
+import org.apache.hadoop.hbase.coprocessor.Export;
 import org.apache.hadoop.hbase.util.Bytes;
-
-import java.util.ArrayList;
-import java.util.List;
-import java.util.Map;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * A simple example on how to use {@link org.apache.hadoop.hbase.coprocessor.Export}.
@@ -45,7 +45,8 @@ import java.util.Map;
  * hbase-endpoint/src/main/protobuf/Export.proto.
  * </p>
  */
-public class ExportEndpointExample {
+@InterfaceAudience.Private
+public final class ExportEndpointExample {
 
   public static void main(String[] args) throws Throwable {
     int rowCount = 100;
@@ -83,4 +84,6 @@ public class ExportEndpointExample {
     System.out.println("total cells:" + totalOutputCells);
     }
   }
+
+  private ExportEndpointExample() {}
 }
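Besides the audience annotation, this class also gets the standard utility-class treatment: it is made final and receives a private constructor, so a class that exists only to host main() cannot be instantiated. The general shape (illustrative name):

public final class SomeToolWithMainOnly {

  // no instances: this class exists only for its main() entry point
  private SomeToolWithMainOnly() {
  }

  public static void main(String[] args) {
    // tool logic goes here
  }
}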

View File

@@ -21,7 +21,6 @@ import java.io.IOException;
 import java.net.InetSocketAddress;
 import java.util.Optional;
 import java.util.concurrent.ExecutionException;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.TableName;
@@ -31,6 +30,7 @@ import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.ipc.NettyRpcClientConfigHelper;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.yetus.audience.InterfaceAudience;
 
 import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
 import org.apache.hbase.thirdparty.com.google.common.base.Throwables;
@@ -71,6 +71,7 @@ import org.apache.hbase.thirdparty.io.netty.util.concurrent.GlobalEventExecutor;
  * Use HTTP GET to fetch data, and use HTTP PUT to put data. Encode the value as the request content
  * when doing PUT.
  */
+@InterfaceAudience.Private
 public class HttpProxyExample {
 
   private final EventLoopGroup bossGroup = new NioEventLoopGroup(1);

View File

@@ -29,7 +29,6 @@ import java.util.concurrent.Future;
 import java.util.concurrent.ThreadFactory;
 import java.util.concurrent.ThreadLocalRandom;
 import java.util.concurrent.TimeUnit;
-
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.Cell.Type;
@@ -48,6 +47,7 @@ import org.apache.hadoop.hbase.filter.KeyOnlyFilter;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -90,6 +90,7 @@ import org.apache.hbase.thirdparty.com.google.common.util.concurrent.ThreadFacto
  * </ul>
  *
  */
+@InterfaceAudience.Private
 public class MultiThreadedClientExample extends Configured implements Tool {
   private static final Logger LOG = LoggerFactory.getLogger(MultiThreadedClientExample.class);
   private static final int DEFAULT_NUM_OPERATIONS = 500000;

View File

@@ -19,6 +19,8 @@
 package org.apache.hadoop.hbase.client.example;
 
+import java.io.Closeable;
+import java.io.IOException;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.TableName;
@@ -29,16 +31,15 @@ import org.apache.hadoop.hbase.client.coprocessor.Batch;
 import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils.BlockingRpcCallback;
 import org.apache.hadoop.hbase.ipc.ServerRpcController;
 import org.apache.hadoop.hbase.protobuf.generated.RefreshHFilesProtos;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-
-import java.io.Closeable;
-import java.io.IOException;
 
 /**
  * This client class is for invoking the refresh HFile function deployed on the
  * Region Server side via the RefreshHFilesService.
  */
+@InterfaceAudience.Private
 public class RefreshHFilesClient implements Closeable {
   private static final Logger LOG = LoggerFactory.getLogger(RefreshHFilesClient.class);
   private final Connection connection;

View File

@@ -17,6 +17,9 @@
  */
 package org.apache.hadoop.hbase.coprocessor.example;
 
+import com.google.protobuf.RpcCallback;
+import com.google.protobuf.RpcController;
+import com.google.protobuf.Service;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collections;
@@ -24,7 +27,6 @@ import java.util.HashSet;
 import java.util.List;
 import java.util.Set;
 import java.util.TreeSet;
-
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.CoprocessorEnvironment;
@@ -48,13 +50,10 @@ import org.apache.hadoop.hbase.regionserver.OperationStatus;
 import org.apache.hadoop.hbase.regionserver.Region;
 import org.apache.hadoop.hbase.regionserver.RegionScanner;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-
-import com.google.protobuf.RpcCallback;
-import com.google.protobuf.RpcController;
-import com.google.protobuf.Service;
 
 /**
  * Defines a protocol to delete data in bulk based on a scan. The scan can be a range scan or with
 * conditions (filters) etc. This can be used to delete rows, column family(s), column qualifier(s)
@@ -95,6 +94,7 @@ import com.google.protobuf.Service;
  * }
  * </code></pre>
  */
+@InterfaceAudience.Private
 public class BulkDeleteEndpoint extends BulkDeleteService implements RegionCoprocessor {
   private static final String NO_OF_VERSIONS_TO_DELETE = "noOfVersionsToDelete";
   private static final Logger LOG = LoggerFactory.getLogger(BulkDeleteEndpoint.class);

View File

@@ -19,14 +19,15 @@ package org.apache.hadoop.hbase.coprocessor.example;
 import java.io.IOException;
 import java.util.List;
 
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.regionserver.InternalScanner;
 import org.apache.hadoop.hbase.regionserver.ScannerContext;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * A simple delegation for doing filtering on {@link InternalScanner}.
  */
+@InterfaceAudience.Private
 public class DelegatingInternalScanner implements InternalScanner {
 
   protected final InternalScanner scanner;

View File

@@ -20,7 +20,6 @@ package org.apache.hadoop.hbase.coprocessor.example;
 import java.io.IOException;
 import java.util.Optional;
-
 import org.apache.hadoop.hbase.CoprocessorEnvironment;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.RegionInfo;
@@ -33,6 +32,7 @@ import org.apache.hadoop.hbase.metrics.Counter;
 import org.apache.hadoop.hbase.metrics.Gauge;
 import org.apache.hadoop.hbase.metrics.MetricRegistry;
 import org.apache.hadoop.hbase.metrics.Timer;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -47,6 +47,7 @@ import org.slf4j.LoggerFactory;
  * </p>
  * @see ExampleRegionObserverWithMetrics
  */
+@InterfaceAudience.Private
 public class ExampleMasterObserverWithMetrics implements MasterCoprocessor, MasterObserver {
   @Override
   public Optional<MasterObserver> getMasterObserver() {

View File

@@ -24,7 +24,6 @@ import java.io.IOException;
 import java.util.List;
 import java.util.Optional;
 import java.util.concurrent.ThreadLocalRandom;
-
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CoprocessorEnvironment;
 import org.apache.hadoop.hbase.client.Get;
@@ -40,6 +39,7 @@ import org.apache.hadoop.hbase.regionserver.Store;
 import org.apache.hadoop.hbase.regionserver.StoreFile;
 import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;
 import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * An example coprocessor that collects some metrics to demonstrate the usage of exporting custom
@@ -52,6 +52,7 @@ import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
  *
  * @see ExampleMasterObserverWithMetrics
  */
+@InterfaceAudience.Private
 public class ExampleRegionObserverWithMetrics implements RegionCoprocessor {
 
   private Counter preGetCounter;

View File

@@ -21,7 +21,8 @@ package org.apache.hadoop.hbase.coprocessor.example;
 import com.google.protobuf.RpcCallback;
 import com.google.protobuf.RpcController;
 import com.google.protobuf.Service;
-
+import java.io.IOException;
+import java.util.Collections;
 import org.apache.hadoop.hbase.CoprocessorEnvironment;
 import org.apache.hadoop.hbase.coprocessor.CoprocessorException;
 import org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;
@@ -29,12 +30,10 @@ import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
 import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
 import org.apache.hadoop.hbase.protobuf.generated.RefreshHFilesProtos;
 import org.apache.hadoop.hbase.regionserver.Store;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
-
-import java.io.IOException;
-import java.util.Collections;
 
 /**
  * Coprocessor endpoint to refresh HFiles on replica.
 * <p>
@@ -43,6 +42,7 @@ import java.util.Collections;
  * hbase-protocol/src/main/protobuf/RefreshHFiles.proto.
  * </p>
  */
+@InterfaceAudience.Private
 public class RefreshHFilesEndpoint extends RefreshHFilesProtos.RefreshHFilesService
   implements RegionCoprocessor {
   protected static final Logger LOG = LoggerFactory.getLogger(RefreshHFilesEndpoint.class);

View File

@@ -18,11 +18,13 @@
 package org.apache.hadoop.hbase.coprocessor.example;
 
+import com.google.protobuf.RpcCallback;
+import com.google.protobuf.RpcController;
+import com.google.protobuf.Service;
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.List;
-
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.CoprocessorEnvironment;
@@ -35,10 +37,7 @@ import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
 import org.apache.hadoop.hbase.ipc.CoprocessorRpcUtils;
 import org.apache.hadoop.hbase.regionserver.InternalScanner;
 import org.apache.hadoop.hbase.util.Bytes;
-
-import com.google.protobuf.RpcCallback;
-import com.google.protobuf.RpcController;
-import com.google.protobuf.Service;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Sample coprocessor endpoint exposing a Service interface for counting rows and key values.
@@ -48,6 +47,7 @@ import com.google.protobuf.Service;
  * hbase-examples/src/main/protobuf/Examples.proto.
  * </p>
  */
+@InterfaceAudience.Private
 public class RowCountEndpoint extends ExampleProtos.RowCountService implements RegionCoprocessor {
   private RegionCoprocessorEnvironment env;

View File

@@ -18,7 +18,6 @@ package org.apache.hadoop.hbase.coprocessor.example;
 import java.io.IOException;
 import java.util.Optional;
-
 import org.apache.hadoop.hbase.CoprocessorEnvironment;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.coprocessor.ObserverContext;
@@ -26,11 +25,13 @@ import org.apache.hadoop.hbase.coprocessor.RegionCoprocessor;
 import org.apache.hadoop.hbase.coprocessor.RegionCoprocessorEnvironment;
 import org.apache.hadoop.hbase.coprocessor.RegionObserver;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * A RegionObserver which modifies incoming Scan requests to include additional
 * columns beyond what the user actually requested.
 */
+@InterfaceAudience.Private
 public class ScanModifyingObserver implements RegionCoprocessor, RegionObserver {
 
   public static final String FAMILY_TO_ADD_KEY = "hbase.examples.coprocessor.scanmodifying.family";

View File

@@ -19,7 +19,6 @@ package org.apache.hadoop.hbase.coprocessor.example;
 import java.io.IOException;
 import java.util.List;
 import java.util.Optional;
-
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellBuilder;
 import org.apache.hadoop.hbase.CellBuilderFactory;
@@ -37,10 +36,12 @@ import org.apache.hadoop.hbase.regionserver.Store;
 import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;
 import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * This RegionObserver replaces the values of Puts from one value to another on compaction.
 */
+@InterfaceAudience.Private
 public class ValueRewritingObserver implements RegionObserver, RegionCoprocessor {
   public static final String ORIGINAL_VALUE_KEY =
       "hbase.examples.coprocessor.value.rewrite.orig";

View File

@@ -26,7 +26,6 @@ import java.util.NavigableMap;
 import java.util.Optional;
 import java.util.TreeMap;
 import java.util.stream.IntStream;
-
 import org.apache.commons.lang3.mutable.MutableLong;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellBuilderFactory;
@@ -52,6 +51,7 @@ import org.apache.hadoop.hbase.regionserver.Store;
 import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTracker;
 import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.yetus.audience.InterfaceAudience;
 
 import org.apache.hbase.thirdparty.com.google.common.math.IntMath;
@@ -64,6 +64,7 @@ import org.apache.hbase.thirdparty.com.google.common.math.IntMath;
 * Notice that this is only an example so we do not handle most corner cases, for example, you must
 * provide a qualifier when doing a get.
 */
+@InterfaceAudience.Private
 public class WriteHeavyIncrementObserver implements RegionCoprocessor, RegionObserver {
 
   @Override
View File

@@ -21,7 +21,6 @@ package org.apache.hadoop.hbase.coprocessor.example;
 import java.io.IOException;
 import java.util.Optional;
 import java.util.OptionalLong;
-
 import org.apache.curator.framework.CuratorFramework;
 import org.apache.curator.framework.CuratorFrameworkFactory;
 import org.apache.curator.framework.recipes.cache.ChildData;
@@ -40,6 +39,7 @@ import org.apache.hadoop.hbase.regionserver.compactions.CompactionLifeCycleTrack
 import org.apache.hadoop.hbase.regionserver.compactions.CompactionRequest;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
 * This is an example showing how a RegionObserver could be configured via ZooKeeper in order to
@@ -51,6 +51,7 @@ import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 * successful backup via ZK and instruct HBase that it can safely delete the data which has
 * already been backed up.
 */
+@InterfaceAudience.Private
 public class ZooKeeperScanPolicyObserver implements RegionCoprocessor, RegionObserver {
 
   @Override
View File

@@ -20,7 +20,6 @@ package org.apache.hadoop.hbase.mapreduce;
 import java.io.IOException;
 import java.util.TreeMap;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -33,6 +32,7 @@ import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.Mapper;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
 * Example map/reduce job to construct index tables that can be used to quickly
@@ -64,6 +64,7 @@ import org.apache.hadoop.util.ToolRunner;
 * This code was written against HBase 0.21 trunk.
 * </p>
 */
+@InterfaceAudience.Private
 public class IndexBuilder extends Configured implements Tool {
   /** the column family containing the indexed row key */
   public static final byte[] INDEX_COLUMN = Bytes.toBytes("INDEX");

View File

@@ -19,7 +19,6 @@
 package org.apache.hadoop.hbase.mapreduce;
 
 import java.io.IOException;
-
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.fs.Path;
@@ -35,6 +34,7 @@ import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
 import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
 import org.apache.hadoop.util.Tool;
 import org.apache.hadoop.util.ToolRunner;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
 * Sample Uploader MapReduce
@@ -58,6 +58,7 @@ import org.apache.hadoop.util.ToolRunner;
 * <p>
 * This code was written against HBase 0.21 trunk.
 */
+@InterfaceAudience.Private
 public class SampleUploader extends Configured implements Tool {
 
   private static final String NAME = "SampleUploader";

View File

@@ -31,13 +31,11 @@ import java.util.List;
 import java.util.Map;
 import java.util.SortedMap;
 import java.util.TreeMap;
-
 import javax.security.auth.Subject;
 import javax.security.auth.login.AppConfigurationEntry;
 import javax.security.auth.login.Configuration;
 import javax.security.auth.login.LoginContext;
 import javax.security.sasl.Sasl;
-
 import org.apache.hadoop.hbase.thrift.generated.AlreadyExists;
 import org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor;
 import org.apache.hadoop.hbase.thrift.generated.Hbase;
@@ -49,10 +47,12 @@ import org.apache.thrift.protocol.TProtocol;
 import org.apache.thrift.transport.TSaslClientTransport;
 import org.apache.thrift.transport.TSocket;
 import org.apache.thrift.transport.TTransport;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
 * See the instructions under hbase-examples/README.txt
 */
+@InterfaceAudience.Private
 public class DemoClient {
 
   static protected int port;

View File

@@ -18,7 +18,6 @@
  */
 package org.apache.hadoop.hbase.thrift;

 import java.io.UnsupportedEncodingException;
 import java.nio.ByteBuffer;
 import java.nio.charset.CharacterCodingException;
@@ -31,12 +30,10 @@ import java.util.List;
 import java.util.Map;
 import java.util.SortedMap;
 import java.util.TreeMap;
 import javax.security.auth.Subject;
 import javax.security.auth.login.AppConfigurationEntry;
 import javax.security.auth.login.Configuration;
 import javax.security.auth.login.LoginContext;
 import org.apache.hadoop.hbase.thrift.generated.AlreadyExists;
 import org.apache.hadoop.hbase.thrift.generated.ColumnDescriptor;
 import org.apache.hadoop.hbase.thrift.generated.Hbase;
@@ -48,6 +45,7 @@ import org.apache.thrift.protocol.TProtocol;
 import org.apache.thrift.transport.THttpClient;
 import org.apache.thrift.transport.TSocket;
 import org.apache.thrift.transport.TTransport;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.ietf.jgss.GSSContext;
 import org.ietf.jgss.GSSCredential;
 import org.ietf.jgss.GSSException;
@@ -58,6 +56,7 @@ import org.ietf.jgss.Oid;

 /**
  * See the instructions under hbase-examples/README.txt
  */
+@InterfaceAudience.Private
 public class HttpDoAsClient {
   static protected int port;

View File
@@ -24,15 +24,12 @@ import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 import javax.security.auth.Subject;
 import javax.security.auth.login.AppConfigurationEntry;
 import javax.security.auth.login.Configuration;
 import javax.security.auth.login.LoginContext;
 import javax.security.sasl.Sasl;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.thrift2.generated.TColumnValue;
 import org.apache.hadoop.hbase.thrift2.generated.TGet;
 import org.apache.hadoop.hbase.thrift2.generated.THBaseService;
@@ -44,7 +41,9 @@ import org.apache.thrift.transport.TFramedTransport;
 import org.apache.thrift.transport.TSaslClientTransport;
 import org.apache.thrift.transport.TSocket;
 import org.apache.thrift.transport.TTransport;
+import org.apache.yetus.audience.InterfaceAudience;

+@InterfaceAudience.Private
 public class DemoClient {
   private static String host = "localhost";

View File
@@ -19,14 +19,15 @@ package org.apache.hadoop.hbase.types;

 import com.google.protobuf.CodedInputStream;
 import com.google.protobuf.CodedOutputStream;
+import java.io.IOException;
 import org.apache.hadoop.hbase.protobuf.generated.CellProtos;
 import org.apache.hadoop.hbase.util.PositionedByteRange;
+import org.apache.yetus.audience.InterfaceAudience;
-import java.io.IOException;

 /**
  * An example for using protobuf objects with {@link DataType} API.
  */
+@InterfaceAudience.Private
 public class PBCell extends PBType<CellProtos.Cell> {
   @Override
   public Class<CellProtos.Cell> encodedClass() {
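
Since PBCell exists only to demonstrate the DataType API, a round-trip sketch may help. This is illustrative, not part of the commit: the CellProtos.Cell builder setters and SimplePositionedMutableByteRange are assumed from the HBase/protobuf APIs of this period.

import com.google.protobuf.ByteString;
import org.apache.hadoop.hbase.protobuf.generated.CellProtos;
import org.apache.hadoop.hbase.types.PBCell;
import org.apache.hadoop.hbase.util.PositionedByteRange;
import org.apache.hadoop.hbase.util.SimplePositionedMutableByteRange;

public class PBCellRoundTrip {
  public static void main(String[] args) {
    PBCell type = new PBCell();
    // Build a protobuf Cell value (field names assumed from Cell.proto).
    CellProtos.Cell cell = CellProtos.Cell.newBuilder()
        .setRow(ByteString.copyFromUtf8("row1"))
        .setFamily(ByteString.copyFromUtf8("f"))
        .setQualifier(ByteString.copyFromUtf8("q"))
        .setValue(ByteString.copyFromUtf8("v"))
        .build();
    // Encode into a positioned buffer, rewind, and decode it back.
    PositionedByteRange buf =
        new SimplePositionedMutableByteRange(type.encodedLength(cell));
    type.encode(buf, cell);
    buf.setPosition(0);
    CellProtos.Cell decoded = type.decode(buf);
    assert cell.equals(decoded);
  }
}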
View File
@@ -84,6 +84,10 @@
         <failOnViolation>true</failOnViolation>
       </configuration>
     </plugin>
+    <plugin>
+      <groupId>net.revelc.code</groupId>
+      <artifactId>warbucks-maven-plugin</artifactId>
+    </plugin>
   </plugins>
   <pluginManagement>
     <plugins>
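
These bare declarations inherit their version and rule configuration from pluginManagement in the parent pom, which this diff does not show. Roughly, a rule enforcing the annotation requirement could look like the sketch below; the element names follow the warbucks-maven-plugin rule schema, but the values are illustrative rather than copied from the commit.

<plugin>
  <groupId>net.revelc.code</groupId>
  <artifactId>warbucks-maven-plugin</artifactId>
  <configuration>
    <rules>
      <rule>
        <!-- Check main (non-test) public classes... (patterns illustrative) -->
        <classPattern>org[.]apache[.]hadoop[.]hbase.*</classPattern>
        <includePublicClasses>true</includePublicClasses>
        <includeTestClasses>false</includeTestClasses>
        <!-- ...and require an InterfaceAudience annotation on each. -->
        <classAnnotationPattern>org[.]apache[.]yetus[.]audience[.]InterfaceAudience.*</classAnnotationPattern>
      </rule>
    </rules>
  </configuration>
  <executions>
    <execution>
      <goals>
        <goal>check</goal>
      </goals>
    </execution>
  </executions>
</plugin>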
View File
@@ -49,6 +49,10 @@
       <groupId>org.apache.maven.plugins</groupId>
       <artifactId>maven-source-plugin</artifactId>
     </plugin>
+    <plugin>
+      <groupId>net.revelc.code</groupId>
+      <artifactId>warbucks-maven-plugin</artifactId>
+    </plugin>
   </plugins>
   <pluginManagement>
     <plugins>

View File
@@ -21,12 +21,14 @@ package org.apache.hadoop.hbase;

 import java.util.Iterator;
 import java.util.ServiceLoader;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

 /**
  * Class that will create many instances of classes provided by the hbase-hadoop{1|2}-compat jars.
  */
+@InterfaceAudience.Private
 public class CompatibilityFactory {
   private static final Logger LOG = LoggerFactory.getLogger(CompatibilitySingletonFactory.class);

View File
@@ -23,6 +23,7 @@ import java.util.Iterator;
 import java.util.Map;
 import java.util.ServiceLoader;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -30,6 +31,7 @@ import org.slf4j.LoggerFactory;
  * Factory for classes supplied by hadoop compatibility modules. Only one of each class will be
  * created.
  */
+@InterfaceAudience.Private
 public class CompatibilitySingletonFactory extends CompatibilityFactory {
   public static enum SingletonStorage {
     INSTANCE;
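
A typical call site, as a sketch: getInstance resolves whichever hadoop-compat implementation ServiceLoader finds on the classpath, and the factory interface used here appears later in this diff.

// Sketch: resolve the single implementation of a compat interface via
// ServiceLoader; repeated calls hand back the same cached instance.
MetricsRegionServerSourceFactory factory =
    CompatibilitySingletonFactory.getInstance(MetricsRegionServerSourceFactory.class);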
View File
@@ -19,7 +19,9 @@
 package org.apache.hadoop.hbase.io;

 import org.apache.hadoop.hbase.metrics.BaseSource;
+import org.apache.yetus.audience.InterfaceAudience;

+@InterfaceAudience.Private
 public interface MetricsIOSource extends BaseSource {
   /**

View File
@@ -18,6 +18,9 @@
 package org.apache.hadoop.hbase.io;

+import org.apache.yetus.audience.InterfaceAudience;

+@InterfaceAudience.Private
 public interface MetricsIOWrapper {
   long getChecksumFailures();

View File
@@ -20,7 +20,9 @@
 package org.apache.hadoop.hbase.ipc;

 import org.apache.hadoop.hbase.metrics.ExceptionTrackingSource;
+import org.apache.yetus.audience.InterfaceAudience;

+@InterfaceAudience.Private
 public interface MetricsHBaseServerSource extends ExceptionTrackingSource {
   String AUTHORIZATION_SUCCESSES_NAME = "authorizationSuccesses";
   String AUTHORIZATION_SUCCESSES_DESC =

View File
@@ -19,6 +19,9 @@
 package org.apache.hadoop.hbase.ipc;

+import org.apache.yetus.audience.InterfaceAudience;

+@InterfaceAudience.Private
 public abstract class MetricsHBaseServerSourceFactory {
   /**
    * The name of the metrics

View File
@@ -19,6 +19,9 @@
 package org.apache.hadoop.hbase.ipc;

+import org.apache.yetus.audience.InterfaceAudience;

+@InterfaceAudience.Private
 public interface MetricsHBaseServerWrapper {
   long getTotalQueueSize();

View File
@@ -20,7 +20,9 @@ package org.apache.hadoop.hbase.master;

 import org.apache.hadoop.hbase.metrics.BaseSource;
 import org.apache.hadoop.hbase.metrics.OperationMetrics;
+import org.apache.yetus.audience.InterfaceAudience;

+@InterfaceAudience.Private
 public interface MetricsAssignmentManagerSource extends BaseSource {
   /**

View File
@@ -19,7 +19,9 @@
 package org.apache.hadoop.hbase.master;

 import org.apache.hadoop.hbase.metrics.BaseSource;
+import org.apache.yetus.audience.InterfaceAudience;

+@InterfaceAudience.Private
 public interface MetricsMasterFileSystemSource extends BaseSource {
   /**

View File
@@ -19,10 +19,12 @@
 package org.apache.hadoop.hbase.master;

 import org.apache.hadoop.hbase.metrics.BaseSource;
+import org.apache.yetus.audience.InterfaceAudience;

 /**
  * Interface that classes that expose metrics about the master will implement.
  */
+@InterfaceAudience.Private
 public interface MetricsMasterProcSource extends BaseSource {
   /**

View File
@@ -18,9 +18,12 @@
 package org.apache.hadoop.hbase.master;

+import org.apache.yetus.audience.InterfaceAudience;

 /**
  * Interface of a factory to create MetricsMasterSource when given a MetricsMasterWrapper
  */
+@InterfaceAudience.Private
 public interface MetricsMasterProcSourceFactory {
   MetricsMasterProcSource create(MetricsMasterWrapper masterWrapper);

View File
@@ -17,10 +17,12 @@
 package org.apache.hadoop.hbase.master;

 import org.apache.hadoop.hbase.metrics.BaseSource;
+import org.apache.yetus.audience.InterfaceAudience;

 /**
  * A collection of exposed metrics for space quotas from the HBase Master.
  */
+@InterfaceAudience.Private
 public interface MetricsMasterQuotaSource extends BaseSource {
   String METRICS_NAME = "Quotas";

View File
@@ -16,9 +16,12 @@
  */
 package org.apache.hadoop.hbase.master;

+import org.apache.yetus.audience.InterfaceAudience;

 /**
  * Interface of a factory to create MetricsMasterQuotaSource when given a MetricsMasterWrapper.
  */
+@InterfaceAudience.Private
 public interface MetricsMasterQuotaSourceFactory {
   MetricsMasterQuotaSource create(MetricsMasterWrapper masterWrapper);

View File
@@ -20,10 +20,12 @@ package org.apache.hadoop.hbase.master;

 import org.apache.hadoop.hbase.metrics.BaseSource;
 import org.apache.hadoop.hbase.metrics.OperationMetrics;
+import org.apache.yetus.audience.InterfaceAudience;

 /**
  * Interface that classes that expose metrics about the master will implement.
  */
+@InterfaceAudience.Private
 public interface MetricsMasterSource extends BaseSource {
   /**

View File
@@ -18,9 +18,12 @@
 package org.apache.hadoop.hbase.master;

+import org.apache.yetus.audience.InterfaceAudience;

 /**
  * Interface of a factory to create MetricsMasterSource when given a MetricsMasterWrapper
  */
+@InterfaceAudience.Private
 public interface MetricsMasterSourceFactory {
   MetricsMasterSource create(MetricsMasterWrapper masterWrapper);

View File
@@ -20,11 +20,13 @@ package org.apache.hadoop.hbase.master;

 import java.util.Map;
 import java.util.Map.Entry;
+import org.apache.yetus.audience.InterfaceAudience;

 /**
  * This is the interface that will expose information to hadoop1/hadoop2 implementations of the
  * MetricsMasterSource.
  */
+@InterfaceAudience.Private
 public interface MetricsMasterWrapper {
   /**

View File
@@ -19,7 +19,9 @@
 package org.apache.hadoop.hbase.master;

 import org.apache.hadoop.hbase.metrics.BaseSource;
+import org.apache.yetus.audience.InterfaceAudience;

+@InterfaceAudience.Private
 public interface MetricsSnapshotSource extends BaseSource {
   /**
    * The name of the metrics

View File
@@ -19,7 +19,9 @@
 package org.apache.hadoop.hbase.master.balancer;

 import org.apache.hadoop.hbase.metrics.BaseSource;
+import org.apache.yetus.audience.InterfaceAudience;

+@InterfaceAudience.Private
 public interface MetricsBalancerSource extends BaseSource {
   /**

View File
@@ -18,12 +18,15 @@
 package org.apache.hadoop.hbase.master.balancer;

+import org.apache.yetus.audience.InterfaceAudience;

 /**
  * This interface extends the basic metrics balancer source to add a function
  * to report metrics that related to stochastic load balancer. The purpose is to
  * offer an insight to the internal cost calculations that can be useful to tune
  * the balancer. For details, refer to HBASE-13965
  */
+@InterfaceAudience.Private
 public interface MetricsStochasticBalancerSource extends MetricsBalancerSource {
   /**

View File
@@ -18,10 +18,13 @@
 package org.apache.hadoop.hbase.metrics;

+import org.apache.yetus.audience.InterfaceAudience;

 /**
  * BaseSource for dynamic metrics to announce to Metrics2.
  * In hbase-hadoop{1|2}-compat there is an implementation of this interface.
  */
+@InterfaceAudience.Private
 public interface BaseSource {
   String HBASE_METRICS_SYSTEM_NAME = "HBase";
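
As a sketch of how callers drive a source (the mutator names are BaseSource's own; the wrapping method and metric names are hypothetical):

// Hypothetical helper showing the usual mutators on any BaseSource impl.
void recordRequest(BaseSource source, long latencyMs) {
  source.incCounters("totalRequestCount", 1);          // monotonic counter
  source.updateHistogram("requestLatency", latencyMs); // histogram sample
  source.setGauge("lastRequestLatency", latencyMs);    // point-in-time gauge
}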
View File
@@ -18,10 +18,13 @@
 package org.apache.hadoop.hbase.metrics;

+import org.apache.yetus.audience.InterfaceAudience;

 /**
  * Common interface for metrics source implementations which need to track individual exception
  * types thrown or received.
  */
+@InterfaceAudience.Private
 public interface ExceptionTrackingSource extends BaseSource {
   String EXCEPTIONS_NAME="exceptions";
   String EXCEPTIONS_DESC="Exceptions caused by requests";

View File
@@ -18,9 +18,12 @@
 package org.apache.hadoop.hbase.metrics;

+import org.apache.yetus.audience.InterfaceAudience;

 /**
  * Interface for sources that will export JvmPauseMonitor metrics
  */
+@InterfaceAudience.Private
 public interface JvmPauseMonitorSource {
   String INFO_THRESHOLD_COUNT_KEY = "pauseInfoThresholdExceeded";

View File
@@ -19,10 +19,12 @@
 package org.apache.hadoop.hbase.metrics;

 import javax.management.ObjectName;
+import org.apache.yetus.audience.InterfaceAudience;

 /**
  * Object that will register an mbean with the underlying metrics implementation.
  */
+@InterfaceAudience.Private
 public interface MBeanSource {
   /**

View File
@@ -18,12 +18,15 @@
 package org.apache.hadoop.hbase.metrics;

+import org.apache.yetus.audience.InterfaceAudience;

 import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;

 /**
  * Container class for commonly collected metrics for most operations. Instantiate this class to
  * collect submitted count, failed count and time histogram for an operation.
  */
+@InterfaceAudience.Private
 public class OperationMetrics {
   private static final String SUBMITTED_COUNT = "SubmittedCount";
   private static final String TIME = "Time";
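
A usage sketch (the registry variable and operation name are hypothetical; the getter names mirror the SubmittedCount/Time/FailedCount metrics this class manages):

// Track one logical operation: count the submission, time it, count failures.
OperationMetrics assignMetrics = new OperationMetrics(registry, "Assign");
assignMetrics.getSubmittedCounter().increment();
long startMs = System.currentTimeMillis();
try {
  // ... perform the operation ...
} catch (Exception e) {
  assignMetrics.getFailedCounter().increment(); // failure still gets timed below
}
assignMetrics.getTimeHisto().update(System.currentTimeMillis() - startMs);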
View File
@@ -19,11 +19,13 @@
 package org.apache.hadoop.hbase.regionserver;

 import org.apache.hadoop.hbase.metrics.BaseSource;
+import org.apache.yetus.audience.InterfaceAudience;

 /**
  * This interface will be implemented by a MetricsSource that will export metrics from
  * HeapMemoryManager in RegionServer into the hadoop metrics system.
  */
+@InterfaceAudience.Private
 public interface MetricsHeapMemoryManagerSource extends BaseSource {
   /**
    * The name of the metrics

View File
@@ -19,11 +19,13 @@
 package org.apache.hadoop.hbase.regionserver;

 import org.apache.hadoop.hbase.metrics.BaseSource;
+import org.apache.yetus.audience.InterfaceAudience;

 /**
  * This interface will be implemented by a MetricsSource that will export metrics from
  * multiple regions into the hadoop metrics system.
  */
+@InterfaceAudience.Private
 public interface MetricsRegionAggregateSource extends BaseSource {
   /**

View File
@@ -17,10 +17,12 @@
 package org.apache.hadoop.hbase.regionserver;

 import org.apache.hadoop.hbase.metrics.BaseSource;
+import org.apache.yetus.audience.InterfaceAudience;

 /**
  * A collection of exposed metrics for space quotas from an HBase RegionServer.
  */
+@InterfaceAudience.Private
 public interface MetricsRegionServerQuotaSource extends BaseSource {
   String METRICS_NAME = "Quotas";

View File
@@ -20,10 +20,12 @@ package org.apache.hadoop.hbase.regionserver;

 import org.apache.hadoop.hbase.metrics.BaseSource;
 import org.apache.hadoop.hbase.metrics.JvmPauseMonitorSource;
+import org.apache.yetus.audience.InterfaceAudience;

 /**
  * Interface for classes that expose metrics about the regionserver.
  */
+@InterfaceAudience.Private
 public interface MetricsRegionServerSource extends BaseSource, JvmPauseMonitorSource {
   /**

View File
@@ -20,10 +20,12 @@ package org.apache.hadoop.hbase.regionserver;

 import org.apache.hadoop.hbase.io.MetricsIOSource;
 import org.apache.hadoop.hbase.io.MetricsIOWrapper;
+import org.apache.yetus.audience.InterfaceAudience;

 /**
  * Interface of a factory to create Metrics Sources used inside of regionservers.
  */
+@InterfaceAudience.Private
 public interface MetricsRegionServerSourceFactory {
   /**

View File
@@ -18,10 +18,13 @@
 package org.apache.hadoop.hbase.regionserver;

+import org.apache.yetus.audience.InterfaceAudience;

 /**
  * This is the interface that will expose RegionServer information to hadoop1/hadoop2
  * implementations of the MetricsRegionServerSource.
  */
+@InterfaceAudience.Private
 public interface MetricsRegionServerWrapper {
   /**

View File
@@ -18,11 +18,13 @@
 package org.apache.hadoop.hbase.regionserver;

+import org.apache.yetus.audience.InterfaceAudience;

 /**
  * This interface will be implemented to allow single regions to push metrics into
  * MetricsRegionAggregateSource that will in turn push data to the Hadoop metrics system.
  */
+@InterfaceAudience.Private
 public interface MetricsRegionSource extends Comparable<MetricsRegionSource> {
   String OPS_SAMPLE_NAME = "ops";

View File
@@ -18,10 +18,13 @@
 package org.apache.hadoop.hbase.regionserver;

+import org.apache.yetus.audience.InterfaceAudience;

 /**
  * Interface of class that will wrap an HRegion and export numbers so they can be
  * used in MetricsRegionSource
  */
+@InterfaceAudience.Private
 public interface MetricsRegionWrapper {
   /**

View File
@@ -19,11 +19,13 @@
 package org.apache.hadoop.hbase.regionserver;

 import org.apache.hadoop.hbase.metrics.BaseSource;
+import org.apache.yetus.audience.InterfaceAudience;

 /**
  * This interface will be implemented by a MetricsSource that will export metrics from
  * multiple regions of a table into the hadoop metrics system.
  */
+@InterfaceAudience.Private
 public interface MetricsTableAggregateSource extends BaseSource {
   /**

View File
@@ -16,9 +16,12 @@
  */
 package org.apache.hadoop.hbase.regionserver;

+import org.apache.yetus.audience.InterfaceAudience;

 /**
  * Latency metrics for a specific table in a RegionServer.
  */
+@InterfaceAudience.Private
 public interface MetricsTableLatencies {
   /**

View File
@@ -18,10 +18,13 @@
 package org.apache.hadoop.hbase.regionserver;

+import org.apache.yetus.audience.InterfaceAudience;

 /**
  * This interface will be implemented to allow region server to push table metrics into
  * MetricsRegionAggregateSource that will in turn push data to the Hadoop metrics system.
  */
+@InterfaceAudience.Private
 public interface MetricsTableSource extends Comparable<MetricsTableSource> {
   String READ_REQUEST_COUNT = "readRequestCount";

View File
@@ -18,11 +18,13 @@
 package org.apache.hadoop.hbase.regionserver;

+import org.apache.yetus.audience.InterfaceAudience;

 /**
  * Interface of class that will wrap a MetricsTableSource and export numbers so they can be
  * used in MetricsTableSource
  */
+@InterfaceAudience.Private
 public interface MetricsTableWrapperAggregate {
   /**

View File
@@ -19,10 +19,12 @@
 package org.apache.hadoop.hbase.regionserver.wal;

 import org.apache.hadoop.hbase.metrics.BaseSource;
+import org.apache.yetus.audience.InterfaceAudience;

 /**
  * Interface of the source that will export metrics about the region server's WAL.
  */
+@InterfaceAudience.Private
 public interface MetricsWALSource extends BaseSource {

View File
@@ -18,6 +18,9 @@
 package org.apache.hadoop.hbase.replication.regionserver;

+import org.apache.yetus.audience.InterfaceAudience;

+@InterfaceAudience.Private
 public interface MetricsReplicationSinkSource {
   public static final String SINK_AGE_OF_LAST_APPLIED_OP = "sink.ageOfLastAppliedOp";
   public static final String SINK_APPLIED_BATCHES = "sink.appliedBatches";

View File
@@ -19,11 +19,13 @@
 package org.apache.hadoop.hbase.replication.regionserver;

 import org.apache.hadoop.hbase.metrics.BaseSource;
+import org.apache.yetus.audience.InterfaceAudience;

 /**
  * Provides access to gauges and counters. Implementers will hide the details of hadoop1 or
  * hadoop2's metrics2 classes and publishing.
  */
+@InterfaceAudience.Private
 public interface MetricsReplicationSource extends BaseSource {
   /**
    * The name of the metrics

View File
@@ -18,6 +18,9 @@
 package org.apache.hadoop.hbase.replication.regionserver;

+import org.apache.yetus.audience.InterfaceAudience;

+@InterfaceAudience.Private
 public interface MetricsReplicationSourceFactory {
   public MetricsReplicationSinkSource getSink();
   public MetricsReplicationSourceSource getSource(String id);

View File
@@ -19,7 +19,9 @@
 package org.apache.hadoop.hbase.replication.regionserver;

 import org.apache.hadoop.hbase.metrics.BaseSource;
+import org.apache.yetus.audience.InterfaceAudience;

+@InterfaceAudience.Private
 public interface MetricsReplicationSourceSource extends BaseSource {
   public static final String SOURCE_SIZE_OF_LOG_QUEUE = "source.sizeOfLogQueue";

View File
@@ -20,10 +20,12 @@ package org.apache.hadoop.hbase.rest;

 import org.apache.hadoop.hbase.metrics.BaseSource;
 import org.apache.hadoop.hbase.metrics.JvmPauseMonitorSource;
+import org.apache.yetus.audience.InterfaceAudience;

 /**
  * Interface of the Metrics Source that will export data to Hadoop's Metrics2 system.
  */
+@InterfaceAudience.Private
 public interface MetricsRESTSource extends BaseSource, JvmPauseMonitorSource {
   String METRICS_NAME = "REST";

View File
@@ -20,10 +20,12 @@ package org.apache.hadoop.hbase.thrift;

 import org.apache.hadoop.hbase.metrics.ExceptionTrackingSource;
 import org.apache.hadoop.hbase.metrics.JvmPauseMonitorSource;
+import org.apache.yetus.audience.InterfaceAudience;

 /**
  * Interface of a class that will export metrics about Thrift to hadoop's metrics2.
  */
+@InterfaceAudience.Private
 public interface MetricsThriftServerSource extends ExceptionTrackingSource, JvmPauseMonitorSource {
   String BATCH_GET_KEY = "batchGet";

View File
@@ -18,7 +18,10 @@
 package org.apache.hadoop.hbase.thrift;

+import org.apache.yetus.audience.InterfaceAudience;

 /** Factory that will be used to create metrics sources for the two diffent types of thrift servers. */
+@InterfaceAudience.Private
 public interface MetricsThriftServerSourceFactory {
   String METRICS_NAME = "Thrift";

View File
@@ -18,10 +18,12 @@
 package org.apache.hadoop.hbase.zookeeper;

 import org.apache.hadoop.hbase.metrics.BaseSource;
+import org.apache.yetus.audience.InterfaceAudience;

 /**
  * Interface of the source that will export metrics about the ZooKeeper.
  */
+@InterfaceAudience.Private
 public interface MetricsZooKeeperSource extends BaseSource {
   /**

View File
@@ -18,10 +18,13 @@
 package org.apache.hadoop.metrics2;

+import org.apache.yetus.audience.InterfaceAudience;

 /**
  * Metrics Histogram interface. Implementing classes will expose computed
  * quartile values through the metrics system.
  */
+@InterfaceAudience.Private
 public interface MetricHistogram {
   //Strings used to create metrics names.

View File
@@ -19,10 +19,12 @@
 package org.apache.hadoop.metrics2;

 import java.util.concurrent.ScheduledExecutorService;
+import org.apache.yetus.audience.InterfaceAudience;

 /**
  * ScheduledExecutorService for metrics.
  */
+@InterfaceAudience.Private
 public interface MetricsExecutor {
   ScheduledExecutorService getExecutor();

View File
@@ -67,6 +67,10 @@ limitations under the License.
         </execution>
       </executions>
     </plugin>
+    <plugin>
+      <groupId>net.revelc.code</groupId>
+      <artifactId>warbucks-maven-plugin</artifactId>
+    </plugin>
   </plugins>
   <pluginManagement>
     <plugins>

View File
@@ -23,7 +23,9 @@ import org.apache.hadoop.metrics2.MetricHistogram;
 import org.apache.hadoop.metrics2.MetricsCollector;
 import org.apache.hadoop.metrics2.MetricsRecordBuilder;
 import org.apache.hadoop.metrics2.lib.Interns;
+import org.apache.yetus.audience.InterfaceAudience;

+@InterfaceAudience.Private
 public class MetricsIOSourceImpl extends BaseSourceImpl implements MetricsIOSource {
   private final MetricsIOWrapper wrapper;

View File
@@ -19,11 +19,13 @@
 package org.apache.hadoop.hbase.metrics;

 import org.apache.hadoop.metrics2.lib.MutableFastCounter;
+import org.apache.yetus.audience.InterfaceAudience;

 /**
  * Common base implementation for metrics sources which need to track exceptions thrown or
  * received.
  */
+@InterfaceAudience.Private
 public class ExceptionTrackingSourceImpl extends BaseSourceImpl
     implements ExceptionTrackingSource {
   protected MutableFastCounter exceptions;

View File
@@ -17,15 +17,13 @@
  */
 package org.apache.hadoop.hbase.metrics.impl;

 import java.util.Collection;
 import java.util.HashMap;
 import java.util.Iterator;
 import java.util.Map.Entry;
+import java.util.Optional;
 import java.util.concurrent.TimeUnit;
 import java.util.concurrent.atomic.AtomicBoolean;
-import java.util.Optional;
 import org.apache.hadoop.hbase.metrics.MetricRegistries;
 import org.apache.hadoop.hbase.metrics.MetricRegistry;
 import org.apache.hadoop.hbase.metrics.MetricRegistryInfo;
@@ -36,8 +34,10 @@ import org.apache.hadoop.metrics2.impl.JmxCacheBuster;
 import org.apache.hadoop.metrics2.lib.DefaultMetricsSystem;
 import org.apache.hadoop.metrics2.lib.DefaultMetricsSystemHelper;
 import org.apache.hadoop.metrics2.lib.MetricsExecutorImpl;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

 import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;

 /**
@@ -62,7 +62,8 @@ import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
  * initialization should be moved here.
  * </p>
  */
-public class GlobalMetricRegistriesAdapter {
+@InterfaceAudience.Private
+public final class GlobalMetricRegistriesAdapter {
   private static final Logger LOG = LoggerFactory.getLogger(GlobalMetricRegistriesAdapter.class);

View File
@@ -33,7 +33,6 @@
 package org.apache.hadoop.hbase.metrics.impl;

 import java.util.Map;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.hbase.metrics.Counter;
 import org.apache.hadoop.hbase.metrics.Gauge;
@@ -48,6 +47,7 @@ import org.apache.hadoop.metrics2.MetricsInfo;
 import org.apache.hadoop.metrics2.MetricsRecordBuilder;
 import org.apache.hadoop.metrics2.lib.Interns;
 import org.apache.hadoop.metrics2.lib.MutableHistogram;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -59,6 +59,7 @@ import org.slf4j.LoggerFactory;
  *
  * Some of the code is forked from https://github.com/joshelser/dropwizard-hadoop-metrics2.
  */
+@InterfaceAudience.Private
 public class HBaseMetrics2HadoopMetricsAdapter {
   private static final Logger LOG
       = LoggerFactory.getLogger(HBaseMetrics2HadoopMetricsAdapter.class);

View File
@@ -21,7 +21,9 @@ package org.apache.hadoop.hbase.replication.regionserver;

 import org.apache.hadoop.metrics2.lib.MutableFastCounter;
 import org.apache.hadoop.metrics2.lib.MutableGaugeLong;
 import org.apache.hadoop.metrics2.lib.MutableHistogram;
+import org.apache.yetus.audience.InterfaceAudience;

+@InterfaceAudience.Private
 public class MetricsReplicationGlobalSourceSource implements MetricsReplicationSourceSource{
   private static final String KEY_PREFIX = "source.";

View File
@@ -19,9 +19,10 @@
 package org.apache.hadoop.hbase.replication.regionserver;

 import org.apache.hadoop.metrics2.lib.MutableFastCounter;
-import org.apache.hadoop.metrics2.lib.MutableGaugeLong;
 import org.apache.hadoop.metrics2.lib.MutableHistogram;
+import org.apache.yetus.audience.InterfaceAudience;

+@InterfaceAudience.Private
 public class MetricsReplicationSinkSourceImpl implements MetricsReplicationSinkSource {
   private final MutableHistogram ageHist;

View File
@@ -17,6 +17,9 @@
  */
 package org.apache.hadoop.hbase.replication.regionserver;

+import org.apache.yetus.audience.InterfaceAudience;

+@InterfaceAudience.Private
 public class MetricsReplicationSourceFactoryImpl implements MetricsReplicationSourceFactory {
   private static enum SourceHolder {

View File
@@ -20,7 +20,9 @@ package org.apache.hadoop.hbase.replication.regionserver;

 import org.apache.hadoop.metrics2.lib.MutableFastCounter;
 import org.apache.hadoop.metrics2.lib.MutableGaugeLong;
 import org.apache.hadoop.metrics2.lib.MutableHistogram;
+import org.apache.yetus.audience.InterfaceAudience;

+@InterfaceAudience.Private
 public class MetricsReplicationSourceSourceImpl implements MetricsReplicationSourceSource {
   private final MetricsReplicationSourceImpl rms;

View File
@@ -20,9 +20,11 @@ package org.apache.hadoop.metrics2.lib;

 import java.lang.reflect.Field;
 import java.lang.reflect.Method;
 import java.util.HashMap;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

+@InterfaceAudience.Private
 public class DefaultMetricsSystemHelper {
   private static final Logger LOG = LoggerFactory.getLogger(DefaultMetricsSystemHelper.class);

View File
@@ -22,7 +22,9 @@ import java.util.concurrent.atomic.LongAdder;

 import org.apache.hadoop.metrics2.MetricsInfo;
 import org.apache.hadoop.metrics2.MetricsRecordBuilder;
+import org.apache.yetus.audience.InterfaceAudience;

+@InterfaceAudience.Private
 public class MutableFastCounter extends MutableCounter {
   private final LongAdder counter;
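
The "fast" in the name comes from the LongAdder backing visible in the field above; a self-contained sketch of the trade-off, using only the JDK:

import java.util.concurrent.atomic.LongAdder;

// LongAdder spreads increments across padded cells, so concurrent writers
// avoid contending on a single CAS the way they would with AtomicLong.
// Reads pay an aggregation cost instead: the right trade for a metrics
// counter that is written constantly but read only when reported.
public class FastCounterSketch {
  private final LongAdder counter = new LongAdder();

  public void incr(long delta) {
    counter.add(delta); // hot path: cheap under write contention
  }

  public long value() {
    return counter.sum(); // reporting path: folds the cells together
  }
}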
View File
@@ -120,6 +120,10 @@
         </systemPropertyVariables>
       </configuration>
     </plugin>
+    <plugin>
+      <groupId>net.revelc.code</groupId>
+      <artifactId>warbucks-maven-plugin</artifactId>
+    </plugin>
   </plugins>
   <!-- General Resources -->
   <pluginManagement>

View File
@@ -18,10 +18,12 @@
 package org.apache.hadoop.hbase.http;

 import java.util.Map;
+import org.apache.yetus.audience.InterfaceAudience;

 /**
  * A container interface to add javax.servlet.Filter.
  */
+@InterfaceAudience.Private
 public interface FilterContainer {
   /**
    * Add a filter to the container.

View File
@@ -18,10 +18,12 @@
 package org.apache.hadoop.hbase.http;

 import org.apache.hadoop.conf.Configuration;
+import org.apache.yetus.audience.InterfaceAudience;

 /**
  * Initialize a javax.servlet.Filter.
  */
+@InterfaceAudience.Private
 public abstract class FilterInitializer {
   /**
    * Initialize a Filter to a FilterContainer.

View File
@@ -20,11 +20,13 @@ package org.apache.hadoop.hbase.http;

 import java.io.ByteArrayOutputStream;
 import java.io.IOException;
 import java.io.OutputStream;
+import org.apache.yetus.audience.InterfaceAudience;

 /**
  * This class is responsible for quoting HTML characters.
  */
-public class HtmlQuoting {
+@InterfaceAudience.Private
+public final class HtmlQuoting {
   private static final byte[] ampBytes = "&amp;".getBytes();
   private static final byte[] aposBytes = "&apos;".getBytes();
   private static final byte[] gtBytes = "&gt;".getBytes();
@@ -212,4 +214,5 @@ public class HtmlQuoting {
   }
  }
+ private HtmlQuoting() {}
 }
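
The workhorse is the static quoteHtmlChars method; a usage sketch (the exact entity forms in the comment are spelled out by hand from the byte arrays above, so treat them as an assumption):

// Escape &, <, >, ' and " before embedding untrusted text in HTML.
String unsafe = "a<script>alert('x')</script>";
String safe = HtmlQuoting.quoteHtmlChars(unsafe);
// safe == "a&lt;script&gt;alert(&apos;x&apos;)&lt;/script&gt;"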
View File
@@ -18,11 +18,11 @@
 package org.apache.hadoop.hbase.http;

 import java.util.HashMap;
 import org.apache.commons.logging.LogConfigurationException;
 import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.log4j.Appender;
 import org.apache.log4j.LogManager;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.eclipse.jetty.server.NCSARequestLog;
 import org.eclipse.jetty.server.RequestLog;
 import org.slf4j.Logger;
@@ -32,7 +32,8 @@ import org.slf4j.impl.Log4jLoggerAdapter;

 /**
  * RequestLog object for use with Http
  */
-public class HttpRequestLog {
+@InterfaceAudience.Private
+public final class HttpRequestLog {
   private static final Logger LOG = LoggerFactory.getLogger(HttpRequestLog.class);
   private static final HashMap<String, String> serverToComponent;
@@ -101,4 +102,6 @@ public class HttpRequestLog {
       return null;
     }
   }
+
+ private HttpRequestLog() {}
 }

View File
@@ -19,10 +19,12 @@ package org.apache.hadoop.hbase.http;

 import org.apache.log4j.spi.LoggingEvent;
 import org.apache.log4j.AppenderSkeleton;
+import org.apache.yetus.audience.InterfaceAudience;

 /**
  * Log4j Appender adapter for HttpRequestLog
  */
+@InterfaceAudience.Private
 public class HttpRequestLogAppender extends AppenderSkeleton {
   private String filename;

View File
@@ -17,15 +17,17 @@
  */
 package org.apache.hadoop.hbase.http;

+import org.apache.yetus.audience.InterfaceAudience;
-import org.eclipse.jetty.security.ConstraintSecurityHandler;
-import org.eclipse.jetty.util.security.Constraint;
 import org.eclipse.jetty.security.ConstraintMapping;
+import org.eclipse.jetty.security.ConstraintSecurityHandler;
 import org.eclipse.jetty.servlet.ServletContextHandler;
+import org.eclipse.jetty.util.security.Constraint;

 /**
  * HttpServer utility.
  */
-public class HttpServerUtil {
+@InterfaceAudience.Private
+public final class HttpServerUtil {
   /**
    * Add constraints to a Jetty Context to disallow undesirable Http methods.
    * @param ctxHandler The context to modify
@@ -49,4 +51,6 @@ public class HttpServerUtil {
     ctxHandler.setSecurityHandler(securityHandler);
   }
+
+ private HttpServerUtil() {}
 }
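
The constraint wiring this helper performs has roughly the following shape, using the Jetty classes imported above. A sketch only: the specific methods blocked here are illustrative, not read from the commit.

// Sketch: deny selected HTTP verbs on every path of a Jetty context.
ConstraintSecurityHandler securityHandler = new ConstraintSecurityHandler();
Constraint disallow = new Constraint();
disallow.setAuthenticate(true); // with no roles granted, matching requests are refused
for (String method : new String[] { "TRACE", "OPTIONS" }) {
  ConstraintMapping mapping = new ConstraintMapping();
  mapping.setConstraint(disallow);
  mapping.setMethod(method);
  mapping.setPathSpec("/*");
  securityHandler.addConstraintMapping(mapping);
}
ctxHandler.setSecurityHandler(securityHandler); // ctxHandler: the ServletContextHandler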
View File
@@ -20,7 +20,6 @@ package org.apache.hadoop.hbase.http.jmx;

 import java.io.IOException;
 import java.io.PrintWriter;
 import java.lang.management.ManagementFactory;
 import javax.management.MBeanServer;
 import javax.management.MalformedObjectNameException;
 import javax.management.ObjectName;
@@ -30,9 +29,9 @@ import javax.servlet.ServletException;
 import javax.servlet.http.HttpServlet;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 import org.apache.hadoop.hbase.http.HttpServer;
 import org.apache.hadoop.hbase.util.JSONBean;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
@@ -110,6 +109,7 @@ import org.slf4j.LoggerFactory;
  * </p>
  *
  */
+@InterfaceAudience.Private
 public class JMXJsonServlet extends HttpServlet {
   private static final Logger LOG = LoggerFactory.getLogger(JMXJsonServlet.class);

View File
@@ -24,28 +24,26 @@ import java.io.PrintWriter;
 import java.net.URL;
 import java.net.URLConnection;
 import java.util.regex.Pattern;
 import javax.servlet.ServletException;
 import javax.servlet.http.HttpServlet;
 import javax.servlet.http.HttpServletRequest;
 import javax.servlet.http.HttpServletResponse;
 import org.apache.commons.logging.impl.Jdk14Logger;
 import org.apache.commons.logging.impl.Log4JLogger;
+import org.apache.hadoop.hbase.http.HttpServer;
+import org.apache.hadoop.util.ServletUtil;
+import org.apache.log4j.LogManager;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.yetus.audience.InterfaceStability;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.slf4j.impl.Log4jLoggerAdapter;
-import org.apache.hadoop.hbase.http.HttpServer;
-import org.apache.hadoop.util.ServletUtil;
-import org.apache.log4j.LogManager;

 /**
  * Change log level in runtime.
  */
-@InterfaceStability.Evolving
-public class LogLevel {
+@InterfaceAudience.Private
+public final class LogLevel {
   public static final String USAGES = "\nUsage: General options are:\n"
       + "\t[-getlevel <host:httpPort> <name>]\n"
       + "\t[-setlevel <host:httpPort> <name> <level>]\n";
@@ -174,4 +172,6 @@ public class LogLevel {
     out.println(MARKER + "Effective level: <b>" + lev + "</b><br />");
   }
  }
+
+ private LogLevel() {}
 }
Some files were not shown because too many files have changed in this diff.