HBASE-17823 Migrate to Apache Yetus Audience Annotations

Signed-off-by: Michael Stack <stack@apache.org>
Signed-off-by: Misty Stanley-Jones <misty@apache.org>
Sean Busbey 2017-08-19 03:23:52 -05:00
parent 3df0351f22
commit 4b124913f0
2058 changed files with 2491 additions and 3175 deletions


@@ -28,8 +28,8 @@
 # $ ./checkcompatibility.py ${SOURCE_GIT_REVISION} ${GIT_BRANCH_OR_TAG}
 # or with some options:
 # $ ./dev-support/checkcompatibility.py \
-#     --annotation org.apache.hadoop.hbase.classification.InterfaceAudience.Public \
-#     --annotation org.apache.hadoop.hbase.classification.InterfaceAudience.LimitedPrivate \
+#     --annotation org.apache.yetus.audience.InterfaceAudience.Public \
+#     --annotation org.apache.yetus.audience.InterfaceAudience.LimitedPrivate \
 #     --include-file "hbase-*" \
 #     --known_problems_path ~/known_problems.json \
 #     rel/1.0.0 branch-1.2


@@ -21,46 +21,13 @@
   -->
   <modelVersion>4.0.0</modelVersion>
   <parent>
-    <artifactId>hbase-build-configuration</artifactId>
+    <artifactId>hbase</artifactId>
     <groupId>org.apache.hbase</groupId>
     <version>3.0.0-SNAPSHOT</version>
-    <relativePath>../hbase-build-configuration</relativePath>
+    <relativePath>..</relativePath>
   </parent>
   <artifactId>hbase-annotations</artifactId>
   <name>Apache HBase - Annotations</name>
-  <description>Copy of Hadoop's annotations for HBase</description>
+  <description>Annotations for tests</description>
-  <profiles>
-    <profile>
-      <id>build-with-jdk7</id>
-      <activation>
-        <jdk>1.7</jdk>
-      </activation>
-      <dependencies>
-        <dependency>
-          <groupId>jdk.tools</groupId>
-          <artifactId>jdk.tools</artifactId>
-          <version>1.7</version>
-          <scope>system</scope>
-          <systemPath>${java.home}/../lib/tools.jar</systemPath>
-        </dependency>
-      </dependencies>
-    </profile>
-    <profile>
-      <id>build-with-jdk8</id>
-      <activation>
-        <jdk>1.8</jdk>
-      </activation>
-      <dependencies>
-        <dependency>
-          <groupId>jdk.tools</groupId>
-          <artifactId>jdk.tools</artifactId>
-          <version>1.8</version>
-          <scope>system</scope>
-          <systemPath>${java.home}/../lib/tools.jar</systemPath>
-        </dependency>
-      </dependencies>
-    </profile>
-  </profiles>
 </project>


@@ -1,72 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.classification;
import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
/**
* Annotation to inform users of a package, class or method's intended audience.
* Currently the audience can be {@link Public}, {@link LimitedPrivate} or
* {@link Private}. <br>
* All public classes must have InterfaceAudience annotation. <br>
* <ul>
* <li>Public classes that are not marked with this annotation must be
* considered by default as {@link Private}.</li>
*
* <li>External applications must only use classes that are marked
* {@link Public}. Avoid using non public classes as these classes
* could be removed or change in incompatible ways.</li>
*
* <li>Hadoop projects must only use classes that are marked
* {@link LimitedPrivate} or {@link Public}</li>
*
 * <li> Methods may have a different annotation that is more restrictive
 * than the audience classification of the class. Example: A class
* might be {@link Public}, but a method may be {@link LimitedPrivate}
* </li></ul>
*/
@InterfaceAudience.Public
public final class InterfaceAudience {
/**
* Intended for use by any project or application.
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
public @interface Public {}
/**
* Intended only for the project(s) specified in the annotation.
* For example, "Common", "HDFS", "MapReduce", "ZooKeeper", "HBase".
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
public @interface LimitedPrivate {
String[] value();
}
/**
* Intended for use only within Hadoop itself.
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
public @interface Private {}
private InterfaceAudience() {} // Audience can't exist on its own
}
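
For reference, applying these audience annotations looks like the following minimal sketch; ExampleClient is a hypothetical class invented for illustration, not part of this commit:

import org.apache.hadoop.hbase.classification.InterfaceAudience;

// Public: safe for external applications to compile against.
@InterfaceAudience.Public
public class ExampleClient {
  // A member may be more restrictive than its class: this hook is
  // intended only for the HBase project itself.
  @InterfaceAudience.LimitedPrivate({"HBase"})
  public void internalHook() {
  }
}

After this commit the same code would import org.apache.yetus.audience.InterfaceAudience instead; the annotation names themselves are unchanged.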


@@ -1,65 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.classification;
import java.lang.annotation.Documented;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
/**
* Annotation to inform users of how much to rely on a particular package,
* class or method not changing over time. Currently the stability can be
* {@link Stable}, {@link Evolving} or {@link Unstable}. <br>
*
* <ul><li>All classes that are annotated with
* {@link org.apache.hadoop.hbase.classification.InterfaceAudience.Public} or
* {@link org.apache.hadoop.hbase.classification.InterfaceAudience.LimitedPrivate}
* must have InterfaceStability annotation. </li>
* <li>Classes that are
* {@link org.apache.hadoop.hbase.classification.InterfaceAudience.LimitedPrivate}
* are to be considered unstable unless a different InterfaceStability annotation
* states otherwise.</li>
* <li>Incompatible changes must not be made to classes marked as stable.</li>
* </ul>
*/
@InterfaceAudience.Public
public class InterfaceStability {
/**
 * Can evolve while retaining compatibility across minor release boundaries;
 * can break compatibility only at major release (i.e. at m.0).
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
public @interface Stable {}
/**
* Evolving, but can break compatibility at minor release (i.e. m.x)
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
public @interface Evolving {}
/**
* No guarantee is provided as to reliability or stability across any
* level of release granularity.
*/
@Documented
@Retention(RetentionPolicy.RUNTIME)
public @interface Unstable {}
}
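
Likewise, a stability annotation is stacked on top of the audience annotation; a minimal sketch with a hypothetical class, not part of this commit:

import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;

// Public audience with an explicit stability contract: callers should
// expect possible breaking changes at minor releases.
@InterfaceAudience.Public
@InterfaceStability.Evolving
public class ExampleEvolvingApi {
}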


@@ -1,62 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.classification.tools;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import com.sun.javadoc.DocErrorReporter;
import com.sun.javadoc.LanguageVersion;
import com.sun.javadoc.RootDoc;
import com.sun.tools.doclets.standard.Standard;
/**
* A <a href="http://java.sun.com/javase/6/docs/jdk/api/javadoc/doclet/">Doclet</a>
* for excluding elements that are annotated with
* {@link org.apache.hadoop.hbase.classification.InterfaceAudience.Private} or
* {@link org.apache.hadoop.hbase.classification.InterfaceAudience.LimitedPrivate}.
* It delegates to the Standard Doclet, and takes the same options.
*/
@InterfaceAudience.Private
public final class ExcludePrivateAnnotationsStandardDoclet {
private ExcludePrivateAnnotationsStandardDoclet() {}
public static LanguageVersion languageVersion() {
return LanguageVersion.JAVA_1_5;
}
public static boolean start(RootDoc root) {
System.out.println(
ExcludePrivateAnnotationsStandardDoclet.class.getSimpleName());
return Standard.start(RootDocProcessor.process(root));
}
public static int optionLength(String option) {
Integer length = StabilityOptions.optionLength(option);
if (length != null) {
return length;
}
return Standard.optionLength(option);
}
public static boolean validOptions(String[][] options,
DocErrorReporter reporter) {
StabilityOptions.validOptions(options, reporter);
String[][] filteredOptions = StabilityOptions.filterOptions(options);
return Standard.validOptions(filteredOptions, reporter);
}
}


@@ -1,68 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.classification.tools;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import com.sun.javadoc.DocErrorReporter;
import com.sun.javadoc.LanguageVersion;
import com.sun.javadoc.RootDoc;
import com.sun.tools.doclets.standard.Standard;
/**
* A <a href="http://java.sun.com/javase/6/docs/jdk/api/javadoc/doclet/">Doclet</a>
* that only includes class-level elements that are annotated with
* {@link org.apache.hadoop.hbase.classification.InterfaceAudience.Public}.
* Class-level elements with no annotation are excluded.
* In addition, all elements that are annotated with
* {@link org.apache.hadoop.hbase.classification.InterfaceAudience.Private} or
* {@link org.apache.hadoop.hbase.classification.InterfaceAudience.LimitedPrivate}
* are also excluded.
* It delegates to the Standard Doclet, and takes the same options.
*/
@InterfaceAudience.Private
public final class IncludePublicAnnotationsStandardDoclet {
private IncludePublicAnnotationsStandardDoclet() {}
public static LanguageVersion languageVersion() {
return LanguageVersion.JAVA_1_5;
}
public static boolean start(RootDoc root) {
System.out.println(
IncludePublicAnnotationsStandardDoclet.class.getSimpleName());
RootDocProcessor.treatUnannotatedClassesAsPrivate = true;
return Standard.start(RootDocProcessor.process(root));
}
public static int optionLength(String option) {
Integer length = StabilityOptions.optionLength(option);
if (length != null) {
return length;
}
return Standard.optionLength(option);
}
public static boolean validOptions(String[][] options,
DocErrorReporter reporter) {
StabilityOptions.validOptions(options, reporter);
String[][] filteredOptions = StabilityOptions.filterOptions(options);
return Standard.validOptions(filteredOptions, reporter);
}
}


@@ -1,232 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.classification.tools;
import com.sun.javadoc.AnnotationDesc;
import com.sun.javadoc.AnnotationTypeDoc;
import com.sun.javadoc.ClassDoc;
import com.sun.javadoc.ConstructorDoc;
import com.sun.javadoc.Doc;
import com.sun.javadoc.FieldDoc;
import com.sun.javadoc.MethodDoc;
import com.sun.javadoc.PackageDoc;
import com.sun.javadoc.ProgramElementDoc;
import com.sun.javadoc.RootDoc;
import java.lang.reflect.Array;
import java.lang.reflect.InvocationHandler;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.WeakHashMap;
import org.apache.hadoop.hbase.classification.InterfaceAudience;
import org.apache.hadoop.hbase.classification.InterfaceStability;
/**
* Process the {@link RootDoc} by substituting with (nested) proxy objects that
* exclude elements with Private or LimitedPrivate annotations.
* <p>
* Based on code from http://www.sixlegs.com/blog/java/exclude-javadoc-tag.html.
*/
final class RootDocProcessor {
static String stability = StabilityOptions.UNSTABLE_OPTION;
static boolean treatUnannotatedClassesAsPrivate = false;
private RootDocProcessor() {}
public static RootDoc process(RootDoc root) {
return (RootDoc) process(root, RootDoc.class);
}
private static Object process(Object obj, Class<?> type) {
if (obj == null) {
return null;
}
Class<?> cls = obj.getClass();
if (cls.getName().startsWith("com.sun.")) {
return getProxy(obj);
} else if (obj instanceof Object[]) {
Class<?> componentType = type.isArray() ? type.getComponentType()
: cls.getComponentType();
Object[] array = (Object[]) obj;
Object[] newArray = (Object[]) Array.newInstance(componentType,
array.length);
for (int i = 0; i < array.length; ++i) {
newArray[i] = process(array[i], componentType);
}
return newArray;
}
return obj;
}
private static Map<Object, Object> proxies = new WeakHashMap<>();
private static Object getProxy(Object obj) {
Object proxy = proxies.get(obj);
if (proxy == null) {
proxy = Proxy.newProxyInstance(obj.getClass().getClassLoader(),
obj.getClass().getInterfaces(), new ExcludeHandler(obj));
proxies.put(obj, proxy);
}
return proxy;
}
private static class ExcludeHandler implements InvocationHandler {
private Object target;
public ExcludeHandler(Object target) {
this.target = target;
}
@Override
public Object invoke(Object proxy, Method method, Object[] args) throws Throwable {
String methodName = method.getName();
if (target instanceof Doc) {
if (methodName.equals("isIncluded")) {
Doc doc = (Doc) target;
return !exclude(doc) && doc.isIncluded();
}
if (target instanceof RootDoc) {
switch (methodName) {
case "classes":
return filter(((RootDoc) target).classes(), ClassDoc.class);
case "specifiedClasses":
return filter(((RootDoc) target).specifiedClasses(), ClassDoc.class);
case "specifiedPackages":
return filter(((RootDoc) target).specifiedPackages(), PackageDoc.class);
}
} else if (target instanceof ClassDoc) {
if (isFiltered(args)) {
switch (methodName) {
case "methods":
return filter(((ClassDoc) target).methods(true), MethodDoc.class);
case "fields":
return filter(((ClassDoc) target).fields(true), FieldDoc.class);
case "innerClasses":
return filter(((ClassDoc) target).innerClasses(true), ClassDoc.class);
case "constructors":
return filter(((ClassDoc) target).constructors(true), ConstructorDoc.class);
}
}
} else if (target instanceof PackageDoc) {
switch (methodName) {
case "allClasses":
if (isFiltered(args)) {
return filter(((PackageDoc) target).allClasses(true), ClassDoc.class);
} else {
return filter(((PackageDoc) target).allClasses(), ClassDoc.class);
}
case "annotationTypes":
return filter(((PackageDoc) target).annotationTypes(), AnnotationTypeDoc.class);
case "enums":
return filter(((PackageDoc) target).enums(), ClassDoc.class);
case "errors":
return filter(((PackageDoc) target).errors(), ClassDoc.class);
case "exceptions":
return filter(((PackageDoc) target).exceptions(), ClassDoc.class);
case "interfaces":
return filter(((PackageDoc) target).interfaces(), ClassDoc.class);
case "ordinaryClasses":
return filter(((PackageDoc) target).ordinaryClasses(), ClassDoc.class);
}
}
}
if (args != null) {
if (methodName.equals("compareTo") || methodName.equals("equals")
|| methodName.equals("overrides") || methodName.equals("subclassOf")) {
args[0] = unwrap(args[0]);
}
}
try {
return process(method.invoke(target, args), method.getReturnType());
} catch (InvocationTargetException e) {
throw e.getTargetException();
}
}
private static boolean exclude(Doc doc) {
AnnotationDesc[] annotations = null;
if (doc instanceof ProgramElementDoc) {
annotations = ((ProgramElementDoc) doc).annotations();
} else if (doc instanceof PackageDoc) {
annotations = ((PackageDoc) doc).annotations();
}
if (annotations != null) {
for (AnnotationDesc annotation : annotations) {
String qualifiedTypeName = annotation.annotationType().qualifiedTypeName();
if (qualifiedTypeName.equals(InterfaceAudience.Private.class.getCanonicalName())
|| qualifiedTypeName
.equals(InterfaceAudience.LimitedPrivate.class.getCanonicalName())) {
return true;
}
if (stability.equals(StabilityOptions.EVOLVING_OPTION)) {
if (qualifiedTypeName.equals(InterfaceStability.Unstable.class.getCanonicalName())) {
return true;
}
}
if (stability.equals(StabilityOptions.STABLE_OPTION)) {
if (qualifiedTypeName.equals(InterfaceStability.Unstable.class.getCanonicalName())
|| qualifiedTypeName.equals(InterfaceStability.Evolving.class.getCanonicalName())) {
return true;
}
}
}
for (AnnotationDesc annotation : annotations) {
String qualifiedTypeName = annotation.annotationType().qualifiedTypeName();
if (qualifiedTypeName.equals(InterfaceAudience.Public.class.getCanonicalName())) {
return false;
}
}
}
if (treatUnannotatedClassesAsPrivate) {
return doc.isClass() || doc.isInterface() || doc.isAnnotationType();
}
return false;
}
private static Object[] filter(Doc[] array, Class<?> componentType) {
if (array == null || array.length == 0) {
return array;
}
List<Object> list = new ArrayList<>(array.length);
for (Doc entry : array) {
if (!exclude(entry)) {
list.add(process(entry, componentType));
}
}
return list.toArray((Object[]) Array.newInstance(componentType, list.size()));
}
private Object unwrap(Object proxy) {
if (proxy instanceof Proxy) {
return ((ExcludeHandler) Proxy.getInvocationHandler(proxy)).target;
}
return proxy;
}
private boolean isFiltered(Object[] args) {
return args != null && Boolean.TRUE.equals(args[0]);
}
}
}
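
The class comment above describes substituting (nested) proxy objects; as a standalone illustration of the underlying java.lang.reflect.Proxy technique (a toy example, not part of this commit), consider:

import java.lang.reflect.Proxy;
import java.util.Arrays;
import java.util.List;

public class ProxyFilterSketch {
  public static void main(String[] args) {
    List<String> target = Arrays.asList("public", "private");
    @SuppressWarnings("unchecked")
    List<String> proxy = (List<String>) Proxy.newProxyInstance(
        ProxyFilterSketch.class.getClassLoader(),
        new Class<?>[] { List.class },
        (p, method, methodArgs) -> {
          // Intercept size() the way ExcludeHandler intercepts isIncluded():
          // pretend an excluded element is not there.
          if (method.getName().equals("size")) {
            return 1;
          }
          return method.invoke(target, methodArgs);
        });
    System.out.println(proxy.size()); // prints 1, although target has 2 entries
  }
}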


@@ -1,78 +0,0 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.classification.tools;
import com.sun.javadoc.DocErrorReporter;
import java.util.ArrayList;
import java.util.List;
import java.util.Locale;
final class StabilityOptions {
private StabilityOptions() {}
public static final String STABLE_OPTION = "-stable";
public static final String EVOLVING_OPTION = "-evolving";
public static final String UNSTABLE_OPTION = "-unstable";
public static Integer optionLength(String option) {
String opt = option.toLowerCase(Locale.ROOT);
if (opt.equals(UNSTABLE_OPTION)) return 1;
if (opt.equals(EVOLVING_OPTION)) return 1;
if (opt.equals(STABLE_OPTION)) return 1;
return null;
}
public static void validOptions(String[][] options, DocErrorReporter reporter) {
for (String[] option : options) {
String opt = option[0].toLowerCase(Locale.ROOT);
switch (opt) {
case UNSTABLE_OPTION:
RootDocProcessor.stability = UNSTABLE_OPTION;
break;
case EVOLVING_OPTION:
RootDocProcessor.stability = EVOLVING_OPTION;
break;
case STABLE_OPTION:
RootDocProcessor.stability = STABLE_OPTION;
break;
default:
RootDocProcessor.stability = UNSTABLE_OPTION;
break;
}
}
}
public static String[][] filterOptions(String[][] options) {
List<String[]> optionsList = new ArrayList<>();
for (String[] option : options) {
if (!option[0].equalsIgnoreCase(UNSTABLE_OPTION)
&& !option[0].equalsIgnoreCase(EVOLVING_OPTION)
&& !option[0].equalsIgnoreCase(STABLE_OPTION)) {
optionsList.add(option);
}
}
String[][] filteredOptions = new String[optionsList.size()][];
int i = 0;
for (String[] option : optionsList) {
filteredOptions[i++] = option;
}
return filteredOptions;
}
}


@@ -83,16 +83,6 @@
   </build>
   <dependencies>
     <!-- Intra-project dependencies -->
-    <dependency>
-      <groupId>org.apache.hbase</groupId>
-      <artifactId>hbase-annotations</artifactId>
-      <exclusions>
-        <exclusion>
-          <groupId>jdk.tools</groupId>
-          <artifactId>jdk.tools</artifactId>
-        </exclusion>
-      </exclusions>
-    </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-annotations</artifactId>


@@ -24,7 +24,7 @@ import java.util.List;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.backup.util.BackupSet;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 /**
  * The administrative API for HBase Backup. Construct an instance and call {@link #close()}


@@ -25,7 +25,7 @@ import org.apache.hadoop.hbase.backup.impl.IncrementalTableBackupClient;
 import org.apache.hadoop.hbase.backup.impl.TableBackupClient;
 import org.apache.hadoop.hbase.client.Connection;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 @InterfaceAudience.Private
 public class BackupClientFactory {


@@ -23,7 +23,7 @@ import java.io.IOException;
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.backup.impl.BackupManager;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 /**
  * Backup copy job is a part of a backup process. The concrete implementation is responsible for


@@ -44,7 +44,7 @@ import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.backup.BackupRestoreConstants.BackupCommand;
 import org.apache.hadoop.hbase.backup.impl.BackupCommands;
 import org.apache.hadoop.hbase.backup.impl.BackupManager;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.util.AbstractHBaseTool;
 import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.util.ToolRunner;


@@ -33,7 +33,7 @@ import org.apache.hadoop.hbase.Abortable;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.backup.impl.BackupSystemTable;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.master.cleaner.BaseHFileCleanerDelegate;


@@ -34,7 +34,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.backup.util.BackupUtils;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos.BackupInfo.Builder;


@@ -21,7 +21,7 @@ package org.apache.hadoop.hbase.backup;
 import java.io.IOException;
 import org.apache.hadoop.conf.Configurable;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 /**
  * Backup merge operation job interface. Concrete implementation is provided by backup provider, see


@@ -31,7 +31,7 @@ import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.backup.impl.BackupManager;
 import org.apache.hadoop.hbase.backup.impl.BackupSystemTable;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.coprocessor.ObserverContext;


@@ -21,7 +21,7 @@ package org.apache.hadoop.hbase.backup;
 import java.util.List;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 /**
  * POJO class for backup request


@@ -19,7 +19,7 @@
 package org.apache.hadoop.hbase.backup;
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 /**
  * BackupRestoreConstants holds a bunch of HBase Backup and Restore constants


@@ -21,7 +21,7 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.backup.mapreduce.MapReduceBackupCopyJob;
 import org.apache.hadoop.hbase.backup.mapreduce.MapReduceBackupMergeJob;
 import org.apache.hadoop.hbase.backup.mapreduce.MapReduceRestoreJob;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.util.ReflectionUtils;
 /**


@@ -19,7 +19,7 @@
 package org.apache.hadoop.hbase.backup;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos;


@@ -30,7 +30,7 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.backup.impl.BackupManifest;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 /**
  * View to an on-disk Backup Image FileSystem. Provides the set of methods necessary to interact with
@@ -143,4 +143,4 @@ public class HBackupFileSystem {
       backupManifestMap.put(tableName, manifest);
     }
   }
-}
+}


@@ -18,7 +18,7 @@
 package org.apache.hadoop.hbase.backup;
 import org.apache.commons.logging.Log;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.log4j.Level;
 import org.apache.log4j.Logger;


@@ -49,7 +49,7 @@ import org.apache.hadoop.hbase.backup.impl.BackupAdminImpl;
 import org.apache.hadoop.hbase.backup.impl.BackupManager;
 import org.apache.hadoop.hbase.backup.impl.BackupSystemTable;
 import org.apache.hadoop.hbase.backup.util.BackupUtils;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.util.AbstractHBaseTool;


@@ -23,7 +23,7 @@ import java.io.IOException;
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 /**
  * Restore operation job interface. Concrete implementation is provided by backup provider, see


@@ -18,7 +18,7 @@
 package org.apache.hadoop.hbase.backup;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 /**
  * POJO class for restore request


@@ -46,7 +46,7 @@ import org.apache.hadoop.hbase.backup.HBackupFileSystem;
 import org.apache.hadoop.hbase.backup.RestoreRequest;
 import org.apache.hadoop.hbase.backup.util.BackupSet;
 import org.apache.hadoop.hbase.backup.util.BackupUtils;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;


@@ -58,7 +58,7 @@ import org.apache.hadoop.hbase.backup.BackupRestoreConstants.BackupCommand;
 import org.apache.hadoop.hbase.backup.BackupType;
 import org.apache.hadoop.hbase.backup.util.BackupSet;
 import org.apache.hadoop.hbase.backup.util.BackupUtils;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.shaded.com.google.common.collect.Lists;


@@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.backup.impl;
 import org.apache.hadoop.hbase.HBaseIOException;
 import org.apache.hadoop.hbase.backup.BackupInfo;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 /**
  * Backup exception


@@ -43,7 +43,7 @@ import org.apache.hadoop.hbase.backup.impl.BackupManifest.BackupImage;
 import org.apache.hadoop.hbase.backup.master.BackupLogCleaner;
 import org.apache.hadoop.hbase.backup.master.LogRollMasterProcedureManager;
 import org.apache.hadoop.hbase.backup.regionserver.LogRollRegionServerProcedureManager;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.procedure.ProcedureManagerHost;


@@ -41,7 +41,7 @@ import org.apache.hadoop.hbase.backup.BackupInfo;
 import org.apache.hadoop.hbase.backup.BackupType;
 import org.apache.hadoop.hbase.backup.HBackupFileSystem;
 import org.apache.hadoop.hbase.backup.util.BackupUtils;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.BackupProtos;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;


@@ -50,7 +50,7 @@ import org.apache.hadoop.hbase.backup.BackupInfo.BackupState;
 import org.apache.hadoop.hbase.backup.BackupRestoreConstants;
 import org.apache.hadoop.hbase.backup.BackupType;
 import org.apache.hadoop.hbase.backup.util.BackupUtils;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.Delete;


@@ -40,7 +40,7 @@ import org.apache.hadoop.hbase.backup.BackupRestoreFactory;
 import org.apache.hadoop.hbase.backup.BackupType;
 import org.apache.hadoop.hbase.backup.master.LogRollMasterProcedureManager;
 import org.apache.hadoop.hbase.backup.util.BackupUtils;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;


@@ -38,7 +38,7 @@ import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.backup.impl.BackupSystemTable.WALItem;
 import org.apache.hadoop.hbase.backup.master.LogRollMasterProcedureManager;
 import org.apache.hadoop.hbase.backup.util.BackupUtils;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.util.FSUtils;


@@ -43,7 +43,7 @@ import org.apache.hadoop.hbase.backup.BackupRequest;
 import org.apache.hadoop.hbase.backup.BackupRestoreFactory;
 import org.apache.hadoop.hbase.backup.BackupType;
 import org.apache.hadoop.hbase.backup.util.BackupUtils;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.mapreduce.WALPlayer;


@@ -41,7 +41,7 @@ import org.apache.hadoop.hbase.backup.RestoreRequest;
 import org.apache.hadoop.hbase.backup.impl.BackupManifest.BackupImage;
 import org.apache.hadoop.hbase.backup.util.BackupUtils;
 import org.apache.hadoop.hbase.backup.util.RestoreTool;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.tool.LoadIncrementalHFiles;


@@ -37,7 +37,7 @@ import org.apache.hadoop.hbase.backup.BackupRestoreConstants;
 import org.apache.hadoop.hbase.backup.BackupType;
 import org.apache.hadoop.hbase.backup.HBackupFileSystem;
 import org.apache.hadoop.hbase.backup.impl.BackupManifest.BackupImage;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;


@@ -35,7 +35,7 @@ import org.apache.hadoop.hbase.backup.BackupInfo;
 import org.apache.hadoop.hbase.backup.BackupType;
 import org.apache.hadoop.hbase.backup.impl.BackupManager;
 import org.apache.hadoop.hbase.backup.util.BackupUtils;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.snapshot.ExportSnapshot;
 import org.apache.hadoop.mapreduce.Cluster;
 import org.apache.hadoop.mapreduce.Counters;


@@ -40,7 +40,7 @@ import org.apache.hadoop.hbase.backup.HBackupFileSystem;
 import org.apache.hadoop.hbase.backup.impl.BackupManifest;
 import org.apache.hadoop.hbase.backup.impl.BackupSystemTable;
 import org.apache.hadoop.hbase.backup.util.BackupUtils;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.util.Pair;


@@ -29,7 +29,7 @@ import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.KeyValue.Type;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.client.RegionLocator;


@@ -31,7 +31,7 @@ import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.backup.BackupRestoreConstants;
 import org.apache.hadoop.hbase.backup.RestoreJob;
 import org.apache.hadoop.hbase.backup.util.BackupUtils;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.tool.LoadIncrementalHFiles;
 import org.apache.hadoop.util.Tool;


@@ -32,7 +32,7 @@ import org.apache.hadoop.hbase.TableNotFoundException;
 import org.apache.hadoop.hbase.backup.BackupRestoreConstants;
 import org.apache.hadoop.hbase.backup.impl.BackupManager;
 import org.apache.hadoop.hbase.backup.impl.BackupSystemTable;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.master.HMaster;


@@ -30,7 +30,7 @@ import org.apache.hadoop.hbase.CoordinatedStateManagerFactory;
 import org.apache.hadoop.hbase.ServerName;
 import org.apache.hadoop.hbase.backup.BackupRestoreConstants;
 import org.apache.hadoop.hbase.backup.impl.BackupManager;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.coordination.BaseCoordinatedStateManager;
 import org.apache.hadoop.hbase.errorhandling.ForeignException;
 import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher;


@@ -26,7 +26,7 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.backup.impl.BackupSystemTable;
 import org.apache.hadoop.hbase.backup.master.LogRollMasterProcedureManager;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.errorhandling.ForeignException;
 import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher;


@@ -34,7 +34,7 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Abortable;
 import org.apache.hadoop.hbase.DaemonThreadFactory;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.errorhandling.ForeignException;
 /**
@@ -136,4 +136,4 @@ public class LogRollBackupSubprocedurePool implements Closeable, Abortable {
   public boolean isAborted() {
     return this.aborted;
   }
-}
+}


@@ -28,7 +28,7 @@ import org.apache.hadoop.hbase.CoordinatedStateManagerFactory;
 import org.apache.hadoop.hbase.backup.BackupRestoreConstants;
 import org.apache.hadoop.hbase.backup.impl.BackupManager;
 import org.apache.hadoop.hbase.backup.master.LogRollMasterProcedureManager;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.coordination.BaseCoordinatedStateManager;
 import org.apache.hadoop.hbase.errorhandling.ForeignExceptionDispatcher;
 import org.apache.hadoop.hbase.procedure.ProcedureMember;


@@ -21,7 +21,7 @@ import java.util.List;
 import org.apache.commons.lang3.StringUtils;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 /**
  * Backup set is a named group of HBase tables, which are managed together by Backup/Restore


@@ -53,7 +53,7 @@ import org.apache.hadoop.hbase.backup.HBackupFileSystem;
 import org.apache.hadoop.hbase.backup.RestoreRequest;
 import org.apache.hadoop.hbase.backup.impl.BackupManifest;
 import org.apache.hadoop.hbase.backup.impl.BackupManifest.BackupImage;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.tool.LoadIncrementalHFiles;


@@ -38,7 +38,7 @@ import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.backup.BackupRestoreFactory;
 import org.apache.hadoop.hbase.backup.HBackupFileSystem;
 import org.apache.hadoop.hbase.backup.RestoreJob;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.client.Admin;
 import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
 import org.apache.hadoop.hbase.client.Connection;


@@ -94,4 +94,4 @@ public class TestBackupBoundaryTests extends TestBackupBase {
     List<TableName> tables = toList(table1.getNameAsString(), "tabledne");
     fullTableBackup(tables);
   }
-}
+}


@@ -67,4 +67,4 @@ public class TestBackupDeleteRestore extends TestBackupBase {
     assertEquals(numRows, numRowsAfterRestore);
     hba.close();
   }
-}
+}


@@ -138,4 +138,4 @@ public class TestBackupHFileCleaner {
     assertFalse("Cleaner should not allow to delete this file as there is a hfile reference "
         + "for it.", found);
   }
-}
+}


@@ -88,4 +88,4 @@ public class TestBackupRepair extends TestBackupBase {
   }
-}
+}


@@ -145,4 +145,4 @@ public class TestBackupShowHistory extends TestBackupBase {
     LOG.info("show_history");
   }
-}
+}


@@ -56,4 +56,4 @@ public class TestFullBackup extends TestBackupBase {
     LOG.info("backup complete");
   }
-}
+}


@@ -100,4 +100,4 @@ public class TestFullBackupSet extends TestBackupBase {
   }
-}
+}


@@ -125,4 +125,4 @@ public class TestFullBackupSetRestoreSet extends TestBackupBase {
   }
-}
+}


@@ -76,4 +76,4 @@ public class TestFullBackupWithFailures extends TestBackupBase {
   }
-}
+}


@@ -132,4 +132,4 @@ public class TestRemoteBackup extends TestBackupBase {
     hAdmin.close();
   }
-}
+}


@@ -56,4 +56,4 @@ public class TestRemoteRestore extends TestBackupBase {
     hba.close();
   }
-}
+}


@@ -77,4 +77,4 @@ public class TestRestoreBoundaryTests extends TestBackupBase {
     TEST_UTIL.deleteTable(table3_restore);
     hba.close();
   }
-}
+}


@@ -52,4 +52,4 @@ public class TestSystemTableSnapshot extends TestBackupBase {
     hba.close();
   }
-}
+}


@@ -43,6 +43,18 @@
       </plugin>
     </plugins>
   </build>
+  <dependencies>
+    <dependency>
+      <groupId>org.apache.hbase</groupId>
+      <artifactId>hbase-annotations</artifactId>
+      <type>test-jar</type>
+      <scope>test</scope>
+    </dependency>
+    <dependency>
+      <groupId>org.apache.yetus</groupId>
+      <artifactId>audience-annotations</artifactId>
+    </dependency>
+  </dependencies>
   <profiles>
     <profile>
       <id>errorProne</id>


@@ -92,16 +92,6 @@
       <artifactId>hbase-shaded-protobuf</artifactId>
     </dependency>
     <!-- Intra-project dependencies -->
-    <dependency>
-      <groupId>org.apache.hbase</groupId>
-      <artifactId>hbase-annotations</artifactId>
-      <exclusions>
-        <exclusion>
-          <groupId>jdk.tools</groupId>
-          <artifactId>jdk.tools</artifactId>
-        </exclusion>
-      </exclusions>
-    </dependency>
     <dependency>
       <groupId>org.apache.hbase</groupId>
       <artifactId>hbase-annotations</artifactId>


@@ -18,8 +18,8 @@
  */
 package org.apache.hadoop.hbase;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceStability;
 /**
  * Interface to support the aborting of a given server or client.


@@ -38,7 +38,7 @@ import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.MetaTableAccessor.CollectingVisitor;
 import org.apache.hadoop.hbase.MetaTableAccessor.QueryType;
 import org.apache.hadoop.hbase.MetaTableAccessor.Visitor;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.client.Consistency;
 import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.RawAsyncTable;


@@ -20,7 +20,7 @@ package org.apache.hadoop.hbase;
 import java.io.IOException;
 
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Returned to the clients when their request was discarded due to server being overloaded.


@@ -20,7 +20,7 @@ package org.apache.hadoop.hbase;
 import java.io.IOException;
 
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 
 @SuppressWarnings("serial")
 @InterfaceAudience.Public


@@ -20,7 +20,7 @@ package org.apache.hadoop.hbase;
 import java.io.IOException;
 
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * This exception is thrown by the master when a region server clock skew is


@@ -21,7 +21,7 @@ package org.apache.hadoop.hbase;
 import java.io.IOException;
 import java.util.UUID;
 
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterIdProtos;


@@ -25,7 +25,7 @@ import java.util.Collections;
 import java.util.List;
 import java.util.Map;
 
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.master.RegionState;
 
 import com.google.common.base.Objects;


@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hbase;
 
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Generic set of comparison operators.
@@ -41,4 +41,4 @@ public enum CompareOperator {
   GREATER,
   /** no operation */
   NO_OP,
 }


@ -18,7 +18,7 @@
*/ */
package org.apache.hadoop.hbase; package org.apache.hadoop.hbase;
import org.apache.hadoop.hbase.classification.InterfaceAudience; import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.exceptions.HBaseException; import org.apache.hadoop.hbase.exceptions.HBaseException;
/** /**


@@ -21,8 +21,8 @@ package org.apache.hadoop.hbase;
 import java.io.IOException;
 
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
-import org.apache.hadoop.hbase.classification.InterfaceStability;
+import org.apache.yetus.audience.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceStability;
 
 /**
  * Coprocessor interface.


@@ -23,7 +23,7 @@ import java.io.IOException;
 import java.util.concurrent.ExecutorService;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.client.Table;
 
 /**


@@ -18,7 +18,7 @@
  */
 package org.apache.hadoop.hbase;
 
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Subclass if exception is not meant to be retried: e.g.


@@ -20,7 +20,7 @@ package org.apache.hadoop.hbase;
 import java.io.IOException;
 
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**


@@ -20,7 +20,7 @@ package org.apache.hadoop.hbase;
 import java.util.Map;
 
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
 import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
 import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor;


@@ -28,7 +28,7 @@ import java.util.stream.Collectors;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.client.RegionReplicaUtil;
 import org.apache.hadoop.hbase.KeyValue.KVComparator;
 import org.apache.hadoop.hbase.exceptions.DeserializationException;


@@ -18,7 +18,7 @@
  */
 package org.apache.hadoop.hbase;
 
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.util.Addressing;
 
 /**


@@ -27,7 +27,7 @@ import java.util.stream.Collectors;
 import java.util.stream.Stream;
 
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.client.TableDescriptorBuilder;


@@ -18,7 +18,7 @@
  */
 package org.apache.hadoop.hbase;
 
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Thrown if a request is table schema modification is requested but


@@ -18,7 +18,7 @@
  */
 package org.apache.hadoop.hbase;
 
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Ways to keep cells marked for delete around.


@@ -20,7 +20,7 @@ package org.apache.hadoop.hbase;
 import java.io.IOException;
 
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Thrown if the master is not running


@@ -18,7 +18,7 @@
  */
 package org.apache.hadoop.hbase;
 
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Enum describing all possible memory compaction policies


@@ -36,7 +36,7 @@ import java.util.regex.Pattern;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.client.Consistency;


@@ -18,7 +18,7 @@
 package org.apache.hadoop.hbase;
 
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Exception thrown when the result needs to be chunked on the server side.


@@ -18,7 +18,7 @@
 package org.apache.hadoop.hbase;
 
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Thrown when a namespace exists but should not


@@ -18,7 +18,7 @@
 package org.apache.hadoop.hbase;
 
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Thrown when a namespace can not be located


@@ -19,7 +19,7 @@
 package org.apache.hadoop.hbase;
 
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Thrown when an operation requires the root and all meta regions to be online


@@ -20,7 +20,7 @@ package org.apache.hadoop.hbase;
 import java.io.IOException;
 
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.util.Bytes;
 
 /**


@@ -18,7 +18,7 @@
  */
 package org.apache.hadoop.hbase;
 
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * This exception is thrown by the master when a region server was shut down and


@@ -18,7 +18,7 @@
  */
 package org.apache.hadoop.hbase;
 
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Thrown when something happens related to region handling.


@@ -22,7 +22,7 @@ package org.apache.hadoop.hbase;
 import java.util.List;
 
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos.StoreSequenceId;
 import org.apache.hadoop.hbase.util.Bytes;


@@ -20,7 +20,7 @@ package org.apache.hadoop.hbase;
 import java.util.Collection;
 
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.client.RegionReplicaUtil;
 import org.apache.hadoop.hbase.util.Bytes;


@@ -19,7 +19,7 @@ package org.apache.hadoop.hbase;
 import java.io.IOException;
 
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Thrown by a region server if it will block and wait to serve a request.


@@ -18,7 +18,7 @@
 package org.apache.hadoop.hbase;
 
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Thrown when a replication peer can not be found


@@ -19,7 +19,7 @@
 package org.apache.hadoop.hbase;
 
 import java.io.IOException;
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 
 @InterfaceAudience.Public
 public class RetryImmediatelyException extends IOException {


@@ -26,7 +26,7 @@ import java.util.Map;
 import java.util.TreeMap;
 import java.util.TreeSet;
 
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.ClusterStatusProtos;
 import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.Coprocessor;


@@ -18,7 +18,7 @@
  */
 package org.apache.hadoop.hbase;
 
-import org.apache.hadoop.hbase.classification.InterfaceAudience;
+import org.apache.yetus.audience.InterfaceAudience;
 
 /**
  * Thrown when a table exists but should not

Some files were not shown because too many files have changed in this diff.