HBASE-20656 Validate pre-2.0 coprocessors against HBase 2.0+

Signed-off-by: Mike Drob <mdrob@apache.org>
Balazs Meszaros 2018-05-23 13:49:19 +02:00 committed by Mike Drob
parent eb13cdd7ed
commit c323e7bfaa
13 changed files with 2012 additions and 89 deletions


@@ -38,4 +38,5 @@
<suppress checks="InterfaceIsTypeCheck" files=".*/src/main/.*\.java"/>
<suppress checks="EmptyBlockCheck" files="TBoundedThreadPoolServer.java"/>
<suppress checks="EqualsHashCode" files="StartcodeAgnosticServerName.java"/>
<suppress checks="MethodLength" files="Branch1CoprocessorMethods.java"/>
</suppressions>


@@ -22,7 +22,6 @@ import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.util.Tool;
@@ -46,9 +45,9 @@ import org.apache.hbase.thirdparty.org.apache.commons.cli.ParseException;
* command-line argument parsing.
*/
@InterfaceAudience.Private
public abstract class AbstractHBaseTool implements Tool, Configurable {
protected static final int EXIT_SUCCESS = 0;
protected static final int EXIT_FAILURE = 1;
public abstract class AbstractHBaseTool implements Tool {
public static final int EXIT_SUCCESS = 0;
public static final int EXIT_FAILURE = 1;
public static final String SHORT_HELP_OPTION = "h";
public static final String LONG_HELP_OPTION = "help";


@@ -21,13 +21,9 @@ package org.apache.hadoop.hbase.coprocessor;
import java.io.IOException;
import org.apache.hadoop.hbase.Coprocessor;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.TableName;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest;
/**
* Coprocessors implement this interface to observe and mediate bulk load operations.
@@ -55,7 +51,7 @@ public interface BulkLoadObserver {
* It can't bypass the default action, e.g., ctx.bypass() won't have any effect.
* If you need to get the region or table name, get it from the
* <code>ctx</code> as follows: <code>ctx.getEnvironment().getRegion()</code>. Use
* getRegionInfo to fetch the encodedName and use getTabldDescriptor() to get the tableName.
* getRegionInfo to fetch the encodedName and use getTableDescriptor() to get the tableName.
* @param ctx the environment to interact with the framework and master
*/
default void prePrepareBulkLoad(ObserverContext<RegionCoprocessorEnvironment> ctx)
@@ -66,7 +62,7 @@
* It can't bypass the default action, e.g., ctx.bypass() won't have any effect.
* If you need to get the region or table name, get it from the
* <code>ctx</code> as follows: <code>ctx.getEnvironment().getRegion()</code>. Use
* getRegionInfo to fetch the encodedName and use getTabldDescriptor() to get the tableName.
* getRegionInfo to fetch the encodedName and use getTableDescriptor() to get the tableName.
* @param ctx the environment to interact with the framework and master
*/
default void preCleanupBulkLoad(ObserverContext<RegionCoprocessorEnvironment> ctx)

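
The javadoc above describes the lookup pattern in words; here is a minimal sketch of it in a hypothetical observer (the class below is illustrative, not part of this commit):

public class MetricsBulkLoadObserver implements BulkLoadObserver {
  @Override
  public void prePrepareBulkLoad(ObserverContext<RegionCoprocessorEnvironment> ctx)
      throws IOException {
    // Both names come from the environment, as the javadoc suggests.
    String encodedName = ctx.getEnvironment().getRegion().getRegionInfo().getEncodedName();
    TableName tableName = ctx.getEnvironment().getRegion().getTableDescriptor().getTableName();
    // ... inspect or record the bulk load attempt for tableName/encodedName ...
  }
}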

@@ -0,0 +1,108 @@
/**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.tool;
import java.io.IOException;
import java.util.List;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.util.AbstractHBaseTool;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
public class DataBlockEncodingValidator extends AbstractHBaseTool {
private static final Logger LOG = LoggerFactory.getLogger(DataBlockEncodingValidator.class);
private static final byte[] DATA_BLOCK_ENCODING = Bytes.toBytes("DATA_BLOCK_ENCODING");
/**
* Check whether the DataBlockEncodings of column families are compatible.
*
* @return number of column families with incompatible DataBlockEncoding
* @throws IOException if a remote or network exception occurs
*/
private int validateDBE() throws IOException {
int incompatibilities = 0;
LOG.info("Validating Data Block Encodings");
try (Connection connection = ConnectionFactory.createConnection(getConf());
Admin admin = connection.getAdmin()) {
List<TableDescriptor> tableDescriptors = admin.listTableDescriptors();
String encoding = "";
for (TableDescriptor td : tableDescriptors) {
ColumnFamilyDescriptor[] columnFamilies = td.getColumnFamilies();
for (ColumnFamilyDescriptor cfd : columnFamilies) {
try {
encoding = Bytes.toString(cfd.getValue(DATA_BLOCK_ENCODING));
// IllegalArgumentException will be thrown if encoding is incompatible with 2.0
DataBlockEncoding.valueOf(encoding);
} catch (IllegalArgumentException e) {
incompatibilities++;
LOG.warn("Incompatible DataBlockEncoding for table: {}, cf: {}, encoding: {}",
td.getTableName().getNameAsString(), cfd.getNameAsString(), encoding);
}
}
}
}
if (incompatibilities > 0) {
LOG.warn("There are {} column families with incompatible Data Block Encodings. Do not "
+ "upgrade until these encodings are converted to a supported one.", incompatibilities);
LOG.warn("Check http://hbase.apache.org/book.html#upgrade2.0.prefix-tree.removed "
+ "for instructions.");
} else {
LOG.info("The used Data Block Encodings are compatible with HBase 2.0.");
}
return incompatibilities;
}
@Override
protected void printUsage() {
String header = "hbase " + PreUpgradeValidator.TOOL_NAME + " " +
PreUpgradeValidator.VALIDATE_DBE_NAME;
printUsage(header, null, "");
}
@Override
protected void addOptions() {
}
@Override
protected void processOptions(CommandLine cmd) {
}
@Override
protected int doWork() throws Exception {
return (validateDBE() == 0) ? EXIT_SUCCESS : EXIT_FAILURE;
}
}
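
The check above hinges on DataBlockEncoding.valueOf() rejecting enum names that no longer exist. A minimal sketch, assuming a column family still configured with the PREFIX_TREE encoding that was removed in HBase 2.0 (see the book link in the warning above):

String encoding = "PREFIX_TREE";
try {
  // Throws IllegalArgumentException: PREFIX_TREE is no longer a
  // DataBlockEncoding constant, so the validator counts the family
  // as incompatible.
  DataBlockEncoding.valueOf(encoding);
} catch (IllegalArgumentException e) {
  System.err.println("Incompatible encoding: " + encoding);
}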


@@ -18,111 +18,99 @@
*/
package org.apache.hadoop.hbase.tool;
import java.io.IOException;
import java.util.List;
import java.util.Arrays;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.tool.coprocessor.CoprocessorValidator;
import org.apache.hadoop.hbase.util.AbstractHBaseTool;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;
/**
* Tool for validating that cluster can be upgraded from HBase 1.x to 2.0
* <p>
* Available validations:
* <ul>
* <li>all: Run all pre-upgrade validations</li>
* <li>validateDBE: Check Data Block Encoding for column families</li>
* <li>validate-cp: Validates Co-processors compatibility</li>
* <li>validate-dbe: Check Data Block Encoding for column families</li>
* </ul>
* </p>
*/
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
public class PreUpgradeValidator extends AbstractHBaseTool {
public class PreUpgradeValidator implements Tool {
private static final Logger LOG = LoggerFactory
.getLogger(PreUpgradeValidator.class);
public static final String NAME = "pre-upgrade";
private static final Logger LOG = LoggerFactory.getLogger(PreUpgradeValidator.class);
private static final byte[] DATA_BLOCK_ENCODING = Bytes.toBytes("DATA_BLOCK_ENCODING");
private boolean validateAll;
private boolean validateDBE;
public static final String TOOL_NAME = "pre-upgrade";
public static final String VALIDATE_CP_NAME = "validate-cp";
public static final String VALIDATE_DBE_NAME = "validate-dbe";
/**
* Check DataBlockEncodings of column families are compatible.
*
* @return number of column families with incompatible DataBlockEncoding
* @throws IOException if a remote or network exception occurs
*/
private int validateDBE() throws IOException {
int incompatibilities = 0;
private Configuration configuration;
LOG.info("Validating Data Block Encodings");
try (Connection connection = ConnectionFactory.createConnection(getConf());
Admin admin = connection.getAdmin()) {
List<TableDescriptor> tableDescriptors = admin.listTableDescriptors();
String encoding = "";
for (TableDescriptor td : tableDescriptors) {
ColumnFamilyDescriptor[] columnFamilies = td.getColumnFamilies();
for (ColumnFamilyDescriptor cfd : columnFamilies) {
try {
encoding = Bytes.toString(cfd.getValue(DATA_BLOCK_ENCODING));
// IllegalArgumentException will be thrown if encoding is incompatible with 2.0
DataBlockEncoding.valueOf(encoding);
} catch (IllegalArgumentException e) {
incompatibilities++;
LOG.warn("Incompatible DataBlockEncoding for table: {}, cf: {}, encoding: {}",
td.getTableName().getNameAsString(), cfd.getNameAsString(), encoding);
}
}
}
}
if (incompatibilities > 0) {
LOG.warn("There are {} column families with incompatible Data Block Encodings. Do not "
+ "upgrade until these encodings are converted to a supported one.", incompatibilities);
LOG.warn("Check http://hbase.apache.org/book.html#upgrade2.0.prefix-tree.removed "
+ "for instructions.");
} else {
LOG.info("The used Data Block Encodings are compatible with HBase 2.0.");
}
return incompatibilities;
@Override
public Configuration getConf() {
return configuration;
}
@Override
protected void addOptions() {
addOptNoArg("all", "Run all pre-upgrade validations");
addOptNoArg("validateDBE", "Validate DataBlockEncodings are compatible");
public void setConf(Configuration conf) {
this.configuration = conf;
}
private void printUsage() {
System.out.println("usage: hbase " + TOOL_NAME + " command ...");
System.out.println("Available commands:");
System.out.printf(" %-12s Validate co-processors are compatible with HBase%n",
VALIDATE_CP_NAME);
System.out.printf(" %-12s Validate DataBlockEncoding are compatible on the cluster%n",
VALIDATE_DBE_NAME);
System.out.println("For further information, please use command -h");
}
@Override
protected void processOptions(CommandLine cmd) {
validateAll = cmd.hasOption("all");
validateDBE = cmd.hasOption("validateDBE");
public int run(String[] args) throws Exception {
if (args.length == 0) {
printUsage();
return AbstractHBaseTool.EXIT_FAILURE;
}
@Override
protected int doWork() throws Exception {
boolean validationFailed = false;
if (validateDBE || validateAll) {
if (validateDBE() > 0) {
validationFailed = true;
}
Tool tool;
switch (args[0]) {
case VALIDATE_CP_NAME:
tool = new CoprocessorValidator();
break;
case VALIDATE_DBE_NAME:
tool = new DataBlockEncodingValidator();
break;
case "-h":
printUsage();
return AbstractHBaseTool.EXIT_FAILURE;
default:
System.err.println("Unknown command: " + args[0]);
printUsage();
return AbstractHBaseTool.EXIT_FAILURE;
}
return validationFailed ? 1 : 0;
tool.setConf(getConf());
return tool.run(Arrays.copyOfRange(args, 1, args.length));
}
public static void main(String[] args) {
new PreUpgradeValidator().doStaticMain(args);
int ret;
try {
ret = ToolRunner.run(HBaseConfiguration.create(), new PreUpgradeValidator(), args);
} catch (Exception e) {
LOG.error("Error running command-line tool", e);
ret = AbstractHBaseTool.EXIT_FAILURE;
}
System.exit(ret);
}
}


@@ -0,0 +1,73 @@
/**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.tool.coprocessor;
import java.util.ArrayList;
import java.util.List;
import java.util.Objects;
import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Private
public class CoprocessorMethod {
private final String name;
private final List<String> parameters;
public CoprocessorMethod(String name) {
this.name = name;
parameters = new ArrayList<>();
}
public CoprocessorMethod withParameters(String ... parameters) {
for (String parameter : parameters) {
this.parameters.add(parameter);
}
return this;
}
public CoprocessorMethod withParameters(Class<?> ... parameters) {
for (Class<?> parameter : parameters) {
this.parameters.add(parameter.getCanonicalName());
}
return this;
}
@Override
public boolean equals(Object obj) {
if (obj == this) {
return true;
} else if (!(obj instanceof CoprocessorMethod)) {
return false;
}
CoprocessorMethod other = (CoprocessorMethod)obj;
return Objects.equals(name, other.name) &&
Objects.equals(parameters, other.parameters);
}
@Override
public int hashCode() {
return Objects.hash(name, parameters);
}
}
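
A short usage sketch: the string-based and Class-based forms compare equal because both are stored as canonical class names. This is what lets pre-2.0 signatures, kept as plain strings since branch-1 classes are not on the classpath, be matched against reflection-derived methods:

CoprocessorMethod byName = new CoprocessorMethod("prePrepareBulkLoad")
    .withParameters("org.apache.hadoop.hbase.coprocessor.ObserverContext");
CoprocessorMethod byClass = new CoprocessorMethod("prePrepareBulkLoad")
    .withParameters(ObserverContext.class);
// true: equals() and hashCode() are defined over the name plus parameter names
boolean same = byName.equals(byClass);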


@@ -0,0 +1,66 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.tool.coprocessor;
import java.lang.reflect.Method;
import java.util.HashSet;
import java.util.Set;
import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Private
public class CoprocessorMethods {
private final Set<CoprocessorMethod> methods;
public CoprocessorMethods() {
methods = new HashSet<>();
}
public void addMethod(String name, String ... parameters) {
CoprocessorMethod cpMethod = new CoprocessorMethod(name).withParameters(parameters);
methods.add(cpMethod);
}
public void addMethod(String name, Class<?> ... parameters) {
CoprocessorMethod cpMethod = new CoprocessorMethod(name).withParameters(parameters);
methods.add(cpMethod);
}
public void addMethod(Method method) {
CoprocessorMethod cpMethod = new CoprocessorMethod(method.getName())
.withParameters(method.getParameterTypes());
methods.add(cpMethod);
}
public boolean hasMethod(String name, String ... parameters) {
CoprocessorMethod method = new CoprocessorMethod(name).withParameters(parameters);
return methods.contains(method);
}
public boolean hasMethod(String name, Class<?> ... parameters) {
CoprocessorMethod method = new CoprocessorMethod(name).withParameters(parameters);
return methods.contains(method);
}
public boolean hasMethod(Method method) {
CoprocessorMethod cpMethod = new CoprocessorMethod(method.getName())
.withParameters(method.getParameterTypes());
return methods.contains(cpMethod);
}
}
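
For example, every method declared on an observer interface can be registered and then probed either by name or via reflection (a sketch reusing BulkLoadObserver from above; note that getMethod() throws the checked NoSuchMethodException):

CoprocessorMethods methods = new CoprocessorMethods();
for (Method m : BulkLoadObserver.class.getDeclaredMethods()) {
  methods.addMethod(m);
}
Method probe = BulkLoadObserver.class.getMethod("prePrepareBulkLoad", ObserverContext.class);
boolean known = methods.hasMethod(probe); // true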


@@ -0,0 +1,247 @@
/**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.tool.coprocessor;
import java.io.IOException;
import java.lang.reflect.Method;
import java.net.URL;
import java.net.URLClassLoader;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.ArrayList;
import java.util.List;
import java.util.jar.JarEntry;
import java.util.jar.JarFile;
import java.util.stream.Collectors;
import org.apache.hadoop.hbase.Coprocessor;
import org.apache.hadoop.hbase.HBaseInterfaceAudience;
import org.apache.hadoop.hbase.tool.PreUpgradeValidator;
import org.apache.hadoop.hbase.tool.coprocessor.CoprocessorViolation.Severity;
import org.apache.hadoop.hbase.util.AbstractHBaseTool;
import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;
import org.apache.hbase.thirdparty.org.apache.commons.cli.ParseException;
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
public class CoprocessorValidator extends AbstractHBaseTool {
private static final Logger LOG = LoggerFactory
.getLogger(CoprocessorValidator.class);
private CoprocessorMethods branch1;
private CoprocessorMethods current;
private boolean dieOnWarnings;
private boolean scan;
private List<String> args;
public CoprocessorValidator() {
branch1 = new Branch1CoprocessorMethods();
current = new CurrentCoprocessorMethods();
}
/**
* This classloader implementation calls the {@link #resolveClass(Class)}
* method for every loaded class, which means that some extra validation
* takes place <a
* href="https://docs.oracle.com/javase/specs/jls/se8/html/jls-12.html#jls-12.3">
* according to JLS</a>.
*/
private static final class ResolverUrlClassLoader extends URLClassLoader {
private ResolverUrlClassLoader(URL[] urls) {
super(urls, ResolverUrlClassLoader.class.getClassLoader());
}
@Override
public Class<?> loadClass(String name) throws ClassNotFoundException {
return loadClass(name, true);
}
}
private ResolverUrlClassLoader createClassLoader(URL[] urls) {
return AccessController.doPrivileged(new PrivilegedAction<ResolverUrlClassLoader>() {
@Override
public ResolverUrlClassLoader run() {
return new ResolverUrlClassLoader(urls);
}
});
}
private void validate(ClassLoader classLoader, String className,
List<CoprocessorViolation> violations) {
LOG.debug("Validating class '{}'.", className);
try {
Class<?> clazz = classLoader.loadClass(className);
for (Method method : clazz.getDeclaredMethods()) {
LOG.trace("Validating method '{}'.", method);
if (branch1.hasMethod(method) && !current.hasMethod(method)) {
CoprocessorViolation violation = new CoprocessorViolation(Severity.WARNING,
"Method '" + method + "' was removed from new coprocessor API, "
+ "so it won't be called by HBase.");
violations.add(violation);
}
}
} catch (ClassNotFoundException e) {
CoprocessorViolation violation = new CoprocessorViolation(Severity.ERROR,
"No such class '" + className + "'.", e);
violations.add(violation);
} catch (RuntimeException | Error e) {
CoprocessorViolation violation = new CoprocessorViolation(Severity.ERROR,
"Could not validate class '" + className + "'.", e);
violations.add(violation);
}
}
public List<CoprocessorViolation> validate(ClassLoader classLoader, List<String> classNames) {
List<CoprocessorViolation> violations = new ArrayList<>();
for (String className : classNames) {
validate(classLoader, className, violations);
}
return violations;
}
public List<CoprocessorViolation> validate(List<URL> urls, List<String> classNames)
throws IOException {
URL[] urlArray = new URL[urls.size()];
urls.toArray(urlArray);
try (ResolverUrlClassLoader classLoader = createClassLoader(urlArray)) {
return validate(classLoader, classNames);
}
}
@VisibleForTesting
protected List<String> getJarClasses(Path path) throws IOException {
try (JarFile jarFile = new JarFile(path.toFile())) {
return jarFile.stream()
.map(JarEntry::getName)
.filter((name) -> name.endsWith(".class"))
.map((name) -> name.substring(0, name.length() - 6).replace('/', '.'))
.collect(Collectors.toList());
}
}
@VisibleForTesting
protected List<String> filterObservers(ClassLoader classLoader,
Iterable<String> classNames) throws ClassNotFoundException {
List<String> filteredClassNames = new ArrayList<>();
for (String className : classNames) {
LOG.debug("Scanning class '{}'.", className);
Class<?> clazz = classLoader.loadClass(className);
if (Coprocessor.class.isAssignableFrom(clazz)) {
LOG.debug("Found coprocessor class '{}'.", className);
filteredClassNames.add(className);
}
}
return filteredClassNames;
}
@Override
protected void printUsage() {
String header = "hbase " + PreUpgradeValidator.TOOL_NAME + " " +
PreUpgradeValidator.VALIDATE_CP_NAME + " <jar> -scan|<classes>";
printUsage(header, "Options:", "");
}
@Override
protected void addOptions() {
addOptNoArg("e", "Treat warnings as errors.");
addOptNoArg("scan", "Scan jar for observers.");
}
@Override
protected void processOptions(CommandLine cmd) {
scan = cmd.hasOption("scan");
dieOnWarnings = cmd.hasOption("e");
args = cmd.getArgList();
}
@Override
protected int doWork() throws Exception {
if (args.size() < 1) {
System.err.println("Missing jar file.");
printUsage();
return EXIT_FAILURE;
}
String jar = args.get(0);
if (args.size() == 1 && !scan) {
throw new ParseException("Missing classes or -scan option.");
} else if (args.size() > 1 && scan) {
throw new ParseException("Can't use classes with -scan option.");
}
Path jarPath = Paths.get(jar);
URL[] urls = new URL[] { jarPath.toUri().toURL() };
List<CoprocessorViolation> violations;
try (ResolverUrlClassLoader classLoader = createClassLoader(urls)) {
List<String> classNames;
if (scan) {
List<String> jarClassNames = getJarClasses(jarPath);
classNames = filterObservers(classLoader, jarClassNames);
} else {
classNames = args.subList(1, args.size());
}
violations = validate(classLoader, classNames);
}
boolean error = false;
for (CoprocessorViolation violation : violations) {
switch (violation.getSeverity()) {
case WARNING:
System.err.println("[WARNING] " + violation.getMessage());
if (dieOnWarnings) {
error = true;
}
break;
case ERROR:
System.err.println("[ERROR] " + violation.getMessage());
error = true;
break;
}
}
return (error) ? EXIT_FAILURE : EXIT_SUCCESS;
}
}
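
Besides the command-line entry point, the public validate(List<URL>, List<String>) overload makes the same check usable programmatically; the jar path and class name below are hypothetical:

List<URL> urls = Collections.singletonList(Paths.get("my-coprocessor.jar").toUri().toURL());
List<CoprocessorViolation> violations = new CoprocessorValidator()
    .validate(urls, Arrays.asList("com.example.MyRegionObserver"));
for (CoprocessorViolation violation : violations) {
  System.err.println("[" + violation.getSeverity() + "] " + violation.getMessage());
}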


@@ -0,0 +1,56 @@
/**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.tool.coprocessor;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hbase.thirdparty.com.google.common.base.Throwables;
@InterfaceAudience.Private
public class CoprocessorViolation {
public enum Severity {
WARNING, ERROR
}
private final Severity severity;
private final String message;
public CoprocessorViolation(Severity severity, String message) {
this(severity, message, null);
}
public CoprocessorViolation(Severity severity, String message, Throwable t) {
this.severity = severity;
if (t == null) {
this.message = message;
} else {
this.message = message + "\n" + Throwables.getStackTraceAsString(t);
}
}
public Severity getSeverity() {
return severity;
}
public String getMessage() {
return message;
}
}


@@ -0,0 +1,47 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.tool.coprocessor;
import java.lang.reflect.Method;
import org.apache.hadoop.hbase.coprocessor.BulkLoadObserver;
import org.apache.hadoop.hbase.coprocessor.EndpointObserver;
import org.apache.hadoop.hbase.coprocessor.MasterObserver;
import org.apache.hadoop.hbase.coprocessor.RegionObserver;
import org.apache.hadoop.hbase.coprocessor.RegionServerObserver;
import org.apache.hadoop.hbase.coprocessor.WALObserver;
import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Private
public class CurrentCoprocessorMethods extends CoprocessorMethods {
public CurrentCoprocessorMethods() {
addMethods(BulkLoadObserver.class);
addMethods(EndpointObserver.class);
addMethods(MasterObserver.class);
addMethods(RegionObserver.class);
addMethods(RegionServerObserver.class);
addMethods(WALObserver.class);
}
private void addMethods(Class<?> clazz) {
for (Method method : clazz.getDeclaredMethods()) {
addMethod(method);
}
}
}
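
Its counterpart, Branch1CoprocessorMethods (not shown in this diff; its sheer length is why the checkstyle MethodLength suppression was added above), cannot reflect over the 1.x observer interfaces because they are not on the 2.0 classpath, so it registers each pre-2.0 signature as strings. An abbreviated, representative sketch:

public class Branch1CoprocessorMethods extends CoprocessorMethods {
  public Branch1CoprocessorMethods() {
    // Parameter types are plain strings because the branch-1 classes
    // do not necessarily exist on the current classpath.
    addMethod("prePrepareBulkLoad",
        "org.apache.hadoop.hbase.coprocessor.ObserverContext",
        "org.apache.hadoop.hbase.protobuf.generated.SecureBulkLoadProtos.PrepareBulkLoadRequest");
    // ... hundreds of further branch-1 observer signatures omitted ...
  }
}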


@@ -0,0 +1,177 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.tool.coprocessor;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.util.List;
import org.apache.hadoop.hbase.Coprocessor;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
import org.apache.hadoop.hbase.coprocessor.ObserverContext;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.tool.coprocessor.CoprocessorViolation.Severity;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
@Category({ SmallTests.class })
@SuppressWarnings("deprecation")
public class CoprocessorValidatorTest {
@ClassRule
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(CoprocessorValidatorTest.class);
private CoprocessorValidator validator;
public CoprocessorValidatorTest() {
validator = new CoprocessorValidator();
}
private static ClassLoader getClassLoader() {
return CoprocessorValidatorTest.class.getClassLoader();
}
private static String getFullClassName(String className) {
return CoprocessorValidatorTest.class.getName() + "$" + className;
}
@SuppressWarnings({"rawtypes", "unused"})
private static class TestObserver implements Coprocessor {
@Override
public void start(CoprocessorEnvironment env) throws IOException {
}
@Override
public void stop(CoprocessorEnvironment env) throws IOException {
}
}
@Test
public void testFilterObservers() throws Exception {
String filterObservers = getFullClassName("TestObserver");
List<String> classNames = Lists.newArrayList(
filterObservers, getClass().getName());
List<String> filteredClassNames = validator.filterObservers(getClassLoader(), classNames);
assertEquals(1, filteredClassNames.size());
assertEquals(filterObservers, filteredClassNames.get(0));
}
private List<CoprocessorViolation> validate(String className) {
ClassLoader classLoader = getClass().getClassLoader();
return validate(classLoader, className);
}
private List<CoprocessorViolation> validate(ClassLoader classLoader, String className) {
List<String> classNames = Lists.newArrayList(getClass().getName() + "$" + className);
return validator.validate(classLoader, classNames);
}
/*
* In this test case, we try to load a nonexistent class.
*/
@Test
public void testNoSuchClass() throws IOException {
List<CoprocessorViolation> violations = validate("NoSuchClass");
assertEquals(1, violations.size());
CoprocessorViolation violation = violations.get(0);
assertEquals(Severity.ERROR, violation.getSeverity());
assertTrue(violation.getMessage().contains(
"java.lang.ClassNotFoundException: " +
"org.apache.hadoop.hbase.tool.coprocessor.CoprocessorValidatorTest$NoSuchClass"));
}
/*
* In this test case, we validate the MissingClassObserver coprocessor, which
* references a missing class. With a special classloader, we prevent that
* class from being loaded at runtime. This simulates cases where a class
* is no longer on our classpath.
* E.g. org.apache.hadoop.hbase.regionserver.wal.WALEdit was moved to
* org.apache.hadoop.hbase.wal, so class loading will fail on 2.0.
*/
private static class MissingClass {
}
@SuppressWarnings("unused")
private static class MissingClassObserver {
public void method(MissingClass missingClass) {
}
}
private static class MissingClassClassLoader extends ClassLoader {
public MissingClassClassLoader() {
super(getClassLoader());
}
@Override
public Class<?> loadClass(String name) throws ClassNotFoundException {
if (name.equals(getFullClassName("MissingClass"))) {
throw new ClassNotFoundException(name);
}
return super.findClass(name);
}
}
@Test
public void testMissingClass() throws IOException {
MissingClassClassLoader missingClassClassLoader = new MissingClassClassLoader();
List<CoprocessorViolation> violations = validate(missingClassClassLoader,
"MissingClassObserver");
assertEquals(1, violations.size());
CoprocessorViolation violation = violations.get(0);
assertEquals(Severity.ERROR, violation.getSeverity());
assertTrue(violation.getMessage().contains(
"java.lang.ClassNotFoundException: " +
"org.apache.hadoop.hbase.tool.coprocessor.CoprocessorValidatorTest$MissingClass"));
}
/*
* ObsoleteMethodObserver implements the preCreateTable method, which takes
* HRegionInfo parameters. The current coprocessor API passes only
* RegionInfo parameters, so this method won't be called by HBase at all.
*/
@SuppressWarnings("unused")
private static class ObsoleteMethodObserver /* implements MasterObserver */ {
public void preCreateTable(ObserverContext<MasterCoprocessorEnvironment> ctx,
HTableDescriptor desc, HRegionInfo[] regions) throws IOException {
}
}
@Test
public void testObsoleteMethod() throws IOException {
List<CoprocessorViolation> violations = validate("ObsoleteMethodObserver");
assertEquals(1, violations.size());
CoprocessorViolation violation = violations.get(0);
assertEquals(Severity.WARNING, violation.getSeverity());
assertTrue(violation.getMessage().contains("was removed from new coprocessor API"));
}
}


@@ -845,11 +845,39 @@ The LoadTestTool has received many updates in recent HBase releases, including s
[[ops.pre-upgrade]]
=== Pre-Upgrade validator
The Pre-Upgrade validator tool can be used to check the cluster for known incompatibilities before upgrading from HBase 1 to HBase 2.
To run all the checks use the `-all` flag.
[source, bash]
----
$ bin/hbase pre-upgrade -all
$ bin/hbase pre-upgrade command ...
----
==== Coprocessor validation
HBase has supported co-processors for a long time, but the co-processor API can change between major releases. The co-processor validator tries to determine
whether your old co-processors are still compatible with the current HBase version.
[source, bash]
----
$ bin/hbase pre-upgrade validate-cp <jar> -scan|<classes>
Options:
-e Treat warnings as errors.
-scan Scan jar for observers.
----
The first parameter of the tool is the `jar` file which holds the co-processor implementation. It is followed either by `-scan`, in which case the tool
searches the jar file for `Coprocessor` implementations, or by an explicit list of `classes`.
The tool can report errors and warnings. Errors mean that HBase won't be able to load the co-processor, because it is incompatible with the current version
of HBase. Warnings mean that the co-processor can be loaded, but it won't work as expected. If the `-e` option is given, the tool also fails
for warnings.
Please note that this tool cannot validate every aspect of the jar file; it only performs some static checks.
For example:
[source, bash]
----
$ bin/hbase pre-upgrade validate-cp my-coprocessor.jar MyMasterObserver MyRegionObserver
----
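
If you are unsure which classes of the jar implement `Coprocessor`, let the tool discover them (the jar name is illustrative):

[source, bash]
----
$ bin/hbase pre-upgrade validate-cp my-coprocessor.jar -scan
----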
==== DataBlockEncoding validation
@@ -858,7 +886,7 @@ To verify that none of the column families are using incompatible Data Block Enc
[source, bash]
----
$ bin/hbase pre-upgrade -validateDBE
$ bin/hbase pre-upgrade validate-dbe
----
This check validates all column families and prints out any incompatibilities.