HBASE-20833 Modify pre-upgrade coprocessor validator to support table level coprocessors

- -jar parameter now accepts multiple jar files and directories of jar files.
- observer classes can be verified with the -class option.
- -table parameter was added to check table level coprocessors.
- -config parameter was added to obtain the coprocessor classes from
  the HBase configuration.
- -scan option was removed.
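
Example invocations of the updated tool (illustrative only; the jar and class names are placeholders):

[source, bash]
----
# validate explicitly named coprocessor classes, loading them from a local jar
$ bin/hbase pre-upgrade validate-cp -jar my-coprocessor.jar -class MyMasterObserver -class MyRegionObserver

# validate the coprocessors attached to every table whose name matches a pattern
$ bin/hbase pre-upgrade validate-cp -table .*

# validate the coprocessor classes listed in the HBase configuration, treating warnings as errors
$ bin/hbase pre-upgrade validate-cp -config -e
----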

Signed-off-by: Mike Drob <mdrob@apache.org>
Balazs Meszaros 2018-07-02 12:11:03 +02:00 committed by Mike Drob
parent 59867eeeeb
commit ad5b4af2c4
5 changed files with 348 additions and 129 deletions


@@ -65,9 +65,9 @@ public class PreUpgradeValidator implements Tool {
   private void printUsage() {
     System.out.println("usage: hbase " + TOOL_NAME + " command ...");
     System.out.println("Available commands:");
-    System.out.printf(" %-12s Validate co-processors are compatible with HBase%n",
+    System.out.printf(" %-15s Validate co-processors are compatible with HBase%n",
       VALIDATE_CP_NAME);
-    System.out.printf(" %-12s Validate DataBlockEncoding are compatible on the cluster%n",
+    System.out.printf(" %-15s Validate DataBlockEncodings are compatible with HBase%n",
       VALIDATE_DBE_NAME);
     System.out.println("For further information, please use command -h");
   }
@@ -104,8 +104,10 @@ public class PreUpgradeValidator implements Tool {
   public static void main(String[] args) {
     int ret;
+    Configuration conf = HBaseConfiguration.create();
+
     try {
-      ret = ToolRunner.run(HBaseConfiguration.create(), new PreUpgradeValidator(), args);
+      ret = ToolRunner.run(conf, new PreUpgradeValidator(), args);
     } catch (Exception e) {
       LOG.error("Error running command-line tool", e);
       ret = AbstractHBaseTool.EXIT_FAILURE;


@@ -23,18 +23,29 @@ import java.io.IOException;
 import java.lang.reflect.Method;
 import java.net.URL;
 import java.net.URLClassLoader;
+import java.nio.file.Files;
 import java.nio.file.Path;
 import java.nio.file.Paths;
 import java.security.AccessController;
 import java.security.PrivilegedAction;
 import java.util.ArrayList;
+import java.util.Arrays;
+import java.util.Collection;
+import java.util.Collections;
 import java.util.List;
-import java.util.jar.JarEntry;
-import java.util.jar.JarFile;
+import java.util.Optional;
+import java.util.regex.Pattern;
 import java.util.stream.Collectors;
+import java.util.stream.Stream;
 
-import org.apache.hadoop.hbase.Coprocessor;
+import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.hbase.HBaseInterfaceAudience;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.Connection;
+import org.apache.hadoop.hbase.client.ConnectionFactory;
+import org.apache.hadoop.hbase.client.CoprocessorDescriptor;
+import org.apache.hadoop.hbase.client.TableDescriptor;
+import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
 import org.apache.hadoop.hbase.tool.PreUpgradeValidator;
 import org.apache.hadoop.hbase.tool.coprocessor.CoprocessorViolation.Severity;
 import org.apache.hadoop.hbase.util.AbstractHBaseTool;
@@ -44,7 +55,6 @@ import org.slf4j.LoggerFactory;
 
 import org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting;
 import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine;
-import org.apache.hbase.thirdparty.org.apache.commons.cli.ParseException;
 
 @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.TOOLS)
 public class CoprocessorValidator extends AbstractHBaseTool {
@@ -54,13 +64,20 @@ public class CoprocessorValidator extends AbstractHBaseTool {
   private CoprocessorMethods branch1;
   private CoprocessorMethods current;
 
+  private final List<String> jars;
+  private final List<Pattern> tablePatterns;
+  private final List<String> classes;
+  private boolean config;
+
   private boolean dieOnWarnings;
-  private boolean scan;
-  private List<String> args;
 
   public CoprocessorValidator() {
     branch1 = new Branch1CoprocessorMethods();
     current = new CurrentCoprocessorMethods();
+
+    jars = new ArrayList<>();
+    tablePatterns = new ArrayList<>();
+    classes = new ArrayList<>();
   }
 
   /**
@@ -71,8 +88,8 @@ public class CoprocessorValidator extends AbstractHBaseTool {
    * according to JLS</a>.
    */
   private static final class ResolverUrlClassLoader extends URLClassLoader {
-    private ResolverUrlClassLoader(URL[] urls) {
-      super(urls, ResolverUrlClassLoader.class.getClassLoader());
+    private ResolverUrlClassLoader(URL[] urls, ClassLoader parent) {
+      super(urls, parent);
     }
 
     @Override
@@ -82,14 +99,33 @@ public class CoprocessorValidator extends AbstractHBaseTool {
   }
 
   private ResolverUrlClassLoader createClassLoader(URL[] urls) {
+    return createClassLoader(urls, getClass().getClassLoader());
+  }
+
+  private ResolverUrlClassLoader createClassLoader(URL[] urls, ClassLoader parent) {
     return AccessController.doPrivileged(new PrivilegedAction<ResolverUrlClassLoader>() {
       @Override
       public ResolverUrlClassLoader run() {
-        return new ResolverUrlClassLoader(urls);
+        return new ResolverUrlClassLoader(urls, parent);
       }
     });
   }
 
+  private ResolverUrlClassLoader createClassLoader(ClassLoader parent,
+      org.apache.hadoop.fs.Path path) throws IOException {
+    Path tempPath = Files.createTempFile("hbase-coprocessor-", ".jar");
+    org.apache.hadoop.fs.Path destination = new org.apache.hadoop.fs.Path(tempPath.toString());
+
+    LOG.debug("Copying coprocessor jar '{}' to '{}'.", path, tempPath);
+    FileSystem fileSystem = FileSystem.get(getConf());
+    fileSystem.copyToLocalFile(path, destination);
+
+    URL url = tempPath.toUri().toURL();
+
+    return createClassLoader(new URL[] { url }, parent);
+  }
+
   private void validate(ClassLoader classLoader, String className,
       List<CoprocessorViolation> violations) {
     LOG.debug("Validating class '{}'.", className);
@@ -101,133 +137,189 @@ public class CoprocessorValidator extends AbstractHBaseTool {
         LOG.trace("Validating method '{}'.", method);
 
         if (branch1.hasMethod(method) && !current.hasMethod(method)) {
-          CoprocessorViolation violation = new CoprocessorViolation(Severity.WARNING,
-              "Method '" + method + "' was removed from new coprocessor API, "
-              + "so it won't be called by HBase.");
+          CoprocessorViolation violation = new CoprocessorViolation(
+              className, Severity.WARNING, "method '" + method +
+              "' was removed from new coprocessor API, so it won't be called by HBase");
           violations.add(violation);
         }
       }
     } catch (ClassNotFoundException e) {
-      CoprocessorViolation violation = new CoprocessorViolation(Severity.ERROR,
-          "No such class '" + className + "'.", e);
+      CoprocessorViolation violation = new CoprocessorViolation(
+          className, Severity.ERROR, "no such class", e);
       violations.add(violation);
     } catch (RuntimeException | Error e) {
-      CoprocessorViolation violation = new CoprocessorViolation(Severity.ERROR,
-          "Could not validate class '" + className + "'.", e);
+      CoprocessorViolation violation = new CoprocessorViolation(
+          className, Severity.ERROR, "could not validate class", e);
       violations.add(violation);
     }
   }
 
-  public List<CoprocessorViolation> validate(ClassLoader classLoader, List<String> classNames) {
-    List<CoprocessorViolation> violations = new ArrayList<>();
-
+  public void validateClasses(ClassLoader classLoader, List<String> classNames,
+      List<CoprocessorViolation> violations) {
     for (String className : classNames) {
       validate(classLoader, className, violations);
     }
-
-    return violations;
   }
 
-  public List<CoprocessorViolation> validate(List<URL> urls, List<String> classNames)
-      throws IOException {
-    URL[] urlArray = new URL[urls.size()];
-    urls.toArray(urlArray);
-
-    try (ResolverUrlClassLoader classLoader = createClassLoader(urlArray)) {
-      return validate(classLoader, classNames);
-    }
+  public void validateClasses(ClassLoader classLoader, String[] classNames,
+      List<CoprocessorViolation> violations) {
+    validateClasses(classLoader, Arrays.asList(classNames), violations);
   }
 
   @VisibleForTesting
-  protected List<String> getJarClasses(Path path) throws IOException {
-    try (JarFile jarFile = new JarFile(path.toFile())) {
-      return jarFile.stream()
-          .map(JarEntry::getName)
-          .filter((name) -> name.endsWith(".class"))
-          .map((name) -> name.substring(0, name.length() - 6).replace('/', '.'))
-          .collect(Collectors.toList());
-    }
-  }
-
-  @VisibleForTesting
-  protected List<String> filterObservers(ClassLoader classLoader,
-      Iterable<String> classNames) throws ClassNotFoundException {
-    List<String> filteredClassNames = new ArrayList<>();
-
-    for (String className : classNames) {
-      LOG.debug("Scanning class '{}'.", className);
-
-      Class<?> clazz = classLoader.loadClass(className);
-
-      if (Coprocessor.class.isAssignableFrom(clazz)) {
-        LOG.debug("Found coprocessor class '{}'.", className);
-        filteredClassNames.add(className);
+  protected void validateTables(ClassLoader classLoader, Admin admin,
+      Pattern pattern, List<CoprocessorViolation> violations) throws IOException {
+    List<TableDescriptor> tableDescriptors = admin.listTableDescriptors(pattern);
+
+    for (TableDescriptor tableDescriptor : tableDescriptors) {
+      LOG.debug("Validating table {}", tableDescriptor.getTableName());
+
+      Collection<CoprocessorDescriptor> coprocessorDescriptors =
+          tableDescriptor.getCoprocessorDescriptors();
+
+      for (CoprocessorDescriptor coprocessorDescriptor : coprocessorDescriptors) {
+        String className = coprocessorDescriptor.getClassName();
+        Optional<String> jarPath = coprocessorDescriptor.getJarPath();
+
+        if (jarPath.isPresent()) {
+          org.apache.hadoop.fs.Path path = new org.apache.hadoop.fs.Path(jarPath.get());
+          try (ResolverUrlClassLoader cpClassLoader = createClassLoader(classLoader, path)) {
+            validate(cpClassLoader, className, violations);
+          } catch (IOException e) {
+            CoprocessorViolation violation = new CoprocessorViolation(
+                className, Severity.ERROR,
+                "could not validate jar file '" + path + "'", e);
+            violations.add(violation);
+          }
+        } else {
+          validate(classLoader, className, violations);
+        }
       }
     }
+  }
 
-    return filteredClassNames;
+  private void validateTables(ClassLoader classLoader, Pattern pattern,
+      List<CoprocessorViolation> violations) throws IOException {
+    try (Connection connection = ConnectionFactory.createConnection(getConf());
+        Admin admin = connection.getAdmin()) {
+      validateTables(classLoader, admin, pattern, violations);
+    }
   }
 
   @Override
   protected void printUsage() {
     String header = "hbase " + PreUpgradeValidator.TOOL_NAME + " " +
-        PreUpgradeValidator.VALIDATE_CP_NAME + " <jar> -scan|<classes>";
+        PreUpgradeValidator.VALIDATE_CP_NAME +
+        " [-jar ...] [-class ... | -table ... | -config]";
     printUsage(header, "Options:", "");
   }
 
   @Override
   protected void addOptions() {
     addOptNoArg("e", "Treat warnings as errors.");
-    addOptNoArg("scan", "Scan jar for observers.");
+    addOptWithArg("jar", "Jar file/directory of the coprocessor.");
+    addOptWithArg("table", "Table coprocessor(s) to check.");
+    addOptWithArg("class", "Coprocessor class(es) to check.");
+    addOptNoArg("config", "Obtain coprocessor class(es) from configuration.");
   }
 
   @Override
   protected void processOptions(CommandLine cmd) {
-    scan = cmd.hasOption("scan");
+    String[] jars = cmd.getOptionValues("jar");
+    if (jars != null) {
+      Collections.addAll(this.jars, jars);
+    }
+
+    String[] tables = cmd.getOptionValues("table");
+    if (tables != null) {
+      Arrays.stream(tables).map(Pattern::compile).forEach(tablePatterns::add);
+    }
+
+    String[] classes = cmd.getOptionValues("class");
+    if (classes != null) {
+      Collections.addAll(this.classes, classes);
+    }
+
+    config = cmd.hasOption("config");
     dieOnWarnings = cmd.hasOption("e");
-    args = cmd.getArgList();
+  }
+
+  private List<URL> buildClasspath(List<String> jars) throws IOException {
+    List<URL> urls = new ArrayList<>();
+
+    for (String jar : jars) {
+      Path jarPath = Paths.get(jar);
+      if (Files.isDirectory(jarPath)) {
+        try (Stream<Path> stream = Files.list(jarPath)) {
+          List<Path> files = stream
+              .filter((path) -> Files.isRegularFile(path))
+              .collect(Collectors.toList());
+          for (Path file : files) {
+            URL url = file.toUri().toURL();
+            urls.add(url);
+          }
+        }
+      } else {
+        URL url = jarPath.toUri().toURL();
+        urls.add(url);
+      }
+    }
+
+    return urls;
   }
 
   @Override
   protected int doWork() throws Exception {
-    if (args.size() < 1) {
-      System.err.println("Missing jar file.");
+    if (tablePatterns.isEmpty() && classes.isEmpty() && !config) {
+      LOG.error("Please give at least one -table, -class or -config parameter.");
       printUsage();
       return EXIT_FAILURE;
     }
 
-    String jar = args.get(0);
-
-    if (args.size() == 1 && !scan) {
-      throw new ParseException("Missing classes or -scan option.");
-    } else if (args.size() > 1 && scan) {
-      throw new ParseException("Can't use classes with -scan option.");
-    }
-
-    Path jarPath = Paths.get(jar);
-    URL[] urls = new URL[] { jarPath.toUri().toURL() };
+    List<URL> urlList = buildClasspath(jars);
+    URL[] urls = urlList.toArray(new URL[urlList.size()]);
 
-    List<CoprocessorViolation> violations;
+    LOG.debug("Classpath: {}", urlList);
+
+    List<CoprocessorViolation> violations = new ArrayList<>();
 
     try (ResolverUrlClassLoader classLoader = createClassLoader(urls)) {
-      List<String> classNames;
-
-      if (scan) {
-        List<String> jarClassNames = getJarClasses(jarPath);
-        classNames = filterObservers(classLoader, jarClassNames);
-      } else {
-        classNames = args.subList(1, args.size());
+      for (Pattern tablePattern : tablePatterns) {
+        validateTables(classLoader, tablePattern, violations);
       }
 
-      violations = validate(classLoader, classNames);
+      validateClasses(classLoader, classes, violations);
+
+      if (config) {
+        String[] masterCoprocessors =
+            getConf().getStrings(CoprocessorHost.MASTER_COPROCESSOR_CONF_KEY);
+        if (masterCoprocessors != null) {
+          validateClasses(classLoader, masterCoprocessors, violations);
+        }
+
+        String[] regionCoprocessors =
+            getConf().getStrings(CoprocessorHost.REGION_COPROCESSOR_CONF_KEY);
+        if (regionCoprocessors != null) {
+          validateClasses(classLoader, regionCoprocessors, violations);
+        }
+      }
     }
 
     boolean error = false;
 
     for (CoprocessorViolation violation : violations) {
+      String className = violation.getClassName();
+      String message = violation.getMessage();
+      Throwable throwable = violation.getThrowable();
+
       switch (violation.getSeverity()) {
         case WARNING:
-          System.err.println("[WARNING] " + violation.getMessage());
+          if (throwable == null) {
+            LOG.warn("Warning in class '{}': {}.", className, message);
+          } else {
+            LOG.warn("Warning in class '{}': {}.", className, message, throwable);
+          }
+
           if (dieOnWarnings) {
             error = true;
           }
@@ -235,7 +327,12 @@ public class CoprocessorValidator extends AbstractHBaseTool {
           break;
         case ERROR:
-          System.err.println("[ERROR] " + violation.getMessage());
+          if (throwable == null) {
+            LOG.error("Error in class '{}': {}.", className, message);
+          } else {
+            LOG.error("Error in class '{}': {}.", className, message, throwable);
+          }
+
           error = true;
           break;
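
For context, the new `validateTables()` path reads back exactly the coprocessor metadata stored on the table descriptor: the class name plus an optional jar path. A rough sketch of how such a table-level coprocessor is typically declared through the public client API (the table name, observer class and HDFS path below are made-up examples, not taken from this patch):

[source, java]
----
import java.io.IOException;

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.CoprocessorDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;

public class TableCoprocessorExample {
  // Builds a descriptor whose coprocessor metadata (class name + optional jar path)
  // is what CoprocessorValidator later reads back via Admin.listTableDescriptors().
  static TableDescriptor exampleDescriptor() throws IOException {
    return TableDescriptorBuilder.newBuilder(TableName.valueOf("my_table"))
        .setCoprocessor(CoprocessorDescriptorBuilder
            .newBuilder("com.example.MyRegionObserver")                   // class the validator checks
            .setJarPath("hdfs:///hbase/coprocessors/my-coprocessor.jar")  // optional jar, copied to a temp file before checking
            .build())
        .build();
  }
}
----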


@@ -21,7 +21,7 @@ package org.apache.hadoop.hbase.tool.coprocessor;
 
 import org.apache.yetus.audience.InterfaceAudience;
 
-import org.apache.hbase.thirdparty.com.google.common.base.Throwables;
+import org.apache.hbase.thirdparty.com.google.common.base.MoreObjects;
 
 @InterfaceAudience.Private
 public class CoprocessorViolation {
@@ -29,21 +29,25 @@ public class CoprocessorViolation {
     WARNING, ERROR
   }
 
+  private final String className;
   private final Severity severity;
   private final String message;
+  private final Throwable throwable;
 
-  public CoprocessorViolation(Severity severity, String message) {
-    this(severity, message, null);
+  public CoprocessorViolation(String className, Severity severity, String message) {
+    this(className, severity, message, null);
   }
 
-  public CoprocessorViolation(Severity severity, String message, Throwable t) {
+  public CoprocessorViolation(String className, Severity severity, String message,
+      Throwable t) {
+    this.className = className;
     this.severity = severity;
+    this.message = message;
+    this.throwable = t;
+  }
 
-    if (t == null) {
-      this.message = message;
-    } else {
-      this.message = message + "\n" + Throwables.getStackTraceAsString(t);
-    }
+  public String getClassName() {
+    return className;
   }
 
   public Severity getSeverity() {
@@ -53,4 +57,18 @@ public class CoprocessorViolation {
   public String getMessage() {
     return message;
   }
+
+  public Throwable getThrowable() {
+    return throwable;
+  }
+
+  @Override
+  public String toString() {
+    return MoreObjects.toStringHelper(this)
+      .add("className", className)
+      .add("severity", severity)
+      .add("message", message)
+      .add("throwable", throwable)
+      .toString();
+  }
 }


@@ -20,15 +20,29 @@ package org.apache.hadoop.hbase.tool.coprocessor;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
+import static org.mockito.Mockito.doReturn;
+import static org.mockito.Mockito.mock;
 
 import java.io.IOException;
+import java.io.InputStream;
+import java.io.OutputStream;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
+import java.util.ArrayList;
 import java.util.List;
+import java.util.Optional;
+import java.util.jar.JarOutputStream;
+import java.util.regex.Pattern;
+import java.util.zip.ZipEntry;
 
-import org.apache.hadoop.hbase.Coprocessor;
-import org.apache.hadoop.hbase.CoprocessorEnvironment;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
+import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.client.Admin;
+import org.apache.hadoop.hbase.client.CoprocessorDescriptor;
+import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment;
 import org.apache.hadoop.hbase.coprocessor.ObserverContext;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
@@ -37,7 +51,9 @@ import org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 
+import org.apache.hbase.thirdparty.com.google.common.base.Throwables;
 import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
+import org.apache.hbase.thirdparty.com.google.common.io.ByteStreams;
 
 @Category({ SmallTests.class })
 @SuppressWarnings("deprecation")
@@ -50,6 +66,7 @@ public class CoprocessorValidatorTest {
 
   public CoprocessorValidatorTest() {
     validator = new CoprocessorValidator();
+    validator.setConf(HBaseConfiguration.create());
   }
 
   private static ClassLoader getClassLoader() {
@@ -60,36 +77,18 @@ public class CoprocessorValidatorTest {
     return CoprocessorValidatorTest.class.getName() + "$" + className;
   }
 
-  @SuppressWarnings({"rawtypes", "unused"})
-  private static class TestObserver implements Coprocessor {
-    @Override
-    public void start(CoprocessorEnvironment env) throws IOException {
-    }
-
-    @Override
-    public void stop(CoprocessorEnvironment env) throws IOException {
-    }
-  }
-
-  @Test
-  public void testFilterObservers() throws Exception {
-    String filterObservers = getFullClassName("TestObserver");
-    List<String> classNames = Lists.newArrayList(
-        filterObservers, getClass().getName());
-    List<String> filteredClassNames = validator.filterObservers(getClassLoader(), classNames);
-
-    assertEquals(1, filteredClassNames.size());
-    assertEquals(filterObservers, filteredClassNames.get(0));
-  }
-
-  private List<CoprocessorViolation> validate(String className) {
+  private List<CoprocessorViolation> validateClass(String className) {
     ClassLoader classLoader = getClass().getClassLoader();
-    return validate(classLoader, className);
+    return validateClass(classLoader, className);
   }
 
-  private List<CoprocessorViolation> validate(ClassLoader classLoader, String className) {
-    List<String> classNames = Lists.newArrayList(getClass().getName() + "$" + className);
-    return validator.validate(classLoader, classNames);
+  private List<CoprocessorViolation> validateClass(ClassLoader classLoader, String className) {
+    List<String> classNames = Lists.newArrayList(getFullClassName(className));
+    List<CoprocessorViolation> violations = new ArrayList<>();
+
+    validator.validateClasses(classLoader, classNames, violations);
+
+    return violations;
   }
 
   /*
@@ -97,13 +96,15 @@ public class CoprocessorValidatorTest {
    */
   @Test
   public void testNoSuchClass() throws IOException {
-    List<CoprocessorViolation> violations = validate("NoSuchClass");
+    List<CoprocessorViolation> violations = validateClass("NoSuchClass");
     assertEquals(1, violations.size());
 
     CoprocessorViolation violation = violations.get(0);
+    assertEquals(getFullClassName("NoSuchClass"), violation.getClassName());
     assertEquals(Severity.ERROR, violation.getSeverity());
-    assertTrue(violation.getMessage().contains(
-        "java.lang.ClassNotFoundException: " +
+
+    String stackTrace = Throwables.getStackTraceAsString(violation.getThrowable());
+    assertTrue(stackTrace.contains("java.lang.ClassNotFoundException: " +
         "org.apache.hadoop.hbase.tool.coprocessor.CoprocessorValidatorTest$NoSuchClass"));
   }
 
@@ -142,14 +143,16 @@ public class CoprocessorValidatorTest {
   @Test
   public void testMissingClass() throws IOException {
     MissingClassClassLoader missingClassClassLoader = new MissingClassClassLoader();
-    List<CoprocessorViolation> violations = validate(missingClassClassLoader,
+    List<CoprocessorViolation> violations = validateClass(missingClassClassLoader,
         "MissingClassObserver");
     assertEquals(1, violations.size());
 
     CoprocessorViolation violation = violations.get(0);
+    assertEquals(getFullClassName("MissingClassObserver"), violation.getClassName());
     assertEquals(Severity.ERROR, violation.getSeverity());
-    assertTrue(violation.getMessage().contains(
-        "java.lang.ClassNotFoundException: " +
+
+    String stackTrace = Throwables.getStackTraceAsString(violation.getThrowable());
+    assertTrue(stackTrace.contains("java.lang.ClassNotFoundException: " +
        "org.apache.hadoop.hbase.tool.coprocessor.CoprocessorValidatorTest$MissingClass"));
   }
 
@@ -167,11 +170,96 @@ public class CoprocessorValidatorTest {
   @Test
   public void testObsoleteMethod() throws IOException {
-    List<CoprocessorViolation> violations = validate("ObsoleteMethodObserver");
+    List<CoprocessorViolation> violations = validateClass("ObsoleteMethodObserver");
     assertEquals(1, violations.size());
 
     CoprocessorViolation violation = violations.get(0);
     assertEquals(Severity.WARNING, violation.getSeverity());
+    assertEquals(getFullClassName("ObsoleteMethodObserver"), violation.getClassName());
     assertTrue(violation.getMessage().contains("was removed from new coprocessor API"));
   }
+
+  private List<CoprocessorViolation> validateTable(String jarFile, String className)
+      throws IOException {
+    Pattern pattern = Pattern.compile(".*");
+
+    Admin admin = mock(Admin.class);
+
+    TableDescriptor tableDescriptor = mock(TableDescriptor.class);
+    List<TableDescriptor> tableDescriptors = Lists.newArrayList(tableDescriptor);
+    doReturn(tableDescriptors).when(admin).listTableDescriptors(pattern);
+
+    CoprocessorDescriptor coprocessorDescriptor = mock(CoprocessorDescriptor.class);
+    List<CoprocessorDescriptor> coprocessorDescriptors =
+        Lists.newArrayList(coprocessorDescriptor);
+    doReturn(coprocessorDescriptors).when(tableDescriptor).getCoprocessorDescriptors();
+
+    doReturn(getFullClassName(className)).when(coprocessorDescriptor).getClassName();
+    doReturn(Optional.ofNullable(jarFile)).when(coprocessorDescriptor).getJarPath();
+
+    List<CoprocessorViolation> violations = new ArrayList<>();
+
+    validator.validateTables(getClassLoader(), admin, pattern, violations);
+
+    return violations;
+  }
+
+  @Test
+  public void testTableNoSuchClass() throws IOException {
+    List<CoprocessorViolation> violations = validateTable(null, "NoSuchClass");
+    assertEquals(1, violations.size());
+
+    CoprocessorViolation violation = violations.get(0);
+    assertEquals(getFullClassName("NoSuchClass"), violation.getClassName());
+    assertEquals(Severity.ERROR, violation.getSeverity());
+
+    String stackTrace = Throwables.getStackTraceAsString(violation.getThrowable());
+    assertTrue(stackTrace.contains("java.lang.ClassNotFoundException: " +
+        "org.apache.hadoop.hbase.tool.coprocessor.CoprocessorValidatorTest$NoSuchClass"));
+  }
+
+  @Test
+  public void testTableMissingJar() throws IOException {
+    List<CoprocessorViolation> violations = validateTable("no such file", "NoSuchClass");
+    assertEquals(1, violations.size());
+
+    CoprocessorViolation violation = violations.get(0);
+    assertEquals(getFullClassName("NoSuchClass"), violation.getClassName());
+    assertEquals(Severity.ERROR, violation.getSeverity());
+    assertTrue(violation.getMessage().contains("could not validate jar file 'no such file'"));
+  }
+
+  @Test
+  public void testTableValidJar() throws IOException {
+    Path outputDirectory = Paths.get("target", "test-classes");
+    String className = getFullClassName("ObsoleteMethodObserver");
+    Path classFile = Paths.get(className.replace('.', '/') + ".class");
+    Path fullClassFile = outputDirectory.resolve(classFile);
+
+    Path tempJarFile = Files.createTempFile("coprocessor-validator-test-", ".jar");
+    try {
+      try (OutputStream fileStream = Files.newOutputStream(tempJarFile);
+          JarOutputStream jarStream = new JarOutputStream(fileStream);
+          InputStream classStream = Files.newInputStream(fullClassFile)) {
+        ZipEntry entry = new ZipEntry(classFile.toString());
+        jarStream.putNextEntry(entry);
+        ByteStreams.copy(classStream, jarStream);
+      }
+
+      String tempJarFileUri = tempJarFile.toUri().toString();
+
+      List<CoprocessorViolation> violations =
+          validateTable(tempJarFileUri, "ObsoleteMethodObserver");
+      assertEquals(1, violations.size());
+
+      CoprocessorViolation violation = violations.get(0);
+      assertEquals(getFullClassName("ObsoleteMethodObserver"), violation.getClassName());
+      assertEquals(Severity.WARNING, violation.getSeverity());
+      assertTrue(violation.getMessage().contains("was removed from new coprocessor API"));
+    } finally {
+      Files.delete(tempJarFile);
+    }
+  }
 }
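
To run just the updated test locally, something along these lines should work (a sketch; it assumes a built HBase checkout and that the test lives in the hbase-server module):

[source, bash]
----
$ mvn test -pl hbase-server -Dtest=CoprocessorValidatorTest
----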


@@ -858,14 +858,19 @@ whether the old co-processors are still compatible with the actual HBase version
 [source, bash]
 ----
-$ bin/hbase pre-upgrade validate-cp <jar> -scan|<classes>
+$ bin/hbase pre-upgrade validate-cp [-jar ...] [-class ... | -table ... | -config]
 Options:
  -e            Treat warnings as errors.
- -scan         Scan jar for observers.
+ -jar <arg>    Jar file/directory of the coprocessor.
+ -table <arg>  Table coprocessor(s) to check.
+ -class <arg>  Coprocessor class(es) to check.
+ -config       Obtain coprocessor class(es) from configuration.
 ----
 
-The first parameter of the tool is the `jar` file which holds the co-processor implementation. Further parameters can be `-scan` when the tool will
-search the jar file for `Coprocessor` implementations or the `classes` can be explicitly given.
+The co-processor classes can be explicitly declared with the `-class` option, or they can be obtained from the HBase configuration with the `-config` option.
+Table level co-processors can also be checked with the `-table` option. The tool searches for co-processors on its own classpath, which can be extended
+with the `-jar` option. It is possible to test multiple classes with multiple `-class` options, multiple tables with multiple `-table` options, and to add
+multiple jars to the classpath with multiple `-jar` options.
 
 The tool can report errors and warnings. Errors mean that HBase won't be able to load the coprocessor, because it is incompatible with the current version
 of HBase. Warnings mean that the co-processors can be loaded, but they won't work as expected. If `-e` option is given, then the tool will also fail
@@ -877,9 +882,18 @@ For example:
 
 [source, bash]
 ----
-$ bin/hbase pre-upgrade validate-cp my-coprocessor.jar MyMasterObserver MyRegionObserver
+$ bin/hbase pre-upgrade validate-cp -jar my-coprocessor.jar -class MyMasterObserver -class MyRegionObserver
 ----
+It validates the `MyMasterObserver` and `MyRegionObserver` classes located in `my-coprocessor.jar`.
+
+[source, bash]
+----
+$ bin/hbase pre-upgrade validate-cp -table .*
+----
+It validates every table level co-processor where the table name matches the `.*` regular expression.
 
 ==== DataBlockEncoding validation
 
 HBase 2.0 removed `PREFIX_TREE` Data Block Encoding from column families.
 To verify that none of the column families are using incompatible Data Block Encodings in the cluster run the following command.