HBASE-7109 integration tests on cluster are not getting picked up from distribution
git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1412348 13f79535-47bb-0310-9956-ffa450edef68
parent 4d7870dcc4
commit 4442800016

@@ -0,0 +1,232 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.hbase;

import java.io.File;
import java.io.FileFilter;
import java.io.FileInputStream;
import java.io.IOException;
import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.net.URL;
import java.util.ArrayList;
import java.util.Enumeration;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.jar.*;
import java.util.regex.Matcher;
import java.util.regex.Pattern;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

/**
 * A class that finds a set of classes that are locally accessible
 * (from .class or .jar files), and satisfy the conditions that are
 * imposed by name and class filters provided by the user.
 */
public class ClassFinder {
  private static final Log LOG = LogFactory.getLog(ClassFinder.class);
  private static String CLASS_EXT = ".class";

  private FileNameFilter fileNameFilter;
  private ClassFilter classFilter;
  private FileFilter fileFilter;

  public static interface FileNameFilter {
    public boolean isCandidateFile(String fileName, String absFilePath);
  };

  public static interface ClassFilter {
    public boolean isCandidateClass(Class<?> c);
  };

  public ClassFinder(FileNameFilter fileNameFilter, ClassFilter classFilter) {
    this.classFilter = classFilter;
    this.fileNameFilter = fileNameFilter;
    this.fileFilter = new FileFilterWithName(fileNameFilter);
  }

  /**
   * Finds the classes in the current package (of ClassFinder) and nested packages.
   * @param proceedOnExceptions whether to ignore exceptions encountered for
   *   individual jars/files/classes, and proceed looking for others.
   */
  public Set<Class<?>> findClasses(boolean proceedOnExceptions)
    throws ClassNotFoundException, IOException, LinkageError {
    return findClasses(this.getClass().getPackage().getName(), proceedOnExceptions);
  }

  /**
   * Finds the classes in a package and nested packages.
   * @param packageName package name
   * @param proceedOnExceptions whether to ignore exceptions encountered for
   *   individual jars/files/classes, and proceed looking for others.
   */
  public Set<Class<?>> findClasses(String packageName, boolean proceedOnExceptions)
    throws ClassNotFoundException, IOException, LinkageError {
    final String path = packageName.replace('.', '/');
    final Pattern jarResourceRe = Pattern.compile("^file:(.+\\.jar)!/" + path + "$");

    Enumeration<URL> resources = ClassLoader.getSystemClassLoader().getResources(path);
    List<File> dirs = new ArrayList<File>();
    List<String> jars = new ArrayList<String>();

    while (resources.hasMoreElements()) {
      URL resource = resources.nextElement();
      String resourcePath = resource.getFile();
      Matcher matcher = jarResourceRe.matcher(resourcePath);
      if (matcher.find()) {
        jars.add(matcher.group(1));
      } else {
        dirs.add(new File(resource.getFile()));
      }
    }

    Set<Class<?>> classes = new HashSet<Class<?>>();
    for (File directory : dirs) {
      classes.addAll(findClassesFromFiles(directory, packageName, proceedOnExceptions));
    }
    for (String jarFileName : jars) {
      classes.addAll(findClassesFromJar(jarFileName, packageName, proceedOnExceptions));
    }
    return classes;
  }

  private Set<Class<?>> findClassesFromJar(String jarFileName,
      String packageName, boolean proceedOnExceptions)
    throws IOException, ClassNotFoundException, LinkageError {
    JarInputStream jarFile = null;
    try {
      jarFile = new JarInputStream(new FileInputStream(jarFileName));
    } catch (IOException ioEx) {
      if (!proceedOnExceptions) {
        throw ioEx;
      }
      LOG.error("Failed to look for classes in " + jarFileName + ": " + ioEx);
    }

    Set<Class<?>> classes = new HashSet<Class<?>>();
    JarEntry entry = null;
    while (true) {
      try {
        entry = jarFile.getNextJarEntry();
      } catch (IOException ioEx) {
        if (!proceedOnExceptions) {
          throw ioEx;
        }
        LOG.error("Failed to get next entry from " + jarFileName + ": " + ioEx);
        break;
      }
      if (entry == null) {
        break; // loop termination condition
      }

      String className = entry.getName();
      if (!className.endsWith(CLASS_EXT)) {
        continue;
      }
      int ix = className.lastIndexOf('/');
      String fileName = (ix >= 0) ? className.substring(ix + 1) : className;
      if (!this.fileNameFilter.isCandidateFile(fileName, className)) {
        continue;
      }
      className = className
        .substring(0, className.length() - CLASS_EXT.length()).replace('/', '.');
      if (!className.startsWith(packageName)) {
        continue;
      }
      Class<?> c = makeClass(className, proceedOnExceptions);
      if (c != null) {
        if (!classes.add(c)) {
          LOG.error("Ignoring duplicate class " + className);
        }
      }
    }
    return classes;
  }

  private Set<Class<?>> findClassesFromFiles(File baseDirectory, String packageName,
      boolean proceedOnExceptions) throws ClassNotFoundException, LinkageError {
    Set<Class<?>> classes = new HashSet<Class<?>>();
    if (!baseDirectory.exists()) {
      LOG.error("Failed to find " + baseDirectory.getAbsolutePath());
      return classes;
    }

    File[] files = baseDirectory.listFiles(this.fileFilter);
    if (files == null) {
      LOG.error("Failed to get files from " + baseDirectory.getAbsolutePath());
      return classes;
    }

    for (File file : files) {
      final String fileName = file.getName();
      if (file.isDirectory()) {
        classes.addAll(findClassesFromFiles(file, packageName + "." + fileName,
          proceedOnExceptions));
      } else {
        String className = packageName + '.'
          + fileName.substring(0, fileName.length() - CLASS_EXT.length());
        Class<?> c = makeClass(className, proceedOnExceptions);
        if (c != null) {
          if (!classes.add(c)) {
            LOG.error("Ignoring duplicate class " + className);
          }
        }
      }
    }
    return classes;
  }

  private Class<?> makeClass(String className, boolean proceedOnExceptions)
    throws ClassNotFoundException, LinkageError {
    try {
      Class<?> c = Class.forName(className, false, this.getClass().getClassLoader());
      return classFilter.isCandidateClass(c) ? c : null;
    } catch (ClassNotFoundException classNotFoundEx) {
      if (!proceedOnExceptions) {
        throw classNotFoundEx;
      }
      LOG.error("Failed to instantiate or check " + className + ": " + classNotFoundEx);
    } catch (LinkageError linkageEx) {
      if (!proceedOnExceptions) {
        throw linkageEx;
      }
      LOG.error("Failed to instantiate or check " + className + ": " + linkageEx);
    }
    return null;
  }

  private class FileFilterWithName implements FileFilter {
    private FileNameFilter nameFilter;

    public FileFilterWithName(FileNameFilter nameFilter) {
      this.nameFilter = nameFilter;
    }

    @Override
    public boolean accept(File file) {
      return file.isDirectory()
        || (file.getName().endsWith(CLASS_EXT)
          && nameFilter.isCandidateFile(file.getName(), file.getAbsolutePath()));
    }
  };
};
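
A minimal usage sketch of the ClassFinder API above (not part of the commit; the package name and the filter rules are made-up examples, and the snippet is assumed to sit in org.apache.hadoop.hbase with java.util.Set imported). It wires a name filter and a class filter into a finder and collects matching classes from both directories and jars on the classpath.

// Hypothetical caller, shown only to illustrate the API above.
public static Set<Class<?>> findTestClassesExample() throws Exception {
  ClassFinder.FileNameFilter nameFilter = new ClassFinder.FileNameFilter() {
    @Override
    public boolean isCandidateFile(String fileName, String absFilePath) {
      return fileName.startsWith("Test");   // only consider Test*.class files
    }
  };
  ClassFinder.ClassFilter classFilter = new ClassFinder.ClassFilter() {
    @Override
    public boolean isCandidateClass(Class<?> c) {
      return !java.lang.reflect.Modifier.isAbstract(c.getModifiers());
    }
  };
  ClassFinder finder = new ClassFinder(nameFilter, classFilter);
  // true = log and skip unreadable jars/classes instead of failing the whole scan
  return finder.findClasses("org.apache.hadoop.hbase", true);
}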

@@ -0,0 +1,116 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.hadoop.hbase;

import java.lang.reflect.Method;
import java.lang.reflect.Modifier;
import java.util.regex.Pattern;

import org.apache.hadoop.hbase.ClassFinder.ClassFilter;
import org.apache.hadoop.hbase.ClassFinder.FileNameFilter;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.runners.Suite;

/**
 * ClassFinder that is pre-configured with filters that will only allow test classes.
 * The name is strange because a logical name would start with "Test" and be confusing.
 */
public class ClassTestFinder extends ClassFinder {

  public ClassTestFinder() {
    super(new TestFileNameFilter(), new TestClassFilter());
  }

  public ClassTestFinder(Class<?> category) {
    super(new TestFileNameFilter(), new TestClassFilter(category));
  }

  public static Class<?>[] getCategoryAnnotations(Class<?> c) {
    Category category = c.getAnnotation(Category.class);
    if (category != null) {
      return category.value();
    }
    return new Class<?>[0];
  }

  private static class TestFileNameFilter implements FileNameFilter {
    private static final Pattern hadoopCompactRe =
      Pattern.compile("hbase-hadoop\\d?-compat");

    @Override
    public boolean isCandidateFile(String fileName, String absFilePath) {
      boolean isTestFile = fileName.startsWith("Test")
        || fileName.startsWith("IntegrationTest");
      return isTestFile && !hadoopCompactRe.matcher(absFilePath).find();
    }
  };

  /*
   * A class is considered as a test class if:
   * - it's not Abstract AND
   * - one or more of its methods is annotated with org.junit.Test OR
   * - the class is annotated with Suite.SuiteClasses
   */
  private static class TestClassFilter implements ClassFilter {
    private Class<?> categoryAnnotation = null;
    public TestClassFilter(Class<?> categoryAnnotation) {
      this.categoryAnnotation = categoryAnnotation;
    }

    public TestClassFilter() {
      this(null);
    }

    @Override
    public boolean isCandidateClass(Class<?> c) {
      return isTestClass(c) && isCategorizedClass(c);
    }

    private boolean isTestClass(Class<?> c) {
      if (Modifier.isAbstract(c.getModifiers())) {
        return false;
      }

      if (c.getAnnotation(Suite.SuiteClasses.class) != null) {
        return true;
      }

      for (Method met : c.getMethods()) {
        if (met.getAnnotation(Test.class) != null) {
          return true;
        }
      }

      return false;
    }

    private boolean isCategorizedClass(Class<?> c) {
      if (this.categoryAnnotation == null) {
        return true;
      }
      for (Class<?> cc : getCategoryAnnotations(c)) {
        if (cc.equals(this.categoryAnnotation)) {
          return true;
        }
      }
      return false;
    }
  };
};
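
A hedged sketch of how the pre-configured finder above gets used (illustration only, not code from the commit; it assumes the caller lives in org.apache.hadoop.hbase and that the IntegrationTests category marker is on the classpath). This is essentially the lookup IntegrationTestsDriver performs further down in this change.

// Illustrative only.
public static void listIntegrationTests() throws Exception {
  ClassTestFinder finder = new ClassTestFinder(IntegrationTests.class);
  // No package argument: defaults to the finder's own package, org.apache.hadoop.hbase.
  // true = log and skip unreadable jars/classes rather than aborting the scan.
  Set<Class<?>> integrationTests = finder.findClasses(true);
  for (Class<?> testClass : integrationTests) {
    System.out.println(testClass.getName());
  }
}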

@@ -0,0 +1,156 @@
/**
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase;

import java.io.File;
import java.io.IOException;
import java.util.UUID;

import org.apache.commons.io.FileUtils;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

/**
 * Common helpers for testing HBase that do not depend on specific server/etc. things.
 * @see HBaseTestingUtility
 */
@InterfaceAudience.Public
@InterfaceStability.Unstable
public class HBaseCommonTestingUtility {
  protected static final Log LOG = LogFactory.getLog(HBaseCommonTestingUtility.class);

  /**
   * System property key to get base test directory value
   */
  public static final String BASE_TEST_DIRECTORY_KEY =
    "test.build.data.basedirectory";

  /**
   * Default base directory for test output.
   */
  public static final String DEFAULT_BASE_TEST_DIRECTORY = "target/test-data";

  /** Directory where we put the data for this instance of HBaseTestingUtility */
  private File dataTestDir = null;

  /**
   * @return Where to write test data on local filesystem, specific to
   * the test. Useful for tests that do not use a cluster.
   * Creates it if it does not exist already.
   * @see #getTestFileSystem()
   */
  public Path getDataTestDir() {
    if (this.dataTestDir == null) {
      setupDataTestDir();
    }
    return new Path(this.dataTestDir.getAbsolutePath());
  }

  /**
   * @param subdirName
   * @return Path to a subdirectory named <code>subdirName</code> under
   * {@link #getDataTestDir()}.
   * Does *NOT* create it if it does not exist.
   */
  public Path getDataTestDir(final String subdirName) {
    return new Path(getDataTestDir(), subdirName);
  }

  /**
   * Sets up a directory for a test to use.
   *
   * @return New directory path, if created.
   */
  protected Path setupDataTestDir() {
    if (this.dataTestDir != null) {
      LOG.warn("Data test dir already setup in " +
        dataTestDir.getAbsolutePath());
      return null;
    }

    String randomStr = UUID.randomUUID().toString();
    Path testPath = new Path(getBaseTestDir(), randomStr);

    this.dataTestDir = new File(testPath.toString()).getAbsoluteFile();
    this.dataTestDir.deleteOnExit();
    return testPath;
  }

  /**
   * @return True if we removed the test dirs
   * @throws IOException
   */
  boolean cleanupTestDir() throws IOException {
    if (deleteDir(this.dataTestDir)) {
      this.dataTestDir = null;
      return true;
    }
    return false;
  }

  /**
   * @param subdir Test subdir name.
   * @return True if we removed the test dir
   * @throws IOException
   */
  boolean cleanupTestDir(final String subdir) throws IOException {
    if (this.dataTestDir == null) {
      return false;
    }
    return deleteDir(new File(this.dataTestDir, subdir));
  }

  /**
   * @return Where to write test data on local filesystem; usually
   * {@link #DEFAULT_BASE_TEST_DIRECTORY}.
   * Should not be used by the unit tests, hence it's private.
   * Unit tests will use a subdirectory of this directory.
   * @see #setupDataTestDir()
   * @see #getTestFileSystem()
   */
  private Path getBaseTestDir() {
    String PathName = System.getProperty(
      BASE_TEST_DIRECTORY_KEY, DEFAULT_BASE_TEST_DIRECTORY);

    return new Path(PathName);
  }

  /**
   * @param dir Directory to delete
   * @return True if we deleted it.
   * @throws IOException
   */
  boolean deleteDir(final File dir) throws IOException {
    if (dir != null && !dir.exists()) {
      return true;
    }
    try {
      FileUtils.deleteDirectory(dir);
      return true;
    } catch (IOException ex) {
      LOG.warn("Failed to delete " + dir.getAbsolutePath());
      return false;
    }
  }
};
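
As a rough usage sketch of the helper above (again not from the commit itself; "scratch" is an invented subdirectory name and the snippet is assumed to live in a test class inside org.apache.hadoop.hbase, since cleanupTestDir() is package-private): a local-only test grabs a per-instance scratch directory under target/test-data and removes it when done.

// Illustrative only.
public void exerciseDataTestDir() throws IOException {
  HBaseCommonTestingUtility testUtil = new HBaseCommonTestingUtility();
  Path scratch = testUtil.getDataTestDir("scratch");  // target/test-data/<uuid>/scratch
  // getDataTestDir(String) does not create the subdirectory; do it explicitly.
  new File(scratch.toString()).mkdirs();
  // ... write temporary files beneath 'scratch' ...
  testUtil.cleanupTestDir();  // package-private; removes the whole per-instance dir
}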

@@ -0,0 +1,347 @@
/**
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase;

import static org.junit.Assert.*;

import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.PrintStream;
import java.lang.reflect.Method;
import java.net.URL;
import java.net.URLClassLoader;
import java.util.*;
import java.util.concurrent.atomic.AtomicLong;
import java.util.jar.*;
import javax.tools.*;

import org.apache.hadoop.hbase.SmallTests;

import org.junit.experimental.categories.Category;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;

import org.apache.commons.io.FileUtils;

@Category(SmallTests.class)
public class TestClassFinder {
  private static final HBaseCommonTestingUtility testUtil = new HBaseCommonTestingUtility();
  private static final String BASEPKG = "tfcpkg";

  // Use unique jar/class/package names in each test case with the help
  // of these global counters; we are mucking with ClassLoader in this test
  // and we don't want individual test cases to conflict via it.
  private static AtomicLong testCounter = new AtomicLong(0);
  private static AtomicLong jarCounter = new AtomicLong(0);

  private static String basePath = null;

  // Default name/class filters for testing.
  private static final ClassFinder.FileNameFilter trueNameFilter =
    new ClassFinder.FileNameFilter() {
      @Override
      public boolean isCandidateFile(String fileName, String absFilePath) {
        return true;
      }
    };
  private static final ClassFinder.ClassFilter trueClassFilter =
    new ClassFinder.ClassFilter() {
      @Override
      public boolean isCandidateClass(Class<?> c) {
        return true;
      }
    };

  @BeforeClass
  public static void createTestDir() throws IOException {
    basePath = testUtil.getDataTestDir(TestClassFinder.class.getSimpleName()).toString();
    if (!basePath.endsWith("/")) {
      basePath += "/";
    }
    // Make sure we get a brand new directory.
    File testDir = new File(basePath);
    if (testDir.exists()) {
      deleteTestDir();
    }
    assertTrue(testDir.mkdirs());
  }

  @AfterClass
  public static void deleteTestDir() throws IOException {
    testUtil.cleanupTestDir(TestClassFinder.class.getSimpleName());
  }

  @Test
  public void testClassFinderCanFindClassesInJars() throws Exception {
    long counter = testCounter.incrementAndGet();
    FileAndPath c1 = compileTestClass(counter, "", "c1");
    FileAndPath c2 = compileTestClass(counter, ".nested", "c2");
    FileAndPath c3 = compileTestClass(counter, "", "c3");
    packageAndLoadJar(c1, c3);
    packageAndLoadJar(c2);

    ClassFinder allClassesFinder = new ClassFinder(trueNameFilter, trueClassFilter);
    Set<Class<?>> allClasses = allClassesFinder.findClasses(
      makePackageName("", counter), false);
    assertEquals(3, allClasses.size());
  }

  @Test
  public void testClassFinderHandlesConflicts() throws Exception {
    long counter = testCounter.incrementAndGet();
    FileAndPath c1 = compileTestClass(counter, "", "c1");
    FileAndPath c2 = compileTestClass(counter, "", "c2");
    packageAndLoadJar(c1, c2);
    packageAndLoadJar(c1);

    ClassFinder allClassesFinder = new ClassFinder(trueNameFilter, trueClassFilter);
    Set<Class<?>> allClasses = allClassesFinder.findClasses(
      makePackageName("", counter), false);
    assertEquals(2, allClasses.size());
  }

  @Test
  public void testClassFinderHandlesNestedPackages() throws Exception {
    final String NESTED = ".nested";
    final String CLASSNAME1 = "c2";
    final String CLASSNAME2 = "c3";
    long counter = testCounter.incrementAndGet();
    FileAndPath c1 = compileTestClass(counter, "", "c1");
    FileAndPath c2 = compileTestClass(counter, NESTED, CLASSNAME1);
    FileAndPath c3 = compileTestClass(counter, NESTED, CLASSNAME2);
    packageAndLoadJar(c1, c2);
    packageAndLoadJar(c3);

    ClassFinder allClassesFinder = new ClassFinder(trueNameFilter, trueClassFilter);
    Set<Class<?>> nestedClasses = allClassesFinder.findClasses(
      makePackageName(NESTED, counter), false);
    assertEquals(2, nestedClasses.size());
    Class<?> nestedClass1 = makeClass(NESTED, CLASSNAME1, counter);
    assertTrue(nestedClasses.contains(nestedClass1));
    Class<?> nestedClass2 = makeClass(NESTED, CLASSNAME2, counter);
    assertTrue(nestedClasses.contains(nestedClass2));
  }

  @Test
  public void testClassFinderFiltersByNameInJar() throws Exception {
    final String CLASSNAME = "c1";
    final String CLASSNAMEEXCPREFIX = "c2";
    long counter = testCounter.incrementAndGet();
    FileAndPath c1 = compileTestClass(counter, "", CLASSNAME);
    FileAndPath c2 = compileTestClass(counter, "", CLASSNAMEEXCPREFIX + "1");
    FileAndPath c3 = compileTestClass(counter, "", CLASSNAMEEXCPREFIX + "2");
    packageAndLoadJar(c1, c2, c3);

    ClassFinder.FileNameFilter notExcNameFilter = new ClassFinder.FileNameFilter() {
      @Override
      public boolean isCandidateFile(String fileName, String absFilePath) {
        return !fileName.startsWith(CLASSNAMEEXCPREFIX);
      }
    };
    ClassFinder incClassesFinder = new ClassFinder(notExcNameFilter, trueClassFilter);
    Set<Class<?>> incClasses = incClassesFinder.findClasses(
      makePackageName("", counter), false);
    assertEquals(1, incClasses.size());
    Class<?> incClass = makeClass("", CLASSNAME, counter);
    assertTrue(incClasses.contains(incClass));
  }

  @Test
  public void testClassFinderFiltersByClassInJar() throws Exception {
    final String CLASSNAME = "c1";
    final String CLASSNAMEEXCPREFIX = "c2";
    long counter = testCounter.incrementAndGet();
    FileAndPath c1 = compileTestClass(counter, "", CLASSNAME);
    FileAndPath c2 = compileTestClass(counter, "", CLASSNAMEEXCPREFIX + "1");
    FileAndPath c3 = compileTestClass(counter, "", CLASSNAMEEXCPREFIX + "2");
    packageAndLoadJar(c1, c2, c3);

    final ClassFinder.ClassFilter notExcClassFilter = new ClassFinder.ClassFilter() {
      @Override
      public boolean isCandidateClass(Class<?> c) {
        return !c.getSimpleName().startsWith(CLASSNAMEEXCPREFIX);
      }
    };
    ClassFinder incClassesFinder = new ClassFinder(trueNameFilter, notExcClassFilter);
    Set<Class<?>> incClasses = incClassesFinder.findClasses(
      makePackageName("", counter), false);
    assertEquals(1, incClasses.size());
    Class<?> incClass = makeClass("", CLASSNAME, counter);
    assertTrue(incClasses.contains(incClass));
  }

  @Test
  public void testClassFinderCanFindClassesInDirs() throws Exception {
    // Well, technically, we are not guaranteed that the classes will
    // be in dirs, but during normal build they would be.
    ClassFinder allClassesFinder = new ClassFinder(trueNameFilter, trueClassFilter);
    Set<Class<?>> allClasses = allClassesFinder.findClasses(
      this.getClass().getPackage().getName(), false);
    assertTrue(allClasses.contains(this.getClass()));
    assertTrue(allClasses.contains(ClassFinder.class));
  }

  @Test
  public void testClassFinderFiltersByNameInDirs() throws Exception {
    final String thisName = this.getClass().getSimpleName();
    ClassFinder.FileNameFilter notThisFilter = new ClassFinder.FileNameFilter() {
      @Override
      public boolean isCandidateFile(String fileName, String absFilePath) {
        return !fileName.equals(thisName + ".class");
      }
    };
    String thisPackage = this.getClass().getPackage().getName();
    ClassFinder allClassesFinder = new ClassFinder(trueNameFilter, trueClassFilter);
    Set<Class<?>> allClasses = allClassesFinder.findClasses(thisPackage, false);
    ClassFinder notThisClassFinder = new ClassFinder(notThisFilter, trueClassFilter);
    Set<Class<?>> notAllClasses = notThisClassFinder.findClasses(thisPackage, false);
    assertFalse(notAllClasses.contains(this.getClass()));
    assertEquals(allClasses.size() - 1, notAllClasses.size());
  }

  @Test
  public void testClassFinderFiltersByClassInDirs() throws Exception {
    ClassFinder.ClassFilter notThisFilter = new ClassFinder.ClassFilter() {
      @Override
      public boolean isCandidateClass(Class<?> c) {
        return c != TestClassFinder.class;
      }
    };
    String thisPackage = this.getClass().getPackage().getName();
    ClassFinder allClassesFinder = new ClassFinder(trueNameFilter, trueClassFilter);
    Set<Class<?>> allClasses = allClassesFinder.findClasses(thisPackage, false);
    ClassFinder notThisClassFinder = new ClassFinder(trueNameFilter, notThisFilter);
    Set<Class<?>> notAllClasses = notThisClassFinder.findClasses(thisPackage, false);
    assertFalse(notAllClasses.contains(this.getClass()));
    assertEquals(allClasses.size() - 1, notAllClasses.size());
  }

  @Test
  public void testClassFinderDefaultsToOwnPackage() throws Exception {
    // Correct handling of nested packages is tested elsewhere, so here we just assume
    // pkgClasses is the correct answer that we don't have to check.
    ClassFinder allClassesFinder = new ClassFinder(trueNameFilter, trueClassFilter);
    Set<Class<?>> pkgClasses = allClassesFinder.findClasses(
      ClassFinder.class.getPackage().getName(), false);
    Set<Class<?>> defaultClasses = allClassesFinder.findClasses(false);
    assertArrayEquals(pkgClasses.toArray(), defaultClasses.toArray());
  }

  private static class FileAndPath {
    String path;
    File file;
    public FileAndPath(String path, File file) {
      this.file = file;
      this.path = path;
    }
  }

  private static Class<?> makeClass(String nestedPkgSuffix,
      String className, long counter) throws ClassNotFoundException {
    return Class.forName(
      makePackageName(nestedPkgSuffix, counter) + "." + className + counter);
  }

  private static String makePackageName(String nestedSuffix, long counter) {
    return BASEPKG + counter + nestedSuffix;
  }

  /**
   * Compiles the test class with bogus code into a .class file.
   * Unfortunately it's very tedious.
   * @param counter Unique test counter.
   * @param packageNameSuffix Package name suffix (e.g. ".suffix") for nesting, or "".
   * @return The resulting .class file and the location in jar it is supposed to go to.
   */
  private static FileAndPath compileTestClass(long counter,
      String packageNameSuffix, String classNamePrefix) throws Exception {
    classNamePrefix = classNamePrefix + counter;
    String packageName = makePackageName(packageNameSuffix, counter);
    String javaPath = basePath + classNamePrefix + ".java";
    String classPath = basePath + classNamePrefix + ".class";
    PrintStream source = new PrintStream(javaPath);
    source.println("package " + packageName + ";");
    source.println("public class " + classNamePrefix
      + " { public static void main(String[] args) { } };");
    source.close();
    JavaCompiler jc = ToolProvider.getSystemJavaCompiler();
    int result = jc.run(null, null, null, javaPath);
    assertEquals(0, result);
    File classFile = new File(classPath);
    assertTrue(classFile.exists());
    return new FileAndPath(packageName.replace('.', '/') + '/', classFile);
  }

  /**
   * Makes a jar out of some class files. Unfortunately it's very tedious.
   * @param filesInJar Files created via compileTestClass.
   */
  private static void packageAndLoadJar(FileAndPath... filesInJar) throws Exception {
    // First, write the bogus jar file.
    String path = basePath + "jar" + jarCounter.incrementAndGet() + ".jar";
    Manifest manifest = new Manifest();
    manifest.getMainAttributes().put(Attributes.Name.MANIFEST_VERSION, "1.0");
    FileOutputStream fos = new FileOutputStream(path);
    JarOutputStream jarOutputStream = new JarOutputStream(fos, manifest);
    // Directory entries for all packages have to be added explicitly for
    // resources to be findable via ClassLoader. Directory entries must end
    // with "/"; the initial one is expected to, also.
    Set<String> pathsInJar = new HashSet<String>();
    for (FileAndPath fileAndPath : filesInJar) {
      String pathToAdd = fileAndPath.path;
      while (pathsInJar.add(pathToAdd)) {
        int ix = pathToAdd.lastIndexOf('/', pathToAdd.length() - 2);
        if (ix < 0) {
          break;
        }
        pathToAdd = pathToAdd.substring(0, ix);
      }
    }
    for (String pathInJar : pathsInJar) {
      jarOutputStream.putNextEntry(new JarEntry(pathInJar));
      jarOutputStream.closeEntry();
    }
    for (FileAndPath fileAndPath : filesInJar) {
      File file = fileAndPath.file;
      jarOutputStream.putNextEntry(
        new JarEntry(fileAndPath.path + file.getName()));
      byte[] allBytes = new byte[(int)file.length()];
      FileInputStream fis = new FileInputStream(file);
      fis.read(allBytes);
      fis.close();
      jarOutputStream.write(allBytes);
      jarOutputStream.closeEntry();
    }
    jarOutputStream.close();
    fos.close();

    // Add the file to classpath.
    File jarFile = new File(path);
    assertTrue(jarFile.exists());
    URLClassLoader urlClassLoader = (URLClassLoader)ClassLoader.getSystemClassLoader();
    Method method = URLClassLoader.class
      .getDeclaredMethod("addURL", new Class[] { URL.class });
    method.setAccessible(true);
    method.invoke(urlClassLoader, new Object[] { jarFile.toURI().toURL() });
  }
};

@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase;

import java.io.IOException;
import java.util.List;
import java.util.Set;

import org.apache.commons.cli.CommandLine;
import org.apache.hadoop.hbase.util.AbstractHBaseTool;

@@ -28,12 +29,16 @@ import org.junit.internal.TextListener;
import org.junit.runner.JUnitCore;
import org.junit.runner.Result;

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

/**
 * This class drives the Integration test suite execution. Executes all
 * tests having @Category(IntegrationTests.class) annotation against an
 * already deployed distributed cluster.
 */
public class IntegrationTestsDriver extends AbstractHBaseTool {
  private static final Log LOG = LogFactory.getLog(IntegrationTestsDriver.class);

  public static void main(String[] args) throws Exception {
    int ret = ToolRunner.run(new IntegrationTestsDriver(), args);

@@ -51,21 +56,24 @@ public class IntegrationTestsDriver extends AbstractHBaseTool {
  /**
   * Returns test classes annotated with @Category(IntegrationTests.class)
   */
  private Class<?>[] findIntegrationTestClasses() throws ClassNotFoundException, IOException {
    TestCheckTestClasses util = new TestCheckTestClasses();
    List<Class<?>> classes = util.findTestClasses(IntegrationTests.class);
    return classes.toArray(new Class<?>[classes.size()]);
  }
  private Class<?>[] findIntegrationTestClasses()
    throws ClassNotFoundException, LinkageError, IOException {
    ClassTestFinder classFinder = new ClassTestFinder(IntegrationTests.class);
    Set<Class<?>> classes = classFinder.findClasses(true);
    return classes.toArray(new Class<?>[classes.size()]);
  }

  @Override
  protected int doWork() throws Exception {

    //this is called from the command line, so we should set to use the distributed cluster
    IntegrationTestingUtility.setUseDistributedCluster(conf);
    Class<?>[] classes = findIntegrationTestClasses();
    LOG.info("Found " + classes.length + " integration tests to run");

    JUnitCore junit = new JUnitCore();
    junit.addListener(new TextListener(System.out));
    Result result = junit.run(findIntegrationTestClasses());
    Result result = junit.run(classes);

    return result.wasSuccessful() ? 0 : 1;
  }

@@ -114,10 +114,9 @@ import org.apache.zookeeper.ZooKeeper.States;
 */
@InterfaceAudience.Public
@InterfaceStability.Evolving
public class HBaseTestingUtility {
  private static final Log LOG = LogFactory.getLog(HBaseTestingUtility.class);
  private Configuration conf;
  private MiniZooKeeperCluster zkCluster = null;
public class HBaseTestingUtility extends HBaseCommonTestingUtility {
  private Configuration conf;
  private MiniZooKeeperCluster zkCluster = null;

  /**
   * The default number of regions per regionserver when creating a pre-split

@@ -140,9 +139,6 @@ public class HBaseTestingUtility {

  private String hadoopLogDir;

  /** Directory where we put the data for this instance of HBaseTestingUtility */
  private File dataTestDir = null;

  /** Directory (a subdirectory of dataTestDir) used by the dfs cluster if any */
  private File clusterTestDir = null;

@@ -159,17 +155,6 @@
   */
  private static final String TEST_DIRECTORY_KEY = "test.build.data";

  /**
   * System property key to get base test directory value
   */
  public static final String BASE_TEST_DIRECTORY_KEY =
    "test.build.data.basedirectory";

  /**
   * Default base directory for test output.
   */
  public static final String DEFAULT_BASE_TEST_DIRECTORY = "target/test-data";

  /** Filesystem URI used for map-reduce mini-cluster setup */
  private static String FS_URI;

@@ -244,18 +229,77 @@
  }

  /**
   * @return Where to write test data on local filesystem; usually
   * {@link #DEFAULT_BASE_TEST_DIRECTORY}
   * Should not be used by the unit tests, hence its's private.
   * Unit test will use a subdirectory of this directory.
   * @see #setupDataTestDir()
   * @see #getTestFileSystem()
   * Home our data in a dir under {@link #DEFAULT_BASE_TEST_DIRECTORY}.
   * Give it a random name so can have many concurrent tests running if
   * we need to. It needs to amend the {@link #TEST_DIRECTORY_KEY}
   * System property, as it's what minidfscluster bases
   * it data dir on. Moding a System property is not the way to do concurrent
   * instances -- another instance could grab the temporary
   * value unintentionally -- but not anything can do about it at moment;
   * single instance only is how the minidfscluster works.
   *
   * We also create the underlying directory for
   * hadoop.log.dir, mapred.local.dir and hadoop.tmp.dir, and set the values
   * in the conf, and as a system property for hadoop.tmp.dir
   *
   * @return The calculated data test build directory, if newly-created.
   */
  private Path getBaseTestDir() {
    String PathName = System.getProperty(
      BASE_TEST_DIRECTORY_KEY, DEFAULT_BASE_TEST_DIRECTORY);
  @Override
  protected Path setupDataTestDir() {
    Path testPath = super.setupDataTestDir();
    if (null == testPath) {
      return null;
    }

    return new Path(PathName);
    createSubDirAndSystemProperty(
      "hadoop.log.dir",
      testPath, "hadoop-log-dir");

    // This is defaulted in core-default.xml to /tmp/hadoop-${user.name}, but
    // we want our own value to ensure uniqueness on the same machine
    createSubDirAndSystemProperty(
      "hadoop.tmp.dir",
      testPath, "hadoop-tmp-dir");

    // Read and modified in org.apache.hadoop.mapred.MiniMRCluster
    createSubDir(
      "mapred.local.dir",
      testPath, "mapred-local-dir");

    return testPath;
  }

  private void createSubDir(String propertyName, Path parent, String subDirName){
    Path newPath= new Path(parent, subDirName);
    File newDir = new File(newPath.toString()).getAbsoluteFile();
    newDir.deleteOnExit();
    conf.set(propertyName, newDir.getAbsolutePath());
  }

  private void createSubDirAndSystemProperty(
    String propertyName, Path parent, String subDirName){

    String sysValue = System.getProperty(propertyName);

    if (sysValue != null) {
      // There is already a value set. So we do nothing but hope
      // that there will be no conflicts
      LOG.info("System.getProperty(\""+propertyName+"\") already set to: "+
        sysValue + " so I do NOT create it in " + parent);
      String confValue = conf.get(propertyName);
      if (confValue != null && !confValue.endsWith(sysValue)){
        LOG.warn(
          propertyName + " property value differs in configuration and system: "+
          "Configuration="+confValue+" while System="+sysValue+
          " Erasing configuration value by system value."
        );
      }
      conf.set(propertyName, sysValue);
    } else {
      // Ok, it's not set, so we create it as a subdirectory
      createSubDir(propertyName, parent, subDirName);
      System.setProperty(propertyName, conf.get(propertyName));
    }
  }

  /**

@@ -269,19 +313,6 @@ public class HBaseTestingUtility {
    return new Path(fs.getWorkingDirectory(), "test-data");
  }

  /**
   * @return Where to write test data on local filesystem, specific to
   * the test. Useful for tests that do not use a cluster.
   * Creates it if it does not exist already.
   * @see #getTestFileSystem()
   */
  public Path getDataTestDir() {
    if (this.dataTestDir == null){
      setupDataTestDir();
    }
    return new Path(this.dataTestDir.getAbsolutePath());
  }

  /**
   * @return Where the DFS cluster will write data on the local subsystem.
   * Creates it if it does not exist already. A subdir of {@link #getBaseTestDir()}

@@ -312,16 +343,6 @@ public class HBaseTestingUtility {
    LOG.info("Created new mini-cluster data directory: " + clusterTestDir);
  }

  /**
   * @param subdirName
   * @return Path to a subdirectory named <code>subdirName</code> under
   * {@link #getDataTestDir()}.
   * Does *NOT* create it if it does not exist.
   */
  public Path getDataTestDir(final String subdirName) {
    return new Path(getDataTestDir(), subdirName);
  }

  /**
   * Returns a Path in the test filesystem, obtained from {@link #getTestFileSystem()}
   * to write temporary test data. Call this method after setting up the mini dfs cluster

@@ -347,85 +368,6 @@ public class HBaseTestingUtility {
    return new Path(getDataTestDirOnTestFS(), subdirName);
  }

  /**
   * Home our data in a dir under {@link #DEFAULT_BASE_TEST_DIRECTORY}.
   * Give it a random name so can have many concurrent tests running if
   * we need to. It needs to amend the {@link #TEST_DIRECTORY_KEY}
   * System property, as it's what minidfscluster bases
   * it data dir on. Moding a System property is not the way to do concurrent
   * instances -- another instance could grab the temporary
   * value unintentionally -- but not anything can do about it at moment;
   * single instance only is how the minidfscluster works.
   *
   * We also create the underlying directory for
   * hadoop.log.dir, mapred.local.dir and hadoop.tmp.dir, and set the values
   * in the conf, and as a system property for hadoop.tmp.dir
   *
   * @return The calculated data test build directory.
   */
  private void setupDataTestDir() {
    if (this.dataTestDir != null) {
      LOG.warn("Data test dir already setup in " +
        dataTestDir.getAbsolutePath());
      return;
    }

    String randomStr = UUID.randomUUID().toString();
    Path testPath= new Path(getBaseTestDir(), randomStr);

    this.dataTestDir = new File(testPath.toString()).getAbsoluteFile();
    this.dataTestDir.deleteOnExit();

    createSubDirAndSystemProperty(
      "hadoop.log.dir",
      testPath, "hadoop-log-dir");

    // This is defaulted in core-default.xml to /tmp/hadoop-${user.name}, but
    // we want our own value to ensure uniqueness on the same machine
    createSubDirAndSystemProperty(
      "hadoop.tmp.dir",
      testPath, "hadoop-tmp-dir");

    // Read and modified in org.apache.hadoop.mapred.MiniMRCluster
    createSubDir(
      "mapred.local.dir",
      testPath, "mapred-local-dir");
  }

  private void createSubDir(String propertyName, Path parent, String subDirName){
    Path newPath= new Path(parent, subDirName);
    File newDir = new File(newPath.toString()).getAbsoluteFile();
    newDir.deleteOnExit();
    conf.set(propertyName, newDir.getAbsolutePath());
  }

  private void createSubDirAndSystemProperty(
    String propertyName, Path parent, String subDirName){

    String sysValue = System.getProperty(propertyName);

    if (sysValue != null) {
      // There is already a value set. So we do nothing but hope
      // that there will be no conflicts
      LOG.info("System.getProperty(\""+propertyName+"\") already set to: "+
        sysValue + " so I do NOT create it in " + this.dataTestDir.getAbsolutePath());
      String confValue = conf.get(propertyName);
      if (confValue != null && !confValue.endsWith(sysValue)){
        LOG.warn(
          propertyName + " property value differs in configuration and system: "+
          "Configuration="+confValue+" while System="+sysValue+
          " Erasing configuration value by system value."
        );
      }
      conf.set(propertyName, sysValue);
    } else {
      // Ok, it's not set, so we create it as a subdirectory
      createSubDir(propertyName, parent, subDirName);
      System.setProperty(propertyName, conf.get(propertyName));
    }
  }

  /**
   * Sets up a path in test filesystem to be used by tests
   */

@@ -442,9 +384,7 @@ public class HBaseTestingUtility {
    //the working directory, and create a unique sub dir there
    FileSystem fs = getTestFileSystem();
    if (fs.getUri().getScheme().equals(fs.getLocal(conf).getUri().getScheme())) {
      if (dataTestDir == null) {
        setupDataTestDir();
      }
      File dataTestDir = new File(getDataTestDir().toString());
      dataTestDirOnTestFS = new Path(dataTestDir.getAbsolutePath());
    } else {
      Path base = getBaseTestDirOnTestFS();

@@ -1705,7 +1645,7 @@ public class HBaseTestingUtility {
    //ensure that we have connection to the server before closing down, otherwise
    //the close session event will be eaten out before we start CONNECTING state
    long start = System.currentTimeMillis();
    while (newZK.getState() != States.CONNECTED
    while (newZK.getState() != States.CONNECTED
      && System.currentTimeMillis() - start < 1000) {
      Thread.sleep(1);
    }

@@ -1856,48 +1796,6 @@ public class HBaseTestingUtility {
    return HFileSystem.get(conf);
  }

  /**
   * @return True if we removed the test dirs
   * @throws IOException
   */
  boolean cleanupTestDir() throws IOException {
    if (deleteDir(this.dataTestDir)) {
      this.dataTestDir = null;
      return true;
    }
    return false;
  }

  /**
   * @param subdir Test subdir name.
   * @return True if we removed the test dir
   * @throws IOException
   */
  boolean cleanupTestDir(final String subdir) throws IOException {
    if (this.dataTestDir == null){
      return false;
    }
    return deleteDir(new File(this.dataTestDir, subdir));
  }

  /**
   * @param dir Directory to delete
   * @return True if we deleted it.
   * @throws IOException
   */
  boolean deleteDir(final File dir) throws IOException {
    if (dir != null && dir.exists()) {
      // Need to use deleteDirectory because File.delete required dir is empty.
      if (!FSUtils.deleteDirectory(FileSystem.getLocal(this.conf),
        new Path(dir.getAbsolutePath()))) {
        LOG.warn("Failed delete of " + dir.toString());
      } else {
        return true;
      }
    }
    return false;
  }

  public void waitTableAvailable(byte[] table, long timeoutMillis)
    throws InterruptedException, IOException {
    long startWait = System.currentTimeMillis();

@@ -42,20 +42,6 @@ import org.junit.runners.Suite;
 */
@Category(SmallTests.class)
public class TestCheckTestClasses {

  private FileFilter TEST_CLASS_FILE_FILTER = new FileFilter() {
    @Override
    public boolean accept(File file) {
      return file.isDirectory() || isTestClassFile(file);

    }
    private boolean isTestClassFile(File file) {
      String fileName = file.getName();
      return fileName.endsWith(".class")
        && (fileName.startsWith("Test") || fileName.startsWith("IntegrationTest"));
    }
  };

  /**
   * Throws an assertion if we find a test class without category (small/medium/large/integration).
   * List all the test classes without category in the assertion message.

@@ -63,124 +49,13 @@ public class TestCheckTestClasses {
  @Test
  public void checkClasses() throws Exception {
    List<Class<?>> badClasses = new java.util.ArrayList<Class<?>>();

    for (Class<?> c : findTestClasses()) {
      if (!existCategoryAnnotation(c, null)) {
    ClassTestFinder classFinder = new ClassTestFinder();
    for (Class<?> c : classFinder.findClasses(false)) {
      if (ClassTestFinder.getCategoryAnnotations(c).length == 0) {
        badClasses.add(c);
      }
    }

    assertTrue("There are " + badClasses.size() + " test classes without category: "
      + badClasses, badClasses.isEmpty());
  }

  /** Returns whether the class has @Category annotation having the xface value.
   */
  private boolean existCategoryAnnotation(Class<?> c, Class<?> xface) {
    Category category = c.getAnnotation(Category.class);

    if (category != null) {
      if (xface == null) {
        return true;
      }
      for (Class<?> cc : category.value()) {
        if (cc.equals(xface)) {
          return true;
        }
      }
    }
    return false;
  }

  /*
   * A class is considered as a test class if:
   * - it's not Abstract AND
   * - one or more of its methods is annotated with org.junit.Test OR
   * - the class is annotated with Suite.SuiteClasses
   */
  private boolean isTestClass(Class<?> c) {
    if (Modifier.isAbstract(c.getModifiers())) {
      return false;
    }

    if (c.getAnnotation(Suite.SuiteClasses.class) != null) {
      return true;
    }

    for (Method met : c.getMethods()) {
      if (met.getAnnotation(Test.class) != null) {
        return true;
      }
    }

    return false;
  }

  /**
   * Finds test classes which are annotated with @Category having xface value
   * @param xface the @Category value
   */
  public List<Class<?>> findTestClasses(Class<?> xface) throws ClassNotFoundException, IOException {
    List<Class<?>> classes = new ArrayList<Class<?>>();
    for (Class<?> c : findTestClasses()) {
      if (existCategoryAnnotation(c, xface)) {
        classes.add(c);
      }
    }
    return classes;
  }

  private List<Class<?>> findTestClasses() throws ClassNotFoundException, IOException {
    final String packageName = "org.apache.hadoop.hbase";
    final String path = packageName.replace('.', '/');

    Enumeration<URL> resources = this.getClass().getClassLoader().getResources(path);
    List<File> dirs = new ArrayList<File>();

    while (resources.hasMoreElements()) {
      URL resource = resources.nextElement();
      dirs.add(new File(resource.getFile()));
    }

    List<Class<?>> classes = new ArrayList<Class<?>>();
    for (File directory : dirs) {
      classes.addAll(findTestClasses(directory, packageName));
    }

    return classes;
  }

  private List<Class<?>> findTestClasses(File baseDirectory, String packageName)
    throws ClassNotFoundException {
    List<Class<?>> classes = new ArrayList<Class<?>>();
    if (!baseDirectory.exists()) {
      return classes;
    }

    File[] files = baseDirectory.listFiles(TEST_CLASS_FILE_FILTER);
    assertNotNull(files);
    Pattern p = Pattern.compile("hbase-hadoop\\d?-compat");
    for (File file : files) {
      final String fileName = file.getName();
      if (p.matcher(file.getAbsolutePath()).find()) {
        continue;
      }

      if (file.isDirectory()) {
        classes.addAll(findTestClasses(file, packageName + "." + fileName));
      } else {
        Class<?> c = Class.forName(
          packageName + '.' + fileName.substring(0, fileName.length() - 6),
          false,
          this.getClass().getClassLoader());

        if (isTestClass(c)) {
          classes.add(c);
        }
      }
    }

    return classes;
  }
}