HBASE-23621 Reduced the number of Checkstyle violations in tests of hbase-common
Signed-off-by: stack <stack@apache.org>
parent 4a39f0a2c5
commit 2267ab9399
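The diff below applies the same few mechanical Checkstyle fixes over and over: braces around single-statement if/for/while bodies, the diamond operator instead of repeated type arguments, Java 7 multi-catch in place of duplicated catch blocks, and removal of unused throws clauses and dead getters/setters. The following sketch is illustrative only — it is not part of the commit, and the class and method names are made up — it just condenses those recurring before/after patterns into one compilable example.

// Illustrative sketch, not part of the patch: the recurring Checkstyle fixes in one hypothetical class.
import java.util.ArrayList;
import java.util.List;

public class CheckstyleFixExamples {

  // Diamond operator: was "new ArrayList<String>()" before the cleanup.
  private final List<String> names = new ArrayList<>();

  // Braces on single-statement bodies: was "if (s != null) count++;" on one line.
  int countNonNull(Iterable<String> items) {
    int count = 0;
    for (String s : items) {
      if (s != null) {
        count++;
      }
    }
    return count;
  }

  // Multi-catch: was two catch blocks (ClassNotFoundException, LinkageError) with identical bodies.
  Class<?> tryLoad(String className) {
    try {
      return Class.forName(className);
    } catch (ClassNotFoundException | LinkageError e) {
      return null;
    }
  }
}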
@@ -15,7 +15,6 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
-
 package org.apache.hadoop.hbase;

 import java.io.File;
@@ -53,24 +52,32 @@ public class ClassFinder {

   public interface ResourcePathFilter {
     boolean isCandidatePath(String resourcePath, boolean isJar);
-  };
+  }

   public interface FileNameFilter {
     boolean isCandidateFile(String fileName, String absFilePath);
-  };
+  }

   public interface ClassFilter {
     boolean isCandidateClass(Class<?> c);
-  };
+  }

   public static class Not implements ResourcePathFilter, FileNameFilter, ClassFilter {
     private ResourcePathFilter resourcePathFilter;
     private FileNameFilter fileNameFilter;
     private ClassFilter classFilter;

-    public Not(ResourcePathFilter resourcePathFilter){this.resourcePathFilter = resourcePathFilter;}
-    public Not(FileNameFilter fileNameFilter){this.fileNameFilter = fileNameFilter;}
-    public Not(ClassFilter classFilter){this.classFilter = classFilter;}
+    public Not(ResourcePathFilter resourcePathFilter) {
+      this.resourcePathFilter = resourcePathFilter;
+    }
+
+    public Not(FileNameFilter fileNameFilter) {
+      this.fileNameFilter = fileNameFilter;
+    }
+
+    public Not(ClassFilter classFilter) {
+      this.classFilter = classFilter;
+    }

     @Override
     public boolean isCandidatePath(String resourcePath, boolean isJar) {
@@ -90,7 +97,10 @@ public class ClassFinder {
     ClassFilter[] classFilters;
     ResourcePathFilter[] resourcePathFilters;

-    public And(ClassFilter...classFilters) { this.classFilters = classFilters; }
+    public And(ClassFilter...classFilters) {
+      this.classFilters = classFilters;
+    }
+
     public And(ResourcePathFilter... resourcePathFilters) {
       this.resourcePathFilters = resourcePathFilters;
     }
@@ -120,10 +130,6 @@ public class ClassFinder {
     this(null, null, null, classLoader);
   }

-  public ClassFinder() {
-    this(ClassLoader.getSystemClassLoader());
-  }
-
   public ClassFinder(ResourcePathFilter resourcePathFilter, FileNameFilter fileNameFilter,
       ClassFilter classFilter) {
     this(resourcePathFilter, fileNameFilter, classFilter, ClassLoader.getSystemClassLoader());
@@ -180,7 +186,7 @@ public class ClassFinder {
       }
     }

-    Set<Class<?>> classes = new HashSet<Class<?>>();
+    Set<Class<?>> classes = new HashSet<>();
     for (File directory : dirs) {
       classes.addAll(findClassesFromFiles(directory, packageName, proceedOnExceptions));
     }
@@ -193,7 +199,7 @@ public class ClassFinder {
   private Set<Class<?>> findClassesFromJar(String jarFileName,
       String packageName, boolean proceedOnExceptions)
       throws IOException, ClassNotFoundException, LinkageError {
-    JarInputStream jarFile = null;
+    JarInputStream jarFile;
     try {
       jarFile = new JarInputStream(new FileInputStream(jarFileName));
     } catch (IOException ioEx) {
@@ -201,8 +207,8 @@ public class ClassFinder {
       throw ioEx;
     }

-    Set<Class<?>> classes = new HashSet<Class<?>>();
-    JarEntry entry = null;
+    Set<Class<?>> classes = new HashSet<>();
+    JarEntry entry;
     try {
       while (true) {
         try {
@@ -248,7 +254,7 @@ public class ClassFinder {

   private Set<Class<?>> findClassesFromFiles(File baseDirectory, String packageName,
       boolean proceedOnExceptions) throws ClassNotFoundException, LinkageError {
-    Set<Class<?>> classes = new HashSet<Class<?>>();
+    Set<Class<?>> classes = new HashSet<>();
     if (!baseDirectory.exists()) {
       LOG.warn("Failed to find " + baseDirectory.getAbsolutePath());
       return classes;
@@ -285,16 +291,11 @@ public class ClassFinder {
       Class<?> c = Class.forName(className, false, classLoader);
       boolean isCandidateClass = null == classFilter || classFilter.isCandidateClass(c);
       return isCandidateClass ? c : null;
-    } catch (ClassNotFoundException classNotFoundEx) {
+    } catch (ClassNotFoundException | LinkageError exception) {
       if (!proceedOnExceptions) {
-        throw classNotFoundEx;
+        throw exception;
       }
-      LOG.debug("Failed to instantiate or check " + className + ": " + classNotFoundEx);
-    } catch (LinkageError linkageEx) {
-      if (!proceedOnExceptions) {
-        throw linkageEx;
-      }
-      LOG.debug("Failed to instantiate or check " + className + ": " + linkageEx);
+      LOG.debug("Failed to instantiate or check " + className + ": " + exception);
     }
     return null;
   }
@@ -313,5 +314,5 @@ public class ClassFinder {
         && (null == nameFilter
             || nameFilter.isCandidateFile(file.getName(), file.getAbsolutePath())));
     }
-  };
-};
+  }
+}
@@ -76,9 +76,8 @@ public class HBaseCommonTestingUtility {
   private File dataTestDir = null;

   /**
-   * @return Where to write test data on local filesystem, specific to
-   * the test. Useful for tests that do not use a cluster.
-   * Creates it if it does not exist already.
+   * @return Where to write test data on local filesystem, specific to the test. Useful for tests
+   * that do not use a cluster. Creates it if it does not exist already.
    */
   public Path getDataTestDir() {
     if (this.dataTestDir == null) {
@@ -88,10 +87,9 @@ public class HBaseCommonTestingUtility {
   }

   /**
-   * @param subdirName
-   * @return Path to a subdirectory named <code>subdirName</code> under
-   * {@link #getDataTestDir()}.
-   * Does *NOT* create it if it does not exist.
+   * @param subdirName the name of the subdirectory in the test data directory
+   * @return Path to a subdirectory named {@code subdirName} under
+   * {@link #getDataTestDir()}. Does *NOT* create it if it does not exist.
    */
   public Path getDataTestDir(final String subdirName) {
     return new Path(getDataTestDir(), subdirName);
@@ -115,7 +113,10 @@ public class HBaseCommonTestingUtility {
     this.dataTestDir = new File(testPath.toString()).getAbsoluteFile();
     // Set this property so if mapreduce jobs run, they will use this as their home dir.
     System.setProperty("test.build.dir", this.dataTestDir.toString());
-    if (deleteOnExit()) this.dataTestDir.deleteOnExit();
+
+    if (deleteOnExit()) {
+      this.dataTestDir.deleteOnExit();
+    }

     createSubDir("hbase.local.dir", testPath, "hbase-local-dir");

@@ -125,7 +126,11 @@ public class HBaseCommonTestingUtility {
   protected void createSubDir(String propertyName, Path parent, String subDirName) {
     Path newPath = new Path(parent, subDirName);
     File newDir = new File(newPath.toString()).getAbsoluteFile();
-    if (deleteOnExit()) newDir.deleteOnExit();
+
+    if (deleteOnExit()) {
+      newDir.deleteOnExit();
+    }
+
     conf.set(propertyName, newDir.getAbsolutePath());
   }

@@ -140,9 +145,8 @@ public class HBaseCommonTestingUtility {

   /**
    * @return True if we removed the test dirs
-   * @throws IOException
    */
-  public boolean cleanupTestDir() throws IOException {
+  public boolean cleanupTestDir() {
     if (deleteDir(this.dataTestDir)) {
       this.dataTestDir = null;
       return true;
@@ -153,9 +157,8 @@ public class HBaseCommonTestingUtility {
   /**
    * @param subdir Test subdir name.
    * @return True if we removed the test dir
-   * @throws IOException
    */
-  boolean cleanupTestDir(final String subdir) throws IOException {
+  boolean cleanupTestDir(final String subdir) {
     if (this.dataTestDir == null) {
       return false;
     }
@@ -164,9 +167,9 @@ public class HBaseCommonTestingUtility {

   /**
    * @return Where to write test data on local filesystem; usually
    * {@link #DEFAULT_BASE_TEST_DIRECTORY}
    * Should not be used by the unit tests, hence its's private.
    * Unit test will use a subdirectory of this directory.
    * @see #setupDataTestDir()
    */
   private Path getBaseTestDir() {
@@ -185,9 +188,8 @@ public class HBaseCommonTestingUtility {
   /**
    * @param dir Directory to delete
    * @return True if we deleted it.
-   * @throws IOException
    */
-  boolean deleteDir(final File dir) throws IOException {
+  boolean deleteDir(final File dir) {
     if (dir == null || !dir.exists()) {
       return true;
     }
@@ -195,7 +197,10 @@ public class HBaseCommonTestingUtility {
     do {
       ntries += 1;
       try {
-        if (deleteOnExit()) FileUtils.deleteDirectory(dir);
+        if (deleteOnExit()) {
+          FileUtils.deleteDirectory(dir);
+        }
+
         return true;
       } catch (IOException ex) {
         LOG.warn("Failed to delete " + dir.getAbsolutePath());
@@ -16,7 +16,6 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
-
 package org.apache.hadoop.hbase;

 import org.apache.commons.logging.Log;
@@ -46,7 +45,6 @@ public class ResourceChecker {
     this.tagLine = tagLine;
   }

-
   /**
    * Class to implement for each type of resource.
    */
@@ -83,21 +81,22 @@ public class ResourceChecker {

     /**
      * The value for the resource.
-     * @param phase
+     * @param phase the {@link Phase} to get the value for
      */
     abstract public int getVal(Phase phase);

     /*
      * Retrieves List of Strings which would be logged in logEndings()
      */
-    public List<String> getStringsToLog() { return null; }
+    public List<String> getStringsToLog() {
+      return null;
+    }
   }

   private List<ResourceAnalyzer> ras = new ArrayList<ResourceAnalyzer>();
   private int[] initialValues;
   private int[] endingValues;

-
   private void fillInit() {
     initialValues = new int[ras.size()];
     fill(Phase.INITIAL, initialValues);
@@ -141,7 +140,11 @@ public class ResourceChecker {
     StringBuilder sb = new StringBuilder();
     for (ResourceAnalyzer ra : ras) {
       int cur = initialValues[i++];
-      if (sb.length() > 0) sb.append(", ");
+
+      if (sb.length() > 0) {
+        sb.append(", ");
+      }
+
       sb.append(ra.getName()).append("=").append(cur);
     }
     LOG.info("before: " + tagLine + " " + sb);
@@ -156,7 +159,11 @@ public class ResourceChecker {
     for (ResourceAnalyzer ra : ras) {
       int curP = initialValues[i];
       int curN = endingValues[i++];
-      if (sb.length() > 0) sb.append(", ");
+
+      if (sb.length() > 0) {
+        sb.append(", ");
+      }
+
       sb.append(ra.getName()).append("=").append(curN).append(" (was ").append(curP).append(")");
       if (curN > curP) {
         List<String> strings = ra.getStringsToLog();
@@ -171,7 +178,6 @@ public class ResourceChecker {
     LOG.info("after: " + tagLine + " " + sb);
   }

-
   /**
    * To be called as the beginning of a test method:
    * - measure the resources
@@ -15,7 +15,6 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
-
 package org.apache.hadoop.hbase;

 import static org.junit.Assert.assertEquals;
@@ -38,7 +37,7 @@ public class TestCellUtil {
   /**
    * CellScannable used in test. Returns a {@link TestCellScanner}
    */
-  private class TestCellScannable implements CellScannable {
+  private static class TestCellScannable implements CellScannable {
     private final int cellsCount;
     TestCellScannable(final int cellsCount) {
       this.cellsCount = cellsCount;
@@ -47,7 +46,7 @@ public class TestCellUtil {
     public CellScanner cellScanner() {
       return new TestCellScanner(this.cellsCount);
     }
-  };
+  }

   /**
    * CellScanner used in test.
@@ -67,7 +66,7 @@ public class TestCellUtil {
     }

     @Override
-    public boolean advance() throws IOException {
+    public boolean advance() {
       if (this.count < cellsCount) {
         this.current = new TestCell(this.count);
         this.count++;
@@ -221,34 +220,35 @@ public class TestCellUtil {
       // TODO Auto-generated method stub
       return 0;
     }
-  };
+  }

   /**
    * Was overflowing if 100k or so lists of cellscanners to return.
-   * @throws IOException
    */
   @Test
   public void testCreateCellScannerOverflow() throws IOException {
-    consume(doCreateCellScanner(1, 1), 1 * 1);
-    consume(doCreateCellScanner(3, 0), 3 * 0);
+    consume(doCreateCellScanner(1, 1), 1);
+    consume(doCreateCellScanner(3, 0), 0);
     consume(doCreateCellScanner(3, 3), 3 * 3);
-    consume(doCreateCellScanner(0, 1), 0 * 1);
+    consume(doCreateCellScanner(0, 1), 0);
     // Do big number. See HBASE-11813 for why.
     final int hundredK = 100000;
-    consume(doCreateCellScanner(hundredK, 0), hundredK * 0);
+    consume(doCreateCellScanner(hundredK, 0), 0);
     consume(doCreateCellArray(1), 1);
     consume(doCreateCellArray(0), 0);
     consume(doCreateCellArray(3), 3);
-    List<CellScannable> cells = new ArrayList<CellScannable>(hundredK);
+    List<CellScannable> cells = new ArrayList<>(hundredK);
     for (int i = 0; i < hundredK; i++) {
       cells.add(new TestCellScannable(1));
     }
-    consume(CellUtil.createCellScanner(cells), hundredK * 1);
-    NavigableMap<byte [], List<Cell>> m = new TreeMap<byte [], List<Cell>>(Bytes.BYTES_COMPARATOR);
-    List<Cell> cellArray = new ArrayList<Cell>(hundredK);
-    for (int i = 0; i < hundredK; i++) cellArray.add(new TestCell(i));
+    consume(CellUtil.createCellScanner(cells), hundredK);
+    NavigableMap<byte [], List<Cell>> m = new TreeMap<>(Bytes.BYTES_COMPARATOR);
+    List<Cell> cellArray = new ArrayList<>(hundredK);
+    for (int i = 0; i < hundredK; i++) {
+      cellArray.add(new TestCell(i));
+    }
     m.put(new byte [] {'f'}, cellArray);
-    consume(CellUtil.createCellScanner(m), hundredK * 1);
+    consume(CellUtil.createCellScanner(m), hundredK);
   }

   private CellScanner doCreateCellArray(final int itemsPerList) {
@@ -259,9 +259,8 @@ public class TestCellUtil {
     return CellUtil.createCellScanner(cells);
   }

-  private CellScanner doCreateCellScanner(final int listsCount, final int itemsPerList)
-      throws IOException {
-    List<CellScannable> cells = new ArrayList<CellScannable>(listsCount);
+  private CellScanner doCreateCellScanner(final int listsCount, final int itemsPerList) {
+    List<CellScannable> cells = new ArrayList<>(listsCount);
     for (int i = 0; i < listsCount; i++) {
       CellScannable cs = new CellScannable() {
         @Override
@@ -276,7 +275,9 @@ public class TestCellUtil {

   private void consume(final CellScanner scanner, final int expected) throws IOException {
     int count = 0;
-    while (scanner.advance()) count++;
+    while (scanner.advance()) {
+      count++;
+    }
     Assert.assertEquals(expected, count);
   }

@@ -384,7 +385,7 @@ public class TestCellUtil {
   @Test
   public void testToString() {
     byte [] row = Bytes.toBytes("row");
-    long ts = 123l;
+    long ts = 123L;
     // Make a KeyValue and a Cell and see if same toString result.
     KeyValue kv = new KeyValue(row, HConstants.EMPTY_BYTE_ARRAY, HConstants.EMPTY_BYTE_ARRAY,
         ts, KeyValue.Type.Minimum, HConstants.EMPTY_BYTE_ARRAY);
@@ -400,7 +401,6 @@ public class TestCellUtil {
         HConstants.EMPTY_BYTE_ARRAY);
     cellToString = CellUtil.getCellKeyAsString(cell);
     assertEquals(kv.toString(), cellToString);
-
   }

   @Test
@@ -1,5 +1,4 @@
 /**
- *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
@@ -53,7 +52,6 @@ import org.junit.rules.TestName;

 @Category(SmallTests.class)
 public class TestClassFinder {
-
   private static final Log LOG = LogFactory.getLog(TestClassFinder.class);

   @Rule public TestName name = new TestName();
@@ -89,7 +87,7 @@ public class TestClassFinder {
   }

   @AfterClass
-  public static void deleteTestDir() throws IOException {
+  public static void deleteTestDir() {
     testUtil.cleanupTestDir(TestClassFinder.class.getSimpleName());
   }

@@ -185,8 +183,7 @@ public class TestClassFinder {
   }

   private static String createAndLoadJar(final String packageNameSuffix,
-      final String classNamePrefix, final long counter)
-      throws Exception {
+      final String classNamePrefix, final long counter) throws Exception {
     FileAndPath c1 = compileTestClass(counter, packageNameSuffix, classNamePrefix);
     FileAndPath c2 = compileTestClass(counter, packageNameSuffix, PREFIX + "1");
     FileAndPath c3 = compileTestClass(counter, packageNameSuffix, PREFIX + classNamePrefix + "2");
@@ -241,7 +238,9 @@ public class TestClassFinder {

   private static boolean contains(final Set<Class<?>> classes, final String simpleName) {
     for (Class<?> c: classes) {
-      if (c.getSimpleName().equals(simpleName)) return true;
+      if (c.getSimpleName().equals(simpleName)) {
+        return true;
+      }
     }
     return false;
   }
@@ -299,8 +298,7 @@ public class TestClassFinder {
   @Test
   public void testClassFinderFiltersByPathInDirs() throws Exception {
     final String hardcodedThisSubdir = "hbase-common";
-    final ClassFinder.ResourcePathFilter notExcJarFilter =
-        new ClassFinder.ResourcePathFilter() {
+    final ClassFinder.ResourcePathFilter notExcJarFilter = new ClassFinder.ResourcePathFilter() {
       @Override
       public boolean isCandidatePath(String resourcePath, boolean isJar) {
         return isJar || !resourcePath.contains(hardcodedThisSubdir);
@@ -383,7 +381,7 @@ public class TestClassFinder {
     // Directory entries for all packages have to be added explicitly for
     // resources to be findable via ClassLoader. Directory entries must end
     // with "/"; the initial one is expected to, also.
-    Set<String> pathsInJar = new HashSet<String>();
+    Set<String> pathsInJar = new HashSet<>();
     for (FileAndPath fileAndPath : filesInJar) {
       String pathToAdd = fileAndPath.path;
       while (pathsInJar.add(pathToAdd)) {
@@ -421,7 +419,6 @@ public class TestClassFinder {

   // Java 11 workaround - Custom class loader to expose addUrl method of URLClassLoader
   private static class CustomClassloader extends URLClassLoader {
-
     public CustomClassloader(URL[] urls, ClassLoader parentLoader) {
       super(urls, parentLoader);
     }
@@ -15,7 +15,6 @@
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
-
 package org.apache.hadoop.hbase;

 import static org.junit.Assert.assertEquals;
@@ -40,7 +39,6 @@ import com.google.common.collect.ImmutableMap;

 @Category(SmallTests.class)
 public class TestHBaseConfiguration {
-
   private static final Log LOG = LogFactory.getLog(TestHBaseConfiguration.class);

   private static HBaseCommonTestingUtility UTIL = new HBaseCommonTestingUtility();
@@ -51,7 +49,7 @@ public class TestHBaseConfiguration {
   }

   @Test
-  public void testSubset() throws Exception {
+  public void testSubset() {
     Configuration conf = HBaseConfiguration.create();
     // subset is used in TableMapReduceUtil#initCredentials to support different security
     // configurations between source and destination clusters, so we'll use that as an example
@@ -126,7 +124,6 @@ public class TestHBaseConfiguration {

     private static Object hadoopCredProviderFactory = null;
     private static Method getProvidersMethod = null;
-    private static Method getAliasesMethod = null;
     private static Method getCredentialEntryMethod = null;
     private static Method getCredentialMethod = null;
     private static Method createCredentialEntryMethod = null;
@@ -157,7 +154,7 @@ public class TestHBaseConfiguration {
       hadoopClassesAvailable = false;

       // Load Hadoop CredentialProviderFactory
-      Class<?> hadoopCredProviderFactoryClz = null;
+      Class<?> hadoopCredProviderFactoryClz;
       try {
         hadoopCredProviderFactoryClz = Class
             .forName(HADOOP_CRED_PROVIDER_FACTORY_CLASS_NAME);
@@ -177,13 +174,13 @@ public class TestHBaseConfiguration {
           HADOOP_CRED_PROVIDER_FACTORY_GET_PROVIDERS_METHOD_NAME,
           Configuration.class);
       // Load Hadoop CredentialProvider
-      Class<?> hadoopCredProviderClz = null;
+      Class<?> hadoopCredProviderClz;
       hadoopCredProviderClz = Class.forName(HADOOP_CRED_PROVIDER_CLASS_NAME);
       getCredentialEntryMethod = loadMethod(hadoopCredProviderClz,
           HADOOP_CRED_PROVIDER_GET_CREDENTIAL_ENTRY_METHOD_NAME, String.class);

-      getAliasesMethod = loadMethod(hadoopCredProviderClz,
-          HADOOP_CRED_PROVIDER_GET_ALIASES_METHOD_NAME);
+      Method getAliasesMethod =
+          loadMethod(hadoopCredProviderClz, HADOOP_CRED_PROVIDER_GET_ALIASES_METHOD_NAME);

       createCredentialEntryMethod = loadMethod(hadoopCredProviderClz,
           HADOOP_CRED_PROVIDER_CREATE_CREDENTIAL_ENTRY_METHOD_NAME,
@@ -193,7 +190,7 @@ public class TestHBaseConfiguration {
           HADOOP_CRED_PROVIDER_FLUSH_METHOD_NAME);

       // Load Hadoop CredentialEntry
-      Class<?> hadoopCredentialEntryClz = null;
+      Class<?> hadoopCredentialEntryClz;
       try {
         hadoopCredentialEntryClz = Class
             .forName(HADOOP_CRED_ENTRY_CLASS_NAME);
@@ -212,17 +209,15 @@ public class TestHBaseConfiguration {
       LOG.info("Credential provider classes have been" +
           " loaded and initialized successfully through reflection.");
       return true;
-
     }

     private Method loadMethod(Class<?> clz, String name, Class<?>... classes)
         throws Exception {
-      Method method = null;
+      Method method;
       try {
         method = clz.getMethod(name, classes);
       } catch (SecurityException e) {
-        fail("security exception caught for: " + name + " in " +
-            clz.getCanonicalName());
+        fail("security exception caught for: " + name + " in " + clz.getCanonicalName());
         throw e;
       } catch (NoSuchMethodException e) {
         LOG.error("Failed to load the " + name + ": " + e);
@@ -242,19 +237,11 @@ public class TestHBaseConfiguration {
     @SuppressWarnings("unchecked")
     protected List<Object> getCredentialProviders(Configuration conf) {
       // Call CredentialProviderFactory.getProviders(Configuration)
-      Object providersObj = null;
+      Object providersObj;
       try {
         providersObj = getProvidersMethod.invoke(hadoopCredProviderFactory,
             conf);
-      } catch (IllegalArgumentException e) {
-        LOG.error("Failed to invoke: " + getProvidersMethod.getName() +
-            ": " + e);
-        return null;
-      } catch (IllegalAccessException e) {
-        LOG.error("Failed to invoke: " + getProvidersMethod.getName() +
-            ": " + e);
-        return null;
-      } catch (InvocationTargetException e) {
+      } catch (IllegalArgumentException | InvocationTargetException | IllegalAccessException e) {
         LOG.error("Failed to invoke: " + getProvidersMethod.getName() +
             ": " + e);
         return null;
@@ -281,7 +268,6 @@ public class TestHBaseConfiguration {
      */
     public void createEntry(Configuration conf, String name, char[] credential)
         throws Exception {
-
       if (!isHadoopCredentialProviderAvailable()) {
         return;
       }
@@ -311,30 +297,17 @@ public class TestHBaseConfiguration {
      */
     private void createEntryInProvider(Object credentialProvider,
         String name, char[] credential) throws Exception {
-
       if (!isHadoopCredentialProviderAvailable()) {
         return;
       }

       try {
         createCredentialEntryMethod.invoke(credentialProvider, name, credential);
-      } catch (IllegalArgumentException e) {
-        return;
-      } catch (IllegalAccessException e) {
-        return;
-      } catch (InvocationTargetException e) {
+      } catch (IllegalArgumentException | InvocationTargetException | IllegalAccessException e) {
         return;
       }

-      try {
-        flushMethod.invoke(credentialProvider);
-      } catch (IllegalArgumentException e) {
-        throw e;
-      } catch (IllegalAccessException e) {
-        throw e;
-      } catch (InvocationTargetException e) {
-        throw e;
-      }
+      flushMethod.invoke(credentialProvider);
     }
   }
 }
@@ -43,7 +43,6 @@ import com.google.common.io.CountingOutputStream;

 @Category(SmallTests.class)
 public class TestCellCodecWithTags {
-
   @Test
   public void testCellWithTag() throws IOException {
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
@@ -110,4 +109,4 @@ public class TestCellCodecWithTags {
     dis.close();
     assertEquals(offset, cis.getCount());
   }
-}
\ No newline at end of file
+}
@@ -43,7 +43,6 @@ import com.google.common.io.CountingOutputStream;

 @Category(SmallTests.class)
 public class TestKeyValueCodecWithTags {
-
   @Test
   public void testKeyValueWithTag() throws IOException {
     ByteArrayOutputStream baos = new ByteArrayOutputStream();
@@ -110,4 +109,4 @@ public class TestKeyValueCodecWithTags {
     dis.close();
     assertEquals(offset, cis.getCount());
   }
-}
\ No newline at end of file
+}
@@ -1,22 +1,24 @@
 /*
- * Licensed to the Apache Software Foundation (ASF) under one or more
- * contributor license agreements. See the NOTICE file distributed with this
- * work for additional information regarding copyright ownership. The ASF
- * licenses this file to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
- * WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
- * License for the specific language governing permissions and limitations under
- * the License.
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
 */
 package org.apache.hadoop.hbase.io.crypto;

-import static org.junit.Assert.*;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;

 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
@@ -36,7 +38,6 @@ import org.junit.experimental.categories.Category;

 @Category(SmallTests.class)
 public class TestEncryption {
-
   private static final Log LOG = LogFactory.getLog(TestEncryption.class);

   @Test
@@ -45,7 +46,7 @@ public class TestEncryption {
     Bytes.random(key);
     byte[] iv = new byte[16];
     Bytes.random(iv);
-    for (int size: new int[] { 4, 8, 16, 32, 64, 128, 256, 512 } ) {
+    for (int size: new int[] { 4, 8, 16, 32, 64, 128, 256, 512 }) {
       checkTransformSymmetry(key, iv, getRandomBlock(size));
     }
   }
@@ -56,7 +57,7 @@ public class TestEncryption {
     Bytes.random(key);
     byte[] iv = new byte[16];
     Bytes.random(iv);
-    for (int size: new int[] { 256 * 1024, 512 * 1024, 1024 * 1024 } ) {
+    for (int size: new int[] { 256 * 1024, 512 * 1024, 1024 * 1024 }) {
       checkTransformSymmetry(key, iv, getRandomBlock(size));
     }
   }
@@ -67,7 +68,7 @@ public class TestEncryption {
     Bytes.random(key);
     byte[] iv = new byte[16];
     Bytes.random(iv);
-    for (int size: new int[] { 3, 7, 11, 23, 47, 79, 119, 175 } ) {
+    for (int size: new int[] { 3, 7, 11, 23, 47, 79, 119, 175 }) {
       checkTransformSymmetry(key, iv, getRandomBlock(size));
     }
   }
@@ -78,7 +79,7 @@ public class TestEncryption {
     Bytes.random(key);
     byte[] iv = new byte[16];
     Bytes.random(iv);
-    for (int size: new int[] { 4 * 1024, 8 * 1024, 64 * 1024, 128 * 1024 } ) {
+    for (int size: new int[] { 4 * 1024, 8 * 1024, 64 * 1024, 128 * 1024 }) {
       checkTransformSymmetry(key, iv, getRandomBlock(size));
     }
   }
@@ -121,5 +122,4 @@ public class TestEncryption {
     Bytes.random(b);
     return b;
   }
-
 }
@@ -18,6 +18,7 @@ package org.apache.hadoop.hbase.util;

 import java.nio.ByteBuffer;
 import java.util.ArrayList;
+import java.util.Arrays;
 import java.util.Collections;
 import java.util.HashMap;
 import java.util.List;
@@ -88,7 +89,6 @@ public class RedundantKVGenerator {
     );
   }

-
   /**
    * Various configuration options for generating key values
    * @param randomizer pick things by random
@@ -111,8 +111,7 @@ public class RedundantKVGenerator {
       float chanceForZeroValue,

       int baseTimestampDivide,
-      int timestampDiffSize
-      ) {
+      int timestampDiffSize) {
     this.randomizer = randomizer;

     this.commonPrefix = DEFAULT_COMMON_PREFIX;
@@ -140,33 +139,33 @@ public class RedundantKVGenerator {
   private Random randomizer;

   // row settings
-  private byte[] commonPrefix;//global prefix before rowPrefixes
+  private byte[] commonPrefix; //global prefix before rowPrefixes
   private int numberOfRowPrefixes;
-  private int averagePrefixLength = 6;
-  private int prefixLengthVariance = 3;
-  private int averageSuffixLength = 3;
-  private int suffixLengthVariance = 3;
-  private int numberOfRows = 500;
+  private int averagePrefixLength;
+  private int prefixLengthVariance;
+  private int averageSuffixLength;
+  private int suffixLengthVariance;
+  private int numberOfRows;

-  //family
+  // family
   private byte[] family;

   // qualifier
-  private float chanceForSameQualifier = 0.5f;
-  private float chanceForSimilarQualifier = 0.4f;
-  private int averageQualifierLength = 9;
-  private int qualifierLengthVariance = 3;
+  private float chanceForSameQualifier;
+  private float chanceForSimilarQualifier;
+  private int averageQualifierLength;
+  private int qualifierLengthVariance;

-  private int columnFamilyLength = 9;
-  private int valueLength = 8;
-  private float chanceForZeroValue = 0.5f;
+  private int columnFamilyLength;
+  private int valueLength;
+  private float chanceForZeroValue;

-  private int baseTimestampDivide = 1000000;
-  private int timestampDiffSize = 100000000;
+  private int baseTimestampDivide;
+  private int timestampDiffSize;

   private List<byte[]> generateRows() {
     // generate prefixes
-    List<byte[]> prefixes = new ArrayList<byte[]>();
+    List<byte[]> prefixes = new ArrayList<>();
     prefixes.add(new byte[0]);
     for (int i = 1; i < numberOfRowPrefixes; ++i) {
       int prefixLength = averagePrefixLength;
@@ -174,12 +173,11 @@ public class RedundantKVGenerator {
           prefixLengthVariance;
       byte[] newPrefix = new byte[prefixLength];
       randomizer.nextBytes(newPrefix);
-      byte[] newPrefixWithCommon = newPrefix;
-      prefixes.add(newPrefixWithCommon);
+      prefixes.add(newPrefix);
     }

     // generate rest of the row
-    List<byte[]> rows = new ArrayList<byte[]>();
+    List<byte[]> rows = new ArrayList<>();
     for (int i = 0; i < numberOfRows; ++i) {
       int suffixLength = averageSuffixLength;
       suffixLength += randomizer.nextInt(2 * suffixLengthVariance + 1) -
@@ -202,16 +200,17 @@ public class RedundantKVGenerator {
   public List<KeyValue> generateTestKeyValues(int howMany) {
     return generateTestKeyValues(howMany, false);
   }
+
   /**
    * Generate test data useful to test encoders.
    * @param howMany How many Key values should be generated.
    * @return sorted list of key values
    */
   public List<KeyValue> generateTestKeyValues(int howMany, boolean useTags) {
-    List<KeyValue> result = new ArrayList<KeyValue>();
+    List<KeyValue> result = new ArrayList<>();

     List<byte[]> rows = generateRows();
-    Map<Integer, List<byte[]>> rowsToQualifier = new HashMap<Integer, List<byte[]>>();
+    Map<Integer, List<byte[]>> rowsToQualifier = new HashMap<>();

     if(family==null){
       family = new byte[columnFamilyLength];
@@ -268,16 +267,14 @@ public class RedundantKVGenerator {
       }

       if (randomizer.nextFloat() < chanceForZeroValue) {
-        for (int j = 0; j < value.length; ++j) {
-          value[j] = (byte) 0;
-        }
+        Arrays.fill(value, (byte) 0);
       } else {
         randomizer.nextBytes(value);
       }

       if (useTags) {
-        result.add(new KeyValue(row, family, qualifier, timestamp, value, new Tag[] { new Tag(
-            (byte) 1, "value1") }));
+        result.add(new KeyValue(row, family, qualifier, timestamp, value, new Tag[] {
+            new Tag((byte) 1, "value1") }));
       } else {
         result.add(new KeyValue(row, family, qualifier, timestamp, value));
       }
@@ -313,97 +310,9 @@ public class RedundantKVGenerator {
     return result;
   }

-  /************************ get/set ***********************************/
-
-  public RedundantKVGenerator setCommonPrefix(byte[] prefix){
-    this.commonPrefix = prefix;
-    return this;
-  }
-
-  public RedundantKVGenerator setRandomizer(Random randomizer) {
-    this.randomizer = randomizer;
-    return this;
-  }
-
-  public RedundantKVGenerator setNumberOfRowPrefixes(int numberOfRowPrefixes) {
-    this.numberOfRowPrefixes = numberOfRowPrefixes;
-    return this;
-  }
-
-  public RedundantKVGenerator setAveragePrefixLength(int averagePrefixLength) {
-    this.averagePrefixLength = averagePrefixLength;
-    return this;
-  }
-
-  public RedundantKVGenerator setPrefixLengthVariance(int prefixLengthVariance) {
-    this.prefixLengthVariance = prefixLengthVariance;
-    return this;
-  }
-
-  public RedundantKVGenerator setAverageSuffixLength(int averageSuffixLength) {
-    this.averageSuffixLength = averageSuffixLength;
-    return this;
-  }
-
-  public RedundantKVGenerator setSuffixLengthVariance(int suffixLengthVariance) {
-    this.suffixLengthVariance = suffixLengthVariance;
-    return this;
-  }
-
-  public RedundantKVGenerator setNumberOfRows(int numberOfRows) {
-    this.numberOfRows = numberOfRows;
-    return this;
-  }
-
-  public RedundantKVGenerator setChanceForSameQualifier(float chanceForSameQualifier) {
-    this.chanceForSameQualifier = chanceForSameQualifier;
-    return this;
-  }
-
-  public RedundantKVGenerator setChanceForSimilarQualifier(float chanceForSimiliarQualifier) {
-    this.chanceForSimilarQualifier = chanceForSimiliarQualifier;
-    return this;
-  }
-
-  public RedundantKVGenerator setAverageQualifierLength(int averageQualifierLength) {
-    this.averageQualifierLength = averageQualifierLength;
-    return this;
-  }
-
-  public RedundantKVGenerator setQualifierLengthVariance(int qualifierLengthVariance) {
-    this.qualifierLengthVariance = qualifierLengthVariance;
-    return this;
-  }
-
-  public RedundantKVGenerator setColumnFamilyLength(int columnFamilyLength) {
-    this.columnFamilyLength = columnFamilyLength;
-    return this;
-  }
-
   public RedundantKVGenerator setFamily(byte[] family) {
     this.family = family;
     this.columnFamilyLength = family.length;
     return this;
   }
-
-  public RedundantKVGenerator setValueLength(int valueLength) {
-    this.valueLength = valueLength;
-    return this;
-  }
-
-  public RedundantKVGenerator setChanceForZeroValue(float chanceForZeroValue) {
-    this.chanceForZeroValue = chanceForZeroValue;
-    return this;
-  }
-
-  public RedundantKVGenerator setBaseTimestampDivide(int baseTimestampDivide) {
-    this.baseTimestampDivide = baseTimestampDivide;
-    return this;
-  }
-
-  public RedundantKVGenerator setTimestampDiffSize(int timestampDiffSize) {
-    this.timestampDiffSize = timestampDiffSize;
-    return this;
-  }
-
 }
@@ -1187,10 +1187,9 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {

   /**
    * @return True if we removed the test dirs
-   * @throws IOException
    */
   @Override
-  public boolean cleanupTestDir() throws IOException {
+  public boolean cleanupTestDir() {
     boolean ret = super.cleanupTestDir();
     if (deleteDir(this.clusterTestDir)) {
       this.clusterTestDir = null;