HBASE-24510 Remove HBaseTestCase and GenericTestUtils (#1859)

Signed-off-by: Michael Stack <stack@apache.org>
Duo Zhang 2020-06-06 19:06:11 +08:00 committed by GitHub
parent 89b7b5a7f9
commit 16116fa35e
13 changed files with 518 additions and 1226 deletions


@@ -1,321 +0,0 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase;
import java.io.File;
import java.io.IOException;
import java.lang.management.ManagementFactory;
import java.lang.management.ThreadInfo;
import java.lang.management.ThreadMXBean;
import java.lang.reflect.InvocationTargetException;
import java.util.Arrays;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.regex.Pattern;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.util.Time;
import org.junit.Assert;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.slf4j.Logger;
import org.apache.hbase.thirdparty.com.google.common.base.Joiner;
import org.apache.hbase.thirdparty.com.google.common.base.Supplier;
import org.apache.hbase.thirdparty.com.google.common.collect.Sets;
/**
* Test provides some very generic helpers which might be used across the tests
*/
public abstract class GenericTestUtils {
private static final AtomicInteger sequence = new AtomicInteger();
/**
* Extracts the name of the method where the invocation has happened
* @return String name of the invoking method
*/
public static String getMethodName() {
return Thread.currentThread().getStackTrace()[2].getMethodName();
}
/**
* Generates a process-wide unique sequence number.
* @return a unique sequence number
*/
public static int uniqueSequenceId() {
return sequence.incrementAndGet();
}
/**
* Assert that a given file exists.
*/
public static void assertExists(File f) {
Assert.assertTrue("File " + f + " should exist", f.exists());
}
/**
* List all of the files in 'dir' that match the regex 'pattern'.
* Then check that this list is identical to 'expectedMatches'.
* @throws IOException if the dir is inaccessible
*/
public static void assertGlobEquals(File dir, String pattern,
String ... expectedMatches) throws IOException {
Set<String> found = Sets.newTreeSet();
for (File f : FileUtil.listFiles(dir)) {
if (f.getName().matches(pattern)) {
found.add(f.getName());
}
}
Set<String> expectedSet = Sets.newTreeSet(
Arrays.asList(expectedMatches));
Assert.assertEquals("Bad files matching " + pattern + " in " + dir,
Joiner.on(",").join(expectedSet),
Joiner.on(",").join(found));
}
public static void waitFor(Supplier<Boolean> check,
int checkEveryMillis, int waitForMillis)
throws TimeoutException, InterruptedException
{
long st = Time.now();
do {
boolean result = check.get();
if (result) {
return;
}
Thread.sleep(checkEveryMillis);
} while (Time.now() - st < waitForMillis);
throw new TimeoutException("Timed out waiting for condition. " +
"Thread diagnostics:\n" +
TimedOutTestsListener.buildThreadDiagnosticString());
}
/**
* Mockito answer helper that triggers one latch as soon as the
* method is called, then waits on another before continuing.
*/
public static class DelayAnswer implements Answer<Object> {
private final Logger LOG;
private final CountDownLatch fireLatch = new CountDownLatch(1);
private final CountDownLatch waitLatch = new CountDownLatch(1);
private final CountDownLatch resultLatch = new CountDownLatch(1);
private final AtomicInteger fireCounter = new AtomicInteger(0);
private final AtomicInteger resultCounter = new AtomicInteger(0);
// Result fields set after proceed() is called.
private volatile Throwable thrown;
private volatile Object returnValue;
public DelayAnswer(Logger log) {
this.LOG = log;
}
/**
* Wait until the method is called.
*/
public void waitForCall() throws InterruptedException {
fireLatch.await();
}
/**
* Tell the method to proceed.
* This should only be called after waitForCall()
*/
public void proceed() {
waitLatch.countDown();
}
@Override
public Object answer(InvocationOnMock invocation) throws Throwable {
LOG.info("DelayAnswer firing fireLatch");
fireCounter.getAndIncrement();
fireLatch.countDown();
try {
LOG.info("DelayAnswer waiting on waitLatch");
waitLatch.await();
LOG.info("DelayAnswer delay complete");
} catch (InterruptedException ie) {
throw new IOException("Interrupted waiting on latch", ie);
}
return passThrough(invocation);
}
protected Object passThrough(InvocationOnMock invocation) throws Throwable {
try {
Object ret = invocation.callRealMethod();
returnValue = ret;
return ret;
} catch (Throwable t) {
thrown = t;
throw t;
} finally {
resultCounter.incrementAndGet();
resultLatch.countDown();
}
}
/**
* After calling proceed(), this will wait until the call has
* completed and a result has been returned to the caller.
*/
public void waitForResult() throws InterruptedException {
resultLatch.await();
}
/**
* After the call has gone through, return any exception that
* was thrown, or null if no exception was thrown.
*/
public Throwable getThrown() {
return thrown;
}
/**
* After the call has gone through, return the call's return value,
* or null in case it was void or an exception was thrown.
*/
public Object getReturnValue() {
return returnValue;
}
public int getFireCount() {
return fireCounter.get();
}
public int getResultCount() {
return resultCounter.get();
}
}
/**
* An Answer implementation that simply forwards all calls through
* to a delegate.
*
* This is useful as the default Answer for a mock object, to create
* something like a spy on an RPC proxy. For example:
* <code>
* NamenodeProtocol origNNProxy = secondary.getNameNode();
* NamenodeProtocol spyNNProxy = Mockito.mock(NameNodeProtocol.class,
* new DelegateAnswer(origNNProxy);
* doThrow(...).when(spyNNProxy).getBlockLocations(...);
* ...
* </code>
*/
public static class DelegateAnswer implements Answer<Object> {
private final Object delegate;
private final Logger log;
public DelegateAnswer(Object delegate) {
this(null, delegate);
}
public DelegateAnswer(Logger log, Object delegate) {
this.log = log;
this.delegate = delegate;
}
@Override
public Object answer(InvocationOnMock invocation) throws Throwable {
try {
if (log != null) {
log.info("Call to " + invocation + " on " + delegate,
new Exception("TRACE"));
}
return invocation.getMethod().invoke(
delegate, invocation.getArguments());
} catch (InvocationTargetException ite) {
throw ite.getCause();
}
}
}
/**
* An Answer implementation which sleeps for a random number of milliseconds
* between 0 and a configurable value before delegating to the real
* implementation of the method. This can be useful for drawing out race
* conditions.
*/
public static class SleepAnswer implements Answer<Object> {
private final int maxSleepTime;
private static Random r = new Random();
public SleepAnswer(int maxSleepTime) {
this.maxSleepTime = maxSleepTime;
}
@Override
public Object answer(InvocationOnMock invocation) throws Throwable {
boolean interrupted = false;
try {
Thread.sleep(r.nextInt(maxSleepTime));
} catch (InterruptedException ie) {
interrupted = true;
}
try {
return invocation.callRealMethod();
} finally {
if (interrupted) {
Thread.currentThread().interrupt();
}
}
}
}
public static void assertMatches(String output, String pattern) {
Assert.assertTrue("Expected output to match /" + pattern + "/" +
" but got:\n" + output,
Pattern.compile(pattern).matcher(output).find());
}
public static void assertValueNear(long expected, long actual, long allowedError) {
assertValueWithinRange(expected - allowedError, expected + allowedError, actual);
}
public static void assertValueWithinRange(long expectedMin, long expectedMax,
long actual) {
Assert.assertTrue("Expected " + actual + " to be in range (" + expectedMin + ","
+ expectedMax + ")", expectedMin <= actual && actual <= expectedMax);
}
/**
* Assert that there are no threads running whose name matches the
* given regular expression.
* @param regex the regex to match against
*/
public static void assertNoThreadsMatching(String regex) {
Pattern pattern = Pattern.compile(regex);
ThreadMXBean threadBean = ManagementFactory.getThreadMXBean();
ThreadInfo[] infos = threadBean.getThreadInfo(threadBean.getAllThreadIds(), 20);
for (ThreadInfo info : infos) {
if (info == null) continue;
if (pattern.matcher(info.getThreadName()).matches()) {
Assert.fail("Leaked thread: " + info + "\n" +
Joiner.on("\n").join(info.getStackTrace()));
}
}
}
}
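The DelayAnswer helper above was meant to be handed to Mockito so a test can pause a call at a known point and release it later. As a rough usage sketch only (the SlowFlusher class and its flush() method are invented purely for this illustration, not part of HBase), a test could drive it like this:

import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.spy;

import org.apache.hadoop.hbase.GenericTestUtils.DelayAnswer;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

public class DelayAnswerSketch {
  private static final Logger LOG = LoggerFactory.getLogger(DelayAnswerSketch.class);

  // Hypothetical collaborator, used only for this illustration.
  static class SlowFlusher {
    void flush() {
      // real work would run here when DelayAnswer passes the call through
    }
  }

  public static void main(String[] args) throws Exception {
    DelayAnswer delay = new DelayAnswer(LOG);
    SlowFlusher flusher = spy(new SlowFlusher());
    // Route flush() through DelayAnswer; the real method still runs via passThrough().
    doAnswer(delay).when(flusher).flush();

    Thread worker = new Thread(() -> flusher.flush());
    worker.start();

    delay.waitForCall();   // block until flush() has actually been entered
    delay.proceed();       // let the blocked call continue
    delay.waitForResult(); // wait until the real flush() has returned
    worker.join();
  }
}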


@@ -1,459 +0,0 @@
/**
*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.NavigableMap;
import junit.framework.AssertionFailedError;
import junit.framework.TestCase;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.log.HBaseMarkers;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.Region;
import org.apache.hadoop.hbase.regionserver.RegionAsTable;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.CommonFSUtils;
import org.apache.hadoop.hbase.util.FSTableDescriptors;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Abstract HBase test class. Initializes a few things that can come in handly
* like an HBaseConfiguration and filesystem.
* @deprecated since 2.0.0 and will be removed in 3.0.0. Write junit4 unit tests using
* {@link HBaseTestingUtility}.
* @see HBaseTestingUtility
* @see <a href="https://issues.apache.org/jira/browse/HBASE-11912">HBASE-11912</a>
*/
@Deprecated
public abstract class HBaseTestCase extends TestCase {
private static final Logger LOG = LoggerFactory.getLogger(HBaseTestCase.class);
protected final static byte [] fam1 = Bytes.toBytes("colfamily11");
protected final static byte [] fam2 = Bytes.toBytes("colfamily21");
protected final static byte [] fam3 = Bytes.toBytes("colfamily31");
protected static final byte [][] COLUMNS = {fam1, fam2, fam3};
private boolean localfs = false;
protected static Path testDir = null;
protected FileSystem fs = null;
protected HRegion meta = null;
protected static final char FIRST_CHAR = 'a';
protected static final char LAST_CHAR = 'z';
protected static final String PUNCTUATION = "~`@#$%^&*()-_+=:;',.<>/?[]{}|";
protected static final byte [] START_KEY_BYTES = {FIRST_CHAR, FIRST_CHAR, FIRST_CHAR};
protected String START_KEY = new String(START_KEY_BYTES, HConstants.UTF8_CHARSET);
protected static final int MAXVERSIONS = 3;
protected final HBaseTestingUtility testUtil = new HBaseTestingUtility();
public volatile Configuration conf = testUtil.getConfiguration();
public final FSTableDescriptors fsTableDescriptors;
{
try {
fsTableDescriptors = new FSTableDescriptors(conf);
} catch (IOException e) {
throw new RuntimeException("Failed to init descriptors", e);
}
}
/** constructor */
public HBaseTestCase() {
super();
}
/**
* @param name
*/
public HBaseTestCase(String name) {
super(name);
}
/**
* Note that this method must be called after the mini hdfs cluster has
* started or we end up with a local file system.
*/
@Override
protected void setUp() throws Exception {
super.setUp();
localfs =
(conf.get("fs.defaultFS", "file:///").compareTo("file:///") == 0);
if (fs == null) {
this.fs = FileSystem.get(conf);
}
try {
if (localfs) {
testDir = getUnitTestdir(getName());
if (fs.exists(testDir)) {
fs.delete(testDir, true);
}
} else {
testDir = CommonFSUtils.getRootDir(conf);
}
} catch (Exception e) {
LOG.error(HBaseMarkers.FATAL, "error during setup", e);
throw e;
}
}
@Override
protected void tearDown() throws Exception {
try {
if (localfs) {
if (this.fs.exists(testDir)) {
this.fs.delete(testDir, true);
}
}
} catch (Exception e) {
LOG.error(HBaseMarkers.FATAL, "error during tear down", e);
}
super.tearDown();
}
/**
* @see HBaseTestingUtility#getBaseTestDir
* @param testName
* @return directory to use for this test
*/
protected Path getUnitTestdir(String testName) {
return testUtil.getDataTestDir(testName);
}
/**
* You must call close on the returned region and then close on the log file it created. Do
* {@link HBaseTestingUtility#closeRegionAndWAL(HRegion)} to close both the region and the WAL.
* @param tableDescriptor TableDescriptor
* @param startKey Start Key
* @param endKey End Key
* @return An {@link HRegion}
* @throws IOException If thrown by
* {@link #createNewHRegion(TableDescriptor, byte[], byte[], Configuration)}
*/
public HRegion createNewHRegion(TableDescriptor tableDescriptor, byte [] startKey,
byte [] endKey)
throws IOException {
return createNewHRegion(tableDescriptor, startKey, endKey, this.conf);
}
public HRegion createNewHRegion(TableDescriptor tableDescriptor, byte[] startKey, byte[] endKey,
Configuration conf) throws IOException {
RegionInfo hri = RegionInfoBuilder.newBuilder(tableDescriptor.getTableName())
.setStartKey(startKey).setEndKey(endKey).build();
return HBaseTestingUtility.createRegionAndWAL(hri, testDir, conf, tableDescriptor);
}
protected HRegion openClosedRegion(final HRegion closedRegion)
throws IOException {
return HRegion.openHRegion(closedRegion, null);
}
/**
* Create a table of name {@code name} with {@link #COLUMNS} for
* families.
* @param name Name to give table.
* @return Column descriptor.
*/
protected TableDescriptor createTableDescriptor(final String name) {
return createTableDescriptor(name, MAXVERSIONS);
}
/**
* Create a table of name {@code name} with {@link #COLUMNS} for
* families.
* @param name Name to give table.
* @param versions How many versions to allow per column.
* @return Column descriptor.
*/
protected TableDescriptor createTableDescriptor(final String name,
final int versions) {
return createTableDescriptor(name, HColumnDescriptor.DEFAULT_MIN_VERSIONS,
versions, HConstants.FOREVER, HColumnDescriptor.DEFAULT_KEEP_DELETED);
}
/**
* Create a table of name {@code name} with {@link #COLUMNS} for
* families.
* @param name Name to give table.
* @param versions How many versions to allow per column.
* @return Column descriptor.
*/
protected TableDescriptor createTableDescriptor(final String name,
final int minVersions, final int versions, final int ttl, KeepDeletedCells keepDeleted) {
TableDescriptorBuilder.ModifyableTableDescriptor tableDescriptor =
new TableDescriptorBuilder.ModifyableTableDescriptor(TableName.valueOf(name));
for (byte[] cfName : new byte[][]{ fam1, fam2, fam3 }) {
tableDescriptor.setColumnFamily(
new ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor(cfName)
.setMinVersions(minVersions)
.setMaxVersions(versions)
.setKeepDeletedCells(keepDeleted)
.setBlockCacheEnabled(false)
.setTimeToLive(ttl)
);
}
return tableDescriptor;
}
/**
* Add content to region <code>r</code> on the passed column
* <code>column</code>.
* Adds data of the from 'aaa', 'aab', etc where key and value are the same.
* @param r
* @param columnFamily
* @param column
* @throws IOException
* @return count of what we added.
*/
public static long addContent(final Region r, final byte [] columnFamily, final byte[] column)
throws IOException {
byte [] startKey = r.getRegionInfo().getStartKey();
byte [] endKey = r.getRegionInfo().getEndKey();
byte [] startKeyBytes = startKey;
if (startKeyBytes == null || startKeyBytes.length == 0) {
startKeyBytes = START_KEY_BYTES;
}
return addContent(new RegionAsTable(r), Bytes.toString(columnFamily), Bytes.toString(column),
startKeyBytes, endKey, -1);
}
public static long addContent(final Region r, final byte [] columnFamily) throws IOException {
return addContent(r, columnFamily, null);
}
/**
* Add content to region <code>r</code> on the passed column
* <code>column</code>.
* Adds data of the from 'aaa', 'aab', etc where key and value are the same.
* @throws IOException
* @return count of what we added.
*/
public static long addContent(final Table updater,
final String columnFamily) throws IOException {
return addContent(updater, columnFamily, START_KEY_BYTES, null);
}
public static long addContent(final Table updater, final String family,
final String column) throws IOException {
return addContent(updater, family, column, START_KEY_BYTES, null);
}
/**
* Add content to region <code>r</code> on the passed column
* <code>column</code>.
* Adds data of the from 'aaa', 'aab', etc where key and value are the same.
* @return count of what we added.
* @throws IOException
*/
public static long addContent(final Table updater, final String columnFamily,
final byte [] startKeyBytes, final byte [] endKey)
throws IOException {
return addContent(updater, columnFamily, null, startKeyBytes, endKey, -1);
}
public static long addContent(final Table updater, final String family, String column,
final byte [] startKeyBytes, final byte [] endKey) throws IOException {
return addContent(updater, family, column, startKeyBytes, endKey, -1);
}
/**
* Add content to region <code>r</code> on the passed column
* <code>column</code>.
* Adds data of the from 'aaa', 'aab', etc where key and value are the same.
* @return count of what we added.
* @throws IOException
*/
public static long addContent(final Table updater,
final String columnFamily,
final String column,
final byte [] startKeyBytes, final byte [] endKey, final long ts)
throws IOException {
long count = 0;
// Add rows of three characters. The first character starts with the
// 'a' character and runs up to 'z'. Per first character, we run the
// second character over same range. And same for the third so rows
// (and values) look like this: 'aaa', 'aab', 'aac', etc.
char secondCharStart = (char)startKeyBytes[1];
char thirdCharStart = (char)startKeyBytes[2];
EXIT: for (char c = (char)startKeyBytes[0]; c <= LAST_CHAR; c++) {
for (char d = secondCharStart; d <= LAST_CHAR; d++) {
for (char e = thirdCharStart; e <= LAST_CHAR; e++) {
byte [] t = new byte [] {(byte)c, (byte)d, (byte)e};
if (endKey != null && endKey.length > 0
&& Bytes.compareTo(endKey, t) <= 0) {
break EXIT;
}
try {
Put put;
if(ts != -1) {
put = new Put(t, ts);
} else {
put = new Put(t);
}
try {
StringBuilder sb = new StringBuilder();
if (column != null && column.contains(":")) {
sb.append(column);
} else {
if (columnFamily != null) {
sb.append(columnFamily);
if (!columnFamily.endsWith(":")) {
sb.append(":");
}
if (column != null) {
sb.append(column);
}
}
}
byte[][] split =
CellUtil.parseColumn(Bytes.toBytes(sb.toString()));
if(split.length == 1) {
byte[] qualifier = new byte[0];
put.addColumn(split[0], qualifier, t);
} else {
put.addColumn(split[0], split[1], t);
}
put.setDurability(Durability.SKIP_WAL);
updater.put(put);
count++;
} catch (RuntimeException ex) {
ex.printStackTrace();
throw ex;
} catch (IOException ex) {
ex.printStackTrace();
throw ex;
}
} catch (RuntimeException ex) {
ex.printStackTrace();
throw ex;
} catch (IOException ex) {
ex.printStackTrace();
throw ex;
}
}
// Set start character back to FIRST_CHAR after we've done first loop.
thirdCharStart = FIRST_CHAR;
}
secondCharStart = FIRST_CHAR;
}
return count;
}
protected void assertResultEquals(final HRegion region, final byte [] row,
final byte [] family, final byte [] qualifier, final long timestamp,
final byte [] value) throws IOException {
Get get = new Get(row);
get.setTimestamp(timestamp);
Result res = region.get(get);
NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> map =
res.getMap();
byte [] res_value = map.get(family).get(qualifier).get(timestamp);
if (value == null) {
assertEquals(Bytes.toString(family) + " " + Bytes.toString(qualifier) +
" at timestamp " + timestamp, null, res_value);
} else {
if (res_value == null) {
fail(Bytes.toString(family) + " " + Bytes.toString(qualifier) +
" at timestamp " + timestamp + "\" was expected to be \"" +
Bytes.toStringBinary(value) + " but was null");
}
if (res_value != null) {
assertEquals(Bytes.toString(family) + " " + Bytes.toString(qualifier) +
" at timestamp " +
timestamp, value, new String(res_value, StandardCharsets.UTF_8));
}
}
}
/**
* Common method to close down a MiniDFSCluster and the associated file system
*
* @param cluster
*/
public static void shutdownDfs(MiniDFSCluster cluster) {
if (cluster != null) {
LOG.info("Shutting down Mini DFS ");
try {
cluster.shutdown();
} catch (Exception e) {
/// Can get a java.lang.reflect.UndeclaredThrowableException thrown
// here because of an InterruptedException. Don't let exceptions in
// here be cause of test failure.
}
try {
FileSystem fs = cluster.getFileSystem();
if (fs != null) {
LOG.info("Shutting down FileSystem");
fs.close();
}
FileSystem.closeAll();
} catch (IOException e) {
LOG.error("error closing file system", e);
}
}
}
/**
* You must call {@link #closeRootAndMeta()} when done after calling this method. It does cleanup.
* @throws IOException
*/
protected void createMetaRegion() throws IOException {
FSTableDescriptors fsTableDescriptors = new FSTableDescriptors(conf);
meta = HBaseTestingUtility.createRegionAndWAL(RegionInfoBuilder.FIRST_META_REGIONINFO, testDir,
conf, fsTableDescriptors.get(TableName.META_TABLE_NAME));
}
protected void closeRootAndMeta() throws IOException {
HBaseTestingUtility.closeRegionAndWAL(meta);
}
public static void assertByteEquals(byte[] expected,
byte[] actual) {
if (Bytes.compareTo(expected, actual) != 0) {
throw new AssertionFailedError("expected:<" +
Bytes.toString(expected) + "> but was:<" +
Bytes.toString(actual) + ">");
}
}
public static void assertEquals(byte[] expected,
byte[] actual) {
if (Bytes.compareTo(expected, actual) != 0) {
throw new AssertionFailedError("expected:<" +
Bytes.toStringBinary(expected) + "> but was:<" +
Bytes.toStringBinary(actual) + ">");
}
}
}
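The deprecation note on this class points tests at HBaseTestingUtility and JUnit 4. As a hedged sketch of the migrated shape (the test class, table name and family name below are invented for illustration; the utility calls mirror those used elsewhere in this commit), a test that used to extend HBaseTestCase looks roughly like this:

import static org.junit.Assert.assertTrue;

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HTestConst;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.After;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestName;

public class TestMigratedFromHBaseTestCase {
  private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();

  @Rule
  public TestName name = new TestName();

  private Configuration conf;
  private Path testDir;
  private HRegion region;

  @Before
  public void setUp() throws IOException {
    conf = TEST_UTIL.getConfiguration();
    testDir = TEST_UTIL.getDataTestDir(name.getMethodName());
    TableDescriptor td = TableDescriptorBuilder.newBuilder(TableName.valueOf("MyTable"))
      .setColumnFamily(ColumnFamilyDescriptorBuilder.of("cf")).build();
    RegionInfo ri = RegionInfoBuilder.newBuilder(td.getTableName()).build();
    // Replaces HBaseTestCase.createNewHRegion(); the WAL is closed with the region in tearDown.
    region = HBaseTestingUtility.createRegionAndWAL(ri, testDir, conf, td);
  }

  @After
  public void tearDown() throws IOException {
    HBaseTestingUtility.closeRegionAndWAL(region);
    TEST_UTIL.cleanupTestDir();
  }

  @Test
  public void testLoadsSomeRows() throws IOException {
    // HTestConst.addContent now hosts the row-loading helper formerly on HBaseTestCase.
    long added = HTestConst.addContent(region, Bytes.toBytes("cf"));
    assertTrue(added > 0);
  }
}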


@@ -16,16 +16,21 @@
 */
package org.apache.hadoop.hbase;
+import java.io.IOException;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import java.util.Collections;
+import org.apache.hadoop.hbase.client.Durability;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.regionserver.Region;
+import org.apache.hadoop.hbase.regionserver.RegionAsTable;
import org.apache.hadoop.hbase.util.Bytes;
/**
- * Similar to {@link HConstants} but for tests. Also provides some simple
- * static utility functions to generate test data.
+ * Similar to {@link HConstants} but for tests. Also provides some simple static utility functions
+ * to generate test data.
 */
public class HTestConst {
@@ -34,15 +39,13 @@ public class HTestConst {
public static final String DEFAULT_TABLE_STR = "MyTestTable";
public static final byte[] DEFAULT_TABLE_BYTES = Bytes.toBytes(DEFAULT_TABLE_STR);
-public static final TableName DEFAULT_TABLE =
-TableName.valueOf(DEFAULT_TABLE_BYTES);
+public static final TableName DEFAULT_TABLE = TableName.valueOf(DEFAULT_TABLE_BYTES);
public static final String DEFAULT_CF_STR = "MyDefaultCF";
public static final byte[] DEFAULT_CF_BYTES = Bytes.toBytes(DEFAULT_CF_STR);
public static final Set<String> DEFAULT_CF_STR_SET =
-Collections.unmodifiableSet(new HashSet<>(
-Arrays.asList(new String[] { DEFAULT_CF_STR })));
+Collections.unmodifiableSet(new HashSet<>(Arrays.asList(new String[] { DEFAULT_CF_STR })));
public static final String DEFAULT_ROW_STR = "MyTestRow";
public static final byte[] DEFAULT_ROW_BYTES = Bytes.toBytes(DEFAULT_ROW_STR);
@@ -53,12 +56,16 @@ public class HTestConst {
public static String DEFAULT_VALUE_STR = "MyTestValue";
public static byte[] DEFAULT_VALUE_BYTES = Bytes.toBytes(DEFAULT_VALUE_STR);
+private static final char FIRST_CHAR = 'a';
+private static final char LAST_CHAR = 'z';
+private static final byte[] START_KEY_BYTES = { FIRST_CHAR, FIRST_CHAR, FIRST_CHAR };
/**
- * Generate the given number of unique byte sequences by appending numeric
- * suffixes (ASCII representations of decimal numbers).
+ * Generate the given number of unique byte sequences by appending numeric suffixes (ASCII
+ * representations of decimal numbers).
 */
public static byte[][] makeNAscii(byte[] base, int n) {
-byte [][] ret = new byte[n][];
+byte[][] ret = new byte[n][];
for (int i = 0; i < n; i++) {
byte[] tail = Bytes.toBytes(Integer.toString(i));
ret[i] = Bytes.add(base, tail);
@@ -66,4 +73,112 @@
return ret;
}
/**
* Add content to region <code>r</code> on the passed column <code>column</code>. Adds data of the
* from 'aaa', 'aab', etc where key and value are the same.
* @return count of what we added.
*/
public static long addContent(final Region r, final byte[] columnFamily, final byte[] column)
throws IOException {
byte[] startKey = r.getRegionInfo().getStartKey();
byte[] endKey = r.getRegionInfo().getEndKey();
byte[] startKeyBytes = startKey;
if (startKeyBytes == null || startKeyBytes.length == 0) {
startKeyBytes = START_KEY_BYTES;
}
return addContent(new RegionAsTable(r), Bytes.toString(columnFamily), Bytes.toString(column),
startKeyBytes, endKey, -1);
}
public static long addContent(final Region r, final byte[] columnFamily) throws IOException {
return addContent(r, columnFamily, null);
}
/**
* Add content to region <code>r</code> on the passed column <code>column</code>. Adds data of the
* from 'aaa', 'aab', etc where key and value are the same.
* @return count of what we added.
*/
public static long addContent(Table updater, String columnFamily) throws IOException {
return addContent(updater, columnFamily, START_KEY_BYTES, null);
}
public static long addContent(Table updater, String family, String column) throws IOException {
return addContent(updater, family, column, START_KEY_BYTES, null);
}
/**
* Add content to region <code>r</code> on the passed column <code>column</code>. Adds data of the
* from 'aaa', 'aab', etc where key and value are the same.
* @return count of what we added.
*/
public static long addContent(Table updater, String columnFamily, byte[] startKeyBytes,
byte[] endKey) throws IOException {
return addContent(updater, columnFamily, null, startKeyBytes, endKey, -1);
}
public static long addContent(Table updater, String family, String column, byte[] startKeyBytes,
byte[] endKey) throws IOException {
return addContent(updater, family, column, startKeyBytes, endKey, -1);
}
/**
* Add content to region <code>r</code> on the passed column <code>column</code>. Adds data of the
* from 'aaa', 'aab', etc where key and value are the same.
* @return count of what we added.
*/
public static long addContent(Table updater, String columnFamily, String column,
byte[] startKeyBytes, byte[] endKey, long ts) throws IOException {
long count = 0;
// Add rows of three characters. The first character starts with the
// 'a' character and runs up to 'z'. Per first character, we run the
// second character over same range. And same for the third so rows
// (and values) look like this: 'aaa', 'aab', 'aac', etc.
char secondCharStart = (char) startKeyBytes[1];
char thirdCharStart = (char) startKeyBytes[2];
EXIT: for (char c = (char) startKeyBytes[0]; c <= LAST_CHAR; c++) {
for (char d = secondCharStart; d <= LAST_CHAR; d++) {
for (char e = thirdCharStart; e <= LAST_CHAR; e++) {
byte[] t = new byte[] { (byte) c, (byte) d, (byte) e };
if (endKey != null && endKey.length > 0 && Bytes.compareTo(endKey, t) <= 0) {
break EXIT;
}
Put put;
if (ts != -1) {
put = new Put(t, ts);
} else {
put = new Put(t);
}
StringBuilder sb = new StringBuilder();
if (column != null && column.contains(":")) {
sb.append(column);
} else {
if (columnFamily != null) {
sb.append(columnFamily);
if (!columnFamily.endsWith(":")) {
sb.append(":");
}
if (column != null) {
sb.append(column);
}
}
}
byte[][] split = CellUtil.parseColumn(Bytes.toBytes(sb.toString()));
if (split.length == 1) {
byte[] qualifier = new byte[0];
put.addColumn(split[0], qualifier, t);
} else {
put.addColumn(split[0], split[1], t);
}
put.setDurability(Durability.SKIP_WAL);
updater.put(put);
count++;
}
// Set start character back to FIRST_CHAR after we've done first loop.
thirdCharStart = FIRST_CHAR;
}
secondCharStart = FIRST_CHAR;
}
return count;
}
}
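To make the relocation concrete, here is a hedged sketch of how a caller exercises the moved helper; the region is assumed to come from HBaseTestingUtility.createRegionAndWAL as elsewhere in this commit, and the "cf" family name is invented for the example:

import java.io.IOException;
import org.apache.hadoop.hbase.HTestConst;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.RegionAsTable;
import org.apache.hadoop.hbase.util.Bytes;

public class AddContentSketch {
  // Load three-character rows ('aaa', 'aab', ...) where key and value are the same.
  static long loadRows(HRegion region) throws IOException {
    // Variant 1: hand the Region straight to the helper.
    long direct = HTestConst.addContent(region, Bytes.toBytes("cf"));
    // Variant 2: go through the Table view, as TestCompaction does with its "loader".
    Table loader = new RegionAsTable(region);
    long viaTable = HTestConst.addContent(loader, "cf");
    return direct + viaTable;
  }
}

With the default start key "aaa" and no end key the helper walks every three-letter row through "zzz"; with an end key it stops at the first row that sorts at or beyond it, so start "aaa" and end "abc" writes the 28 rows "aaa" through "aaz" plus "aba" and "abb".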


@@ -17,7 +17,7 @@
 */
package org.apache.hadoop.hbase.client;
-import static org.apache.hadoop.hbase.HBaseTestCase.assertByteEquals;
+import static org.junit.Assert.assertArrayEquals;
import java.io.IOException;
import java.nio.ByteBuffer;
@@ -158,7 +158,7 @@ public class TestResult extends TestCase {
for (int i = 0; i < 100; ++i) {
final byte[] qf = Bytes.toBytes(i);
-assertByteEquals(Bytes.add(value, Bytes.toBytes(i)), r.getValue(family, qf));
+assertArrayEquals(Bytes.add(value, Bytes.toBytes(i)), r.getValue(family, qf));
assertTrue(r.containsColumn(family, qf));
}
}
@@ -177,7 +177,7 @@ public class TestResult extends TestCase {
for (int i = 0; i < 100; ++i) {
final byte[] qf = Bytes.toBytes(i);
-assertByteEquals(Bytes.add(value, Bytes.toBytes(i)), r.getValue(family, qf));
+assertArrayEquals(Bytes.add(value, Bytes.toBytes(i)), r.getValue(family, qf));
assertTrue(r.containsColumn(family, qf));
}
}


@@ -38,9 +38,9 @@ import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.Coprocessor;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.hadoop.hbase.HBaseClassTestRule;
-import org.apache.hadoop.hbase.HBaseTestCase;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HTestConst;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Get;
@@ -295,7 +295,7 @@ public class TestCoprocessorInterface {
HRegion region = initHRegion(tableName, name.getMethodName(), hc, new Class<?>[]{}, families);
for (int i = 0; i < 3; i++) {
-HBaseTestCase.addContent(region, fam3);
+HTestConst.addContent(region, fam3);
region.flush(true);
}
@@ -357,7 +357,7 @@ public class TestCoprocessorInterface {
HRegion region = initHRegion(tableName, name.getMethodName(), hc,
new Class<?>[]{CoprocessorImpl.class}, families);
for (int i = 0; i < 3; i++) {
-HBaseTestCase.addContent(region, fam3);
+HTestConst.addContent(region, fam3);
region.flush(true);
}


@@ -17,12 +17,11 @@
 */
package org.apache.hadoop.hbase.regionserver;
-import static org.apache.hadoop.hbase.HBaseTestCase.addContent;
+import static org.apache.hadoop.hbase.HTestConst.addContent;
import static org.junit.Assert.assertEquals;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;


@@ -27,7 +27,7 @@ import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
-import static org.mockito.Matchers.any;
+import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;
@@ -48,9 +48,9 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.ChoreService;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.HBaseTestCase;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HTestConst;
import org.apache.hadoop.hbase.Waiter;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Delete;
@@ -173,7 +173,7 @@ public class TestCompaction {
for (int j = 0; j < jmax; j++) {
p.addColumn(COLUMN_FAMILY, Bytes.toBytes(j), pad);
}
-HBaseTestCase.addContent(loader, Bytes.toString(COLUMN_FAMILY));
+HTestConst.addContent(loader, Bytes.toString(COLUMN_FAMILY));
loader.put(p);
r.flush(true);
}
@@ -249,7 +249,7 @@ public class TestCompaction {
for (int j = 0; j < jmax; j++) {
p.addColumn(COLUMN_FAMILY, Bytes.toBytes(j), pad);
}
-HBaseTestCase.addContent(loader, Bytes.toString(COLUMN_FAMILY));
+HTestConst.addContent(loader, Bytes.toString(COLUMN_FAMILY));
loader.put(p);
r.flush(true);
}
@@ -329,7 +329,7 @@ public class TestCompaction {
private void createStoreFile(final HRegion region, String family) throws IOException {
Table loader = new RegionAsTable(region);
-HBaseTestCase.addContent(loader, family);
+HTestConst.addContent(loader, family);
region.flush(true);
}
@@ -503,7 +503,7 @@ public class TestCompaction {
for (int j = 0; j < jmax; j++) {
p.addColumn(COLUMN_FAMILY, Bytes.toBytes(j), pad);
}
-HBaseTestCase.addContent(loader, Bytes.toString(COLUMN_FAMILY));
+HTestConst.addContent(loader, Bytes.toString(COLUMN_FAMILY));
loader.put(p);
r.flush(true);
}


@@ -17,6 +17,13 @@
 */
package org.apache.hadoop.hbase.regionserver;
+import static org.junit.Assert.assertArrayEquals;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertFalse;
+import static org.junit.Assert.assertNotNull;
+import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
+import static org.junit.Assert.fail;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
@@ -37,7 +44,6 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseClassTestRule;
-import org.apache.hadoop.hbase.HBaseTestCase;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.KeyValue;
@@ -59,7 +65,6 @@ import org.apache.hadoop.hbase.io.hfile.CacheStats;
import org.apache.hadoop.hbase.io.hfile.HFileContext;
import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
import org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoder;
-import org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoderImpl;
import org.apache.hadoop.hbase.io.hfile.HFileInfo;
import org.apache.hadoop.hbase.io.hfile.HFileScanner;
import org.apache.hadoop.hbase.io.hfile.ReaderContext;
@@ -70,11 +75,13 @@ import org.apache.hadoop.hbase.util.BloomFilterFactory;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ChecksumType;
import org.apache.hadoop.hbase.util.CommonFSUtils;
-import org.junit.After;
+import org.junit.AfterClass;
import org.junit.Before;
import org.junit.ClassRule;
+import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
+import org.junit.rules.TestName;
import org.mockito.Mockito;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -86,12 +93,12 @@ import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
/**
 * Test HStoreFile
 */
-@Category({RegionServerTests.class, MediumTests.class})
-public class TestHStoreFile extends HBaseTestCase {
+@Category({ RegionServerTests.class, MediumTests.class })
+public class TestHStoreFile {
@ClassRule
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestHStoreFile.class);
private static final Logger LOG = LoggerFactory.getLogger(TestHStoreFile.class);
private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();
@@ -100,22 +107,31 @@ public class TestHStoreFile extends HBaseTestCase {
private static final ChecksumType CKTYPE = ChecksumType.CRC32C;
private static final int CKBYTES = 512;
private static String TEST_FAMILY = "cf";
+private static final char FIRST_CHAR = 'a';
+private static final char LAST_CHAR = 'z';
+@Rule
+public TestName name = new TestName();
+private Configuration conf;
+private Path testDir;
+private FileSystem fs;
-@Override
@Before
-public void setUp() throws Exception {
-super.setUp();
+public void setUp() throws IOException {
+conf = TEST_UTIL.getConfiguration();
+testDir = TEST_UTIL.getDataTestDir(name.getMethodName());
+fs = testDir.getFileSystem(conf);
}
-@Override
-@After
-public void tearDown() throws Exception {
-super.tearDown();
+@AfterClass
+public static void tearDownAfterClass() {
+TEST_UTIL.cleanupTestDir();
}
/**
- * Write a file and then assert that we can read from top and bottom halves
- * using two HalfMapFiles.
+ * Write a file and then assert that we can read from top and bottom halves using two
+ * HalfMapFiles.
 */
@Test
public void testBasicHalfMapFile() throws Exception {
@@ -124,11 +140,9 @@ public class TestHStoreFile extends HBaseTestCase {
HRegionFileSystem regionFs = HRegionFileSystem.createRegionOnFileSystem(conf, fs,
new Path(testDir, hri.getTable().getNameAsString()), hri);
-HFileContext meta = new HFileContextBuilder().withBlockSize(2*1024).build();
+HFileContext meta = new HFileContextBuilder().withBlockSize(2 * 1024).build();
StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs)
-.withFilePath(regionFs.createTempName())
-.withFileContext(meta)
-.build();
+.withFilePath(regionFs.createTempName()).withFileContext(meta).build();
writeStoreFile(writer);
Path sfPath = regionFs.commitStoreFile(TEST_FAMILY, writer.getPath());
@@ -137,20 +151,20 @@
}
private void writeStoreFile(final StoreFileWriter writer) throws IOException {
-writeStoreFile(writer, Bytes.toBytes(getName()), Bytes.toBytes(getName()));
+writeStoreFile(writer, Bytes.toBytes(name.getMethodName()),
+Bytes.toBytes(name.getMethodName()));
}
// pick an split point (roughly halfway)
-byte[] SPLITKEY = new byte[] { (LAST_CHAR + FIRST_CHAR)/2, FIRST_CHAR};
+byte[] SPLITKEY = new byte[] { (LAST_CHAR + FIRST_CHAR) / 2, FIRST_CHAR };
/*
- * Writes HStoreKey and ImmutableBytes data to passed writer and
- * then closes it.
+ * Writes HStoreKey and ImmutableBytes data to passed writer and then closes it.
 * @param writer
 * @throws IOException
 */
public static void writeStoreFile(final StoreFileWriter writer, byte[] fam, byte[] qualifier)
throws IOException {
long now = System.currentTimeMillis();
try {
for (char d = FIRST_CHAR; d <= LAST_CHAR; d++) {
@@ -165,8 +179,8 @@
}
/**
- * Test that our mechanism of writing store files in one region to reference
- * store files in other regions works.
+ * Test that our mechanism of writing store files in one region to reference store files in other
+ * regions works.
 */
@Test
public void testReference() throws IOException {
@@ -178,20 +192,18 @@ public class TestHStoreFile extends HBaseTestCase {
HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build();
// Make a store file and write data to it.
StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs)
-.withFilePath(regionFs.createTempName())
-.withFileContext(meta)
-.build();
+.withFilePath(regionFs.createTempName()).withFileContext(meta).build();
writeStoreFile(writer);
Path hsfPath = regionFs.commitStoreFile(TEST_FAMILY, writer.getPath());
HStoreFile hsf = new HStoreFile(this.fs, hsfPath, conf, cacheConf, BloomType.NONE, true);
hsf.initReader();
StoreFileReader reader = hsf.getReader();
// Split on a row, not in middle of row. Midkey returned by reader
// may be in middle of row. Create new one with empty column and
// timestamp.
-byte [] midRow = CellUtil.cloneRow(reader.midKey().get());
-byte [] finalRow = CellUtil.cloneRow(reader.getLastKey().get());
+byte[] midRow = CellUtil.cloneRow(reader.midKey().get());
+byte[] finalRow = CellUtil.cloneRow(reader.getLastKey().get());
hsf.closeStoreFile(true);
// Make a reference
@@ -219,14 +231,14 @@
@Test
public void testStoreFileReference() throws Exception {
final RegionInfo hri =
RegionInfoBuilder.newBuilder(TableName.valueOf("testStoreFileReference")).build();
HRegionFileSystem regionFs = HRegionFileSystem.createRegionOnFileSystem(conf, fs,
new Path(testDir, hri.getTable().getNameAsString()), hri);
HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build();
// Make a store file and write data to it.
StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs)
.withFilePath(regionFs.createTempName()).withFileContext(meta).build();
writeStoreFile(writer);
Path hsfPath = regionFs.commitStoreFile(TEST_FAMILY, writer.getPath());
writer.close();
@@ -236,7 +248,7 @@
StoreFileReader r = file.getReader();
assertNotNull(r);
StoreFileScanner scanner =
new StoreFileScanner(r, mock(HFileScanner.class), false, false, 0, 0, false);
// Verify after instantiating scanner refCount is increased
assertTrue("Verify file is being referenced", file.isReferencedInReads());
@@ -252,11 +264,12 @@
byte[] cf = Bytes.toBytes("ty");
ColumnFamilyDescriptor cfd = ColumnFamilyDescriptorBuilder.of(cf);
when(store.getColumnFamilyDescriptor()).thenReturn(cfd);
-StoreFileScanner scanner =
-new StoreFileScanner(reader, mock(HFileScanner.class), false, false, 0, 0, true);
+try (StoreFileScanner scanner =
+new StoreFileScanner(reader, mock(HFileScanner.class), false, false, 0, 0, true)) {
Scan scan = new Scan();
scan.setColumnFamilyTimeRange(cf, 0, 1);
assertFalse(scanner.shouldUseScanner(scan, store, 0));
+}
}
@Test
@@ -266,22 +279,20 @@
// force temp data in hbase/target/test-data instead of /tmp/hbase-xxxx/
Configuration testConf = new Configuration(this.conf);
CommonFSUtils.setRootDir(testConf, testDir);
-HRegionFileSystem regionFs = HRegionFileSystem.createRegionOnFileSystem(
-testConf, fs, CommonFSUtils.getTableDir(testDir, hri.getTable()), hri);
+HRegionFileSystem regionFs = HRegionFileSystem.createRegionOnFileSystem(testConf, fs,
+CommonFSUtils.getTableDir(testDir, hri.getTable()), hri);
HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build();
// Make a store file and write data to it.
StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs)
-.withFilePath(regionFs.createTempName())
-.withFileContext(meta)
-.build();
+.withFilePath(regionFs.createTempName()).withFileContext(meta).build();
writeStoreFile(writer);
Path storeFilePath = regionFs.commitStoreFile(TEST_FAMILY, writer.getPath());
Path dstPath = new Path(regionFs.getTableDir(), new Path("test-region", TEST_FAMILY));
HFileLink.create(testConf, this.fs, dstPath, hri, storeFilePath.getName());
-Path linkFilePath = new Path(dstPath,
-HFileLink.createHFileLinkName(hri, storeFilePath.getName()));
+Path linkFilePath =
+new Path(dstPath, HFileLink.createHFileLinkName(hri, storeFilePath.getName()));
// Try to open store file from link
StoreFileInfo storeFileInfo = new StoreFileInfo(testConf, this.fs, linkFilePath, true);
@@ -300,8 +311,8 @@
}
/**
- * This test creates an hfile and then the dir structures and files to verify that references
- * to hfilelinks (created by snapshot clones) can be properly interpreted.
+ * This test creates an hfile and then the dir structures and files to verify that references to
+ * hfilelinks (created by snapshot clones) can be properly interpreted.
 */
@Test
public void testReferenceToHFileLink() throws IOException {
@@ -317,21 +328,18 @@
HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build();
// Make a store file and write data to it. <root>/<tablename>/<rgn>/<cf>/<file>
StoreFileWriter writer = new StoreFileWriter.Builder(testConf, cacheConf, this.fs)
-.withFilePath(regionFs.createTempName())
-.withFileContext(meta)
-.build();
+.withFilePath(regionFs.createTempName()).withFileContext(meta).build();
writeStoreFile(writer);
Path storeFilePath = regionFs.commitStoreFile(TEST_FAMILY, writer.getPath());
// create link to store file. <root>/clone/region/<cf>/<hfile>-<region>-<table>
RegionInfo hriClone = RegionInfoBuilder.newBuilder(TableName.valueOf("clone")).build();
-HRegionFileSystem cloneRegionFs = HRegionFileSystem.createRegionOnFileSystem(
-testConf, fs, CommonFSUtils.getTableDir(testDir, hri.getTable()),
-hriClone);
+HRegionFileSystem cloneRegionFs = HRegionFileSystem.createRegionOnFileSystem(testConf, fs,
+CommonFSUtils.getTableDir(testDir, hri.getTable()), hriClone);
Path dstPath = cloneRegionFs.getStoreDir(TEST_FAMILY);
HFileLink.create(testConf, this.fs, dstPath, hri, storeFilePath.getName());
-Path linkFilePath = new Path(dstPath,
-HFileLink.createHFileLinkName(hri, storeFilePath.getName()));
+Path linkFilePath =
+new Path(dstPath, HFileLink.createHFileLinkName(hri, storeFilePath.getName()));
// create splits of the link.
// <root>/clone/splitA/<cf>/<reftohfilelink>,
@@ -348,7 +356,7 @@
CommonFSUtils.logFileSystemState(fs, testDir, LOG);
// There is a case where a file with the hfilelink pattern is actually a daughter
// reference to a hfile link. This code in StoreFile that handles this case.
// Try to open store file from link
HStoreFile hsfA = new HStoreFile(this.fs, pathA, testConf, cacheConf, BloomType.NONE, true);
@@ -371,7 +379,7 @@
HFileScanner sB = hsfB.getReader().getScanner(false, false);
sB.seekTo();
-//count++ as seekTo() will advance the scanner
+// count++ as seekTo() will advance the scanner
count++;
while (sB.next()) {
count++;
@@ -385,8 +393,8 @@
throws IOException {
f.initReader();
Cell midkey = f.getReader().midKey().get();
-KeyValue midKV = (KeyValue)midkey;
-byte [] midRow = CellUtil.cloneRow(midKV);
+KeyValue midKV = (KeyValue) midkey;
+byte[] midRow = CellUtil.cloneRow(midKV);
// Create top split.
RegionInfo topHri =
RegionInfoBuilder.newBuilder(regionFs.getRegionInfo().getTable()).setEndKey(SPLITKEY).build();
@@ -414,13 +422,12 @@
ByteBuffer key = null;
HFileScanner topScanner = top.getScanner(false, false);
while ((!topScanner.isSeeked() && topScanner.seekTo()) ||
(topScanner.isSeeked() && topScanner.next())) {
key = ByteBuffer.wrap(((KeyValue) topScanner.getKey()).getKey());
if ((PrivateCellUtil.compare(topScanner.getReader().getComparator(), midKV, key.array(),
key.arrayOffset(), key.limit())) > 0) {
-fail("key=" + Bytes.toStringBinary(key) + " < midkey=" +
-midkey);
+fail("key=" + Bytes.toStringBinary(key) + " < midkey=" + midkey);
}
if (first) {
first = false;
@@ -431,14 +438,12 @@
first = true;
HFileScanner bottomScanner = bottom.getScanner(false, false);
-while ((!bottomScanner.isSeeked() && bottomScanner.seekTo()) ||
-bottomScanner.next()) {
+while ((!bottomScanner.isSeeked() && bottomScanner.seekTo()) || bottomScanner.next()) {
previous = ByteBuffer.wrap(((KeyValue) bottomScanner.getKey()).getKey());
key = ByteBuffer.wrap(((KeyValue) bottomScanner.getKey()).getKey());
if (first) {
first = false;
-LOG.info("First in bottom: " +
-Bytes.toString(Bytes.toBytes(previous)));
+LOG.info("First in bottom: " + Bytes.toString(Bytes.toBytes(previous)));
}
assertTrue(key.compareTo(bbMidkeyBytes) < 0);
}
@@ -452,7 +457,7 @@
// Next test using a midkey that does not exist in the file.
// First, do a key that is < than first key. Ensure splits behave
// properly.
-byte [] badmidkey = Bytes.toBytes(" .");
+byte[] badmidkey = Bytes.toBytes(" .");
assertTrue(fs.exists(f.getPath()));
topPath = splitStoreFile(regionFs, topHri, TEST_FAMILY, f, badmidkey, true);
bottomPath = splitStoreFile(regionFs, bottomHri, TEST_FAMILY, f, badmidkey, false);
@@ -466,8 +471,7 @@
first = true;
topScanner = top.getScanner(false, false);
KeyValue.KeyOnlyKeyValue keyOnlyKV = new KeyValue.KeyOnlyKeyValue(); KeyValue.KeyOnlyKeyValue keyOnlyKV = new KeyValue.KeyOnlyKeyValue();
while ((!topScanner.isSeeked() && topScanner.seekTo()) || while ((!topScanner.isSeeked() && topScanner.seekTo()) || topScanner.next()) {
topScanner.next()) {
key = ByteBuffer.wrap(((KeyValue) topScanner.getKey()).getKey()); key = ByteBuffer.wrap(((KeyValue) topScanner.getKey()).getKey());
keyOnlyKV.setKey(key.array(), 0 + key.arrayOffset(), key.limit()); keyOnlyKV.setKey(key.array(), 0 + key.arrayOffset(), key.limit());
assertTrue(PrivateCellUtil.compare(topScanner.getReader().getComparator(), keyOnlyKV, assertTrue(PrivateCellUtil.compare(topScanner.getReader().getComparator(), keyOnlyKV,
@ -477,7 +481,7 @@ public class TestHStoreFile extends HBaseTestCase {
KeyValue keyKV = KeyValueUtil.createKeyValueFromKey(key); KeyValue keyKV = KeyValueUtil.createKeyValueFromKey(key);
LOG.info("First top when key < bottom: " + keyKV); LOG.info("First top when key < bottom: " + keyKV);
String tmp = String tmp =
Bytes.toString(keyKV.getRowArray(), keyKV.getRowOffset(), keyKV.getRowLength()); Bytes.toString(keyKV.getRowArray(), keyKV.getRowOffset(), keyKV.getRowLength());
for (int i = 0; i < tmp.length(); i++) { for (int i = 0; i < tmp.length(); i++) {
assertTrue(tmp.charAt(i) == 'a'); assertTrue(tmp.charAt(i) == 'a');
} }
@ -495,7 +499,7 @@ public class TestHStoreFile extends HBaseTestCase {
// Test when badkey is > than last key in file ('||' > 'zz'). // Test when badkey is > than last key in file ('||' > 'zz').
badmidkey = Bytes.toBytes("|||"); badmidkey = Bytes.toBytes("|||");
topPath = splitStoreFile(regionFs,topHri, TEST_FAMILY, f, badmidkey, true); topPath = splitStoreFile(regionFs, topHri, TEST_FAMILY, f, badmidkey, true);
bottomPath = splitStoreFile(regionFs, bottomHri, TEST_FAMILY, f, badmidkey, false); bottomPath = splitStoreFile(regionFs, bottomHri, TEST_FAMILY, f, badmidkey, false);
assertNull(topPath); assertNull(topPath);
@ -504,8 +508,7 @@ public class TestHStoreFile extends HBaseTestCase {
bottom = bottomF.getReader(); bottom = bottomF.getReader();
first = true; first = true;
bottomScanner = bottom.getScanner(false, false); bottomScanner = bottom.getScanner(false, false);
while ((!bottomScanner.isSeeked() && bottomScanner.seekTo()) || while ((!bottomScanner.isSeeked() && bottomScanner.seekTo()) || bottomScanner.next()) {
bottomScanner.next()) {
key = ByteBuffer.wrap(((KeyValue) bottomScanner.getKey()).getKey()); key = ByteBuffer.wrap(((KeyValue) bottomScanner.getKey()).getKey());
if (first) { if (first) {
first = false; first = false;
@ -521,7 +524,7 @@ public class TestHStoreFile extends HBaseTestCase {
LOG.info("Last bottom when key > top: " + keyKV); LOG.info("Last bottom when key > top: " + keyKV);
for (int i = 0; i < tmp.length(); i++) { for (int i = 0; i < tmp.length(); i++) {
assertTrue(Bytes.toString(keyKV.getRowArray(), keyKV.getRowOffset(), keyKV.getRowLength()) assertTrue(Bytes.toString(keyKV.getRowArray(), keyKV.getRowOffset(), keyKV.getRowLength())
.charAt(i) == 'z'); .charAt(i) == 'z');
} }
} finally { } finally {
if (top != null) { if (top != null) {
@ -535,7 +538,7 @@ public class TestHStoreFile extends HBaseTestCase {
} }
private static StoreFileScanner getStoreFileScanner(StoreFileReader reader, boolean cacheBlocks, private static StoreFileScanner getStoreFileScanner(StoreFileReader reader, boolean cacheBlocks,
boolean pread) { boolean pread) {
return reader.getStoreFileScanner(cacheBlocks, pread, false, 0, 0, false); return reader.getStoreFileScanner(cacheBlocks, pread, false, 0, 0, false);
} }
@ -547,8 +550,8 @@ public class TestHStoreFile extends HBaseTestCase {
long now = System.currentTimeMillis(); long now = System.currentTimeMillis();
for (int i = 0; i < 2000; i += 2) { for (int i = 0; i < 2000; i += 2) {
String row = String.format(localFormatter, i); String row = String.format(localFormatter, i);
KeyValue kv = new KeyValue(Bytes.toBytes(row), Bytes.toBytes("family"), KeyValue kv = new KeyValue(Bytes.toBytes(row), Bytes.toBytes("family"), Bytes.toBytes("col"),
Bytes.toBytes("col"), now, Bytes.toBytes("value")); now, Bytes.toBytes("value"));
writer.append(kv); writer.append(kv);
} }
writer.close(); writer.close();
@ -556,7 +559,7 @@ public class TestHStoreFile extends HBaseTestCase {
ReaderContext context = new ReaderContextBuilder().withFileSystemAndPath(fs, f).build(); ReaderContext context = new ReaderContextBuilder().withFileSystemAndPath(fs, f).build();
HFileInfo fileInfo = new HFileInfo(context, conf); HFileInfo fileInfo = new HFileInfo(context, conf);
StoreFileReader reader = StoreFileReader reader =
new StoreFileReader(context, fileInfo, cacheConf, new AtomicInteger(0), conf); new StoreFileReader(context, fileInfo, cacheConf, new AtomicInteger(0), conf);
fileInfo.initMetaAndIndex(reader.getHFileReader()); fileInfo.initMetaAndIndex(reader.getHFileReader());
reader.loadFileInfo(); reader.loadFileInfo();
reader.loadBloomfilter(); reader.loadBloomfilter();
@ -570,12 +573,11 @@ public class TestHStoreFile extends HBaseTestCase {
TreeSet<byte[]> columns = new TreeSet<>(Bytes.BYTES_COMPARATOR); TreeSet<byte[]> columns = new TreeSet<>(Bytes.BYTES_COMPARATOR);
columns.add(Bytes.toBytes("family:col")); columns.add(Bytes.toBytes("family:col"));
Scan scan = new Scan().withStartRow(Bytes.toBytes(row)) Scan scan = new Scan().withStartRow(Bytes.toBytes(row)).withStopRow(Bytes.toBytes(row), true);
.withStopRow(Bytes.toBytes(row), true);
scan.addColumn(Bytes.toBytes("family"), Bytes.toBytes("family:col")); scan.addColumn(Bytes.toBytes("family"), Bytes.toBytes("family:col"));
HStore store = mock(HStore.class); HStore store = mock(HStore.class);
when(store.getColumnFamilyDescriptor()) when(store.getColumnFamilyDescriptor())
.thenReturn(ColumnFamilyDescriptorBuilder.of("family")); .thenReturn(ColumnFamilyDescriptorBuilder.of("family"));
boolean exists = scanner.shouldUseScanner(scan, store, Long.MIN_VALUE); boolean exists = scanner.shouldUseScanner(scan, store, Long.MIN_VALUE);
if (i % 2 == 0) { if (i % 2 == 0) {
if (!exists) { if (!exists) {
@ -591,60 +593,48 @@ public class TestHStoreFile extends HBaseTestCase {
fs.delete(f, true); fs.delete(f, true);
assertEquals("False negatives: " + falseNeg, 0, falseNeg); assertEquals("False negatives: " + falseNeg, 0, falseNeg);
int maxFalsePos = (int) (2 * 2000 * err); int maxFalsePos = (int) (2 * 2000 * err);
assertTrue("Too many false positives: " + falsePos + " (err=" + err + ", expected no more than " assertTrue("Too many false positives: " + falsePos + " (err=" + err +
+ maxFalsePos + ")", falsePos <= maxFalsePos); ", expected no more than " + maxFalsePos + ")", falsePos <= maxFalsePos);
} }
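The tolerance in the assertion above is plain arithmetic on the configured error rate: the read loop probes 2000 rows (only every other one was written), so the budget is 2 * 2000 * err, which is 40 when err is 0.01 as set in the bloom tests below. A worked sketch of that calculation:

      // Expected false-positive budget, mirroring the assertion above.
      float err = 0.01f; // value of IO_STOREFILE_BLOOM_ERROR_RATE in these tests
      int maxFalsePos = (int) (2 * 2000 * err); // = 40
      System.out.println("allow up to " + maxFalsePos + " false positives");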
private static final int BLOCKSIZE_SMALL = 8192; private static final int BLOCKSIZE_SMALL = 8192;
@Test @Test
public void testBloomFilter() throws Exception { public void testBloomFilter() throws Exception {
FileSystem fs = FileSystem.getLocal(conf);
conf.setFloat(BloomFilterFactory.IO_STOREFILE_BLOOM_ERROR_RATE, (float) 0.01); conf.setFloat(BloomFilterFactory.IO_STOREFILE_BLOOM_ERROR_RATE, (float) 0.01);
conf.setBoolean(BloomFilterFactory.IO_STOREFILE_BLOOM_ENABLED, true); conf.setBoolean(BloomFilterFactory.IO_STOREFILE_BLOOM_ENABLED, true);
// write the file // write the file
Path f = new Path(ROOT_DIR, getName()); Path f = new Path(ROOT_DIR, name.getMethodName());
HFileContext meta = new HFileContextBuilder().withBlockSize(BLOCKSIZE_SMALL) HFileContext meta = new HFileContextBuilder().withBlockSize(BLOCKSIZE_SMALL)
.withChecksumType(CKTYPE) .withChecksumType(CKTYPE).withBytesPerCheckSum(CKBYTES).build();
.withBytesPerCheckSum(CKBYTES).build();
// Make a store file and write data to it. // Make a store file and write data to it.
StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs) StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs).withFilePath(f)
.withFilePath(f) .withBloomType(BloomType.ROW).withMaxKeyCount(2000).withFileContext(meta).build();
.withBloomType(BloomType.ROW)
.withMaxKeyCount(2000)
.withFileContext(meta)
.build();
bloomWriteRead(writer, fs); bloomWriteRead(writer, fs);
} }
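The condensed builder chain above is the pattern used throughout the reformatted file. A minimal sketch of writing a ROW-bloom store file the same way, assuming this test class's conf, cacheConf, fs, ROOT_DIR, BLOCKSIZE_SMALL, CKTYPE and CKBYTES members (the path and the single appended cell are illustrative):

      conf.setFloat(BloomFilterFactory.IO_STOREFILE_BLOOM_ERROR_RATE, 0.01f);
      conf.setBoolean(BloomFilterFactory.IO_STOREFILE_BLOOM_ENABLED, true);
      Path f = new Path(ROOT_DIR, "bloomExample"); // illustrative path
      HFileContext meta = new HFileContextBuilder().withBlockSize(BLOCKSIZE_SMALL)
        .withChecksumType(CKTYPE).withBytesPerCheckSum(CKBYTES).build();
      StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, fs).withFilePath(f)
        .withBloomType(BloomType.ROW).withMaxKeyCount(2000).withFileContext(meta).build();
      try {
        writer.append(new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("family"),
          Bytes.toBytes("col"), System.currentTimeMillis(), Bytes.toBytes("value")));
      } finally {
        writer.close();
      }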
@Test @Test
public void testDeleteFamilyBloomFilter() throws Exception { public void testDeleteFamilyBloomFilter() throws Exception {
FileSystem fs = FileSystem.getLocal(conf);
conf.setFloat(BloomFilterFactory.IO_STOREFILE_BLOOM_ERROR_RATE, (float) 0.01); conf.setFloat(BloomFilterFactory.IO_STOREFILE_BLOOM_ERROR_RATE, (float) 0.01);
conf.setBoolean(BloomFilterFactory.IO_STOREFILE_BLOOM_ENABLED, true); conf.setBoolean(BloomFilterFactory.IO_STOREFILE_BLOOM_ENABLED, true);
float err = conf.getFloat(BloomFilterFactory.IO_STOREFILE_BLOOM_ERROR_RATE, 0); float err = conf.getFloat(BloomFilterFactory.IO_STOREFILE_BLOOM_ERROR_RATE, 0);
// write the file // write the file
Path f = new Path(ROOT_DIR, getName()); Path f = new Path(ROOT_DIR, name.getMethodName());
HFileContext meta = new HFileContextBuilder() HFileContext meta = new HFileContextBuilder().withBlockSize(BLOCKSIZE_SMALL)
.withBlockSize(BLOCKSIZE_SMALL) .withChecksumType(CKTYPE).withBytesPerCheckSum(CKBYTES).build();
.withChecksumType(CKTYPE)
.withBytesPerCheckSum(CKBYTES).build();
// Make a store file and write data to it. // Make a store file and write data to it.
StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs) StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs).withFilePath(f)
.withFilePath(f) .withMaxKeyCount(2000).withFileContext(meta).build();
.withMaxKeyCount(2000)
.withFileContext(meta)
.build();
// add delete family // add delete family
long now = System.currentTimeMillis(); long now = System.currentTimeMillis();
for (int i = 0; i < 2000; i += 2) { for (int i = 0; i < 2000; i += 2) {
String row = String.format(localFormatter, i); String row = String.format(localFormatter, i);
KeyValue kv = new KeyValue(Bytes.toBytes(row), Bytes.toBytes("family"), KeyValue kv = new KeyValue(Bytes.toBytes(row), Bytes.toBytes("family"), Bytes.toBytes("col"),
Bytes.toBytes("col"), now, KeyValue.Type.DeleteFamily, Bytes.toBytes("value")); now, KeyValue.Type.DeleteFamily, Bytes.toBytes("value"));
writer.append(kv); writer.append(kv);
} }
writer.close(); writer.close();
@ -652,7 +642,7 @@ public class TestHStoreFile extends HBaseTestCase {
ReaderContext context = new ReaderContextBuilder().withFileSystemAndPath(fs, f).build(); ReaderContext context = new ReaderContextBuilder().withFileSystemAndPath(fs, f).build();
HFileInfo fileInfo = new HFileInfo(context, conf); HFileInfo fileInfo = new HFileInfo(context, conf);
StoreFileReader reader = StoreFileReader reader =
new StoreFileReader(context, fileInfo, cacheConf, new AtomicInteger(0), conf); new StoreFileReader(context, fileInfo, cacheConf, new AtomicInteger(0), conf);
fileInfo.initMetaAndIndex(reader.getHFileReader()); fileInfo.initMetaAndIndex(reader.getHFileReader());
reader.loadFileInfo(); reader.loadFileInfo();
reader.loadBloomfilter(); reader.loadBloomfilter();
@ -679,8 +669,8 @@ public class TestHStoreFile extends HBaseTestCase {
fs.delete(f, true); fs.delete(f, true);
assertEquals("False negatives: " + falseNeg, 0, falseNeg); assertEquals("False negatives: " + falseNeg, 0, falseNeg);
int maxFalsePos = (int) (2 * 2000 * err); int maxFalsePos = (int) (2 * 2000 * err);
assertTrue("Too many false positives: " + falsePos + " (err=" + err assertTrue("Too many false positives: " + falsePos + " (err=" + err +
+ ", expected no more than " + maxFalsePos, falsePos <= maxFalsePos); ", expected no more than " + maxFalsePos, falsePos <= maxFalsePos);
} }
/** /**
@ -689,13 +679,11 @@ public class TestHStoreFile extends HBaseTestCase {
@Test @Test
public void testReseek() throws Exception { public void testReseek() throws Exception {
// write the file // write the file
Path f = new Path(ROOT_DIR, getName()); Path f = new Path(ROOT_DIR, name.getMethodName());
HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build(); HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build();
// Make a store file and write data to it. // Make a store file and write data to it.
StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs) StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs).withFilePath(f)
.withFilePath(f) .withFileContext(meta).build();
.withFileContext(meta)
.build();
writeStoreFile(writer); writeStoreFile(writer);
writer.close(); writer.close();
@ -703,7 +691,7 @@ public class TestHStoreFile extends HBaseTestCase {
ReaderContext context = new ReaderContextBuilder().withFileSystemAndPath(fs, f).build(); ReaderContext context = new ReaderContextBuilder().withFileSystemAndPath(fs, f).build();
HFileInfo fileInfo = new HFileInfo(context, conf); HFileInfo fileInfo = new HFileInfo(context, conf);
StoreFileReader reader = StoreFileReader reader =
new StoreFileReader(context, fileInfo, cacheConf, new AtomicInteger(0), conf); new StoreFileReader(context, fileInfo, cacheConf, new AtomicInteger(0), conf);
fileInfo.initMetaAndIndex(reader.getHFileReader()); fileInfo.initMetaAndIndex(reader.getHFileReader());
// Now do reseek with empty KV to position to the beginning of the file // Now do reseek with empty KV to position to the beginning of the file
@ -727,51 +715,43 @@ public class TestHStoreFile extends HBaseTestCase {
int versions = 2; int versions = 2;
// run once using columns and once using rows // run once using columns and once using rows
BloomType[] bt = {BloomType.ROWCOL, BloomType.ROW}; BloomType[] bt = { BloomType.ROWCOL, BloomType.ROW };
int[] expKeys = {rowCount*colCount, rowCount}; int[] expKeys = { rowCount * colCount, rowCount };
      // The next line deserves commentary: it is the expected number of bloom false positives.
      // column-level = rowCount*2*colCount inserts
      // row-level = only rowCount*2 inserts, but failures will be magnified by the
      // 2nd for loop over every column (2*colCount); see the worked sketch below.
float[] expErr = {2*rowCount*colCount*err, 2*rowCount*2*colCount*err}; float[] expErr = { 2 * rowCount * colCount * err, 2 * rowCount * 2 * colCount * err };
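Concretely, the two expectations work out as follows (a worked sketch; rowCount and colCount are whatever this test configures above, the numbers here are only illustrative):

      // Worked example of the expErr arithmetic above (numbers are illustrative).
      float err = 0.01f;
      int rowCount = 1000, colCount = 10;
      float colLevelErr = 2 * rowCount * colCount * err;     // ROWCOL: one insert per row/col pair
      float rowLevelErr = 2 * rowCount * 2 * colCount * err; // ROW: misses repeated for every column probed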
for (int x : new int[]{0,1}) { for (int x : new int[] { 0, 1 }) {
// write the file // write the file
Path f = new Path(ROOT_DIR, getName() + x); Path f = new Path(ROOT_DIR, name.getMethodName() + x);
HFileContext meta = new HFileContextBuilder().withBlockSize(BLOCKSIZE_SMALL) HFileContext meta = new HFileContextBuilder().withBlockSize(BLOCKSIZE_SMALL)
.withChecksumType(CKTYPE) .withChecksumType(CKTYPE).withBytesPerCheckSum(CKBYTES).build();
.withBytesPerCheckSum(CKBYTES).build();
// Make a store file and write data to it. // Make a store file and write data to it.
StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs) StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs).withFilePath(f)
.withFilePath(f) .withBloomType(bt[x]).withMaxKeyCount(expKeys[x]).withFileContext(meta).build();
.withBloomType(bt[x])
.withMaxKeyCount(expKeys[x])
.withFileContext(meta)
.build();
long now = System.currentTimeMillis(); long now = System.currentTimeMillis();
for (int i = 0; i < rowCount*2; i += 2) { // rows for (int i = 0; i < rowCount * 2; i += 2) { // rows
for (int j = 0; j < colCount*2; j += 2) { // column qualifiers for (int j = 0; j < colCount * 2; j += 2) { // column qualifiers
String row = String.format(localFormatter, i); String row = String.format(localFormatter, i);
String col = String.format(localFormatter, j); String col = String.format(localFormatter, j);
for (int k= 0; k < versions; ++k) { // versions for (int k = 0; k < versions; ++k) { // versions
KeyValue kv = new KeyValue(Bytes.toBytes(row), Bytes.toBytes("family"), KeyValue kv = new KeyValue(Bytes.toBytes(row), Bytes.toBytes("family"),
Bytes.toBytes("col" + col), now-k, Bytes.toBytes(-1L)); Bytes.toBytes("col" + col), now - k, Bytes.toBytes(-1L));
writer.append(kv); writer.append(kv);
} }
} }
} }
writer.close(); writer.close();
ReaderContext context = new ReaderContextBuilder() ReaderContext context =
.withFilePath(f) new ReaderContextBuilder().withFilePath(f).withFileSize(fs.getFileStatus(f).getLen())
.withFileSize(fs.getFileStatus(f).getLen()) .withFileSystem(fs).withInputStreamWrapper(new FSDataInputStreamWrapper(fs, f)).build();
.withFileSystem(fs)
.withInputStreamWrapper(new FSDataInputStreamWrapper(fs, f))
.build();
HFileInfo fileInfo = new HFileInfo(context, conf); HFileInfo fileInfo = new HFileInfo(context, conf);
StoreFileReader reader = StoreFileReader reader =
new StoreFileReader(context, fileInfo, cacheConf, new AtomicInteger(0), conf); new StoreFileReader(context, fileInfo, cacheConf, new AtomicInteger(0), conf);
fileInfo.initMetaAndIndex(reader.getHFileReader()); fileInfo.initMetaAndIndex(reader.getHFileReader());
reader.loadFileInfo(); reader.loadFileInfo();
reader.loadBloomfilter(); reader.loadBloomfilter();
@ -780,23 +760,22 @@ public class TestHStoreFile extends HBaseTestCase {
HStore store = mock(HStore.class); HStore store = mock(HStore.class);
when(store.getColumnFamilyDescriptor()) when(store.getColumnFamilyDescriptor())
.thenReturn(ColumnFamilyDescriptorBuilder.of("family")); .thenReturn(ColumnFamilyDescriptorBuilder.of("family"));
// check false positives rate // check false positives rate
int falsePos = 0; int falsePos = 0;
int falseNeg = 0; int falseNeg = 0;
for (int i = 0; i < rowCount*2; ++i) { // rows for (int i = 0; i < rowCount * 2; ++i) { // rows
for (int j = 0; j < colCount*2; ++j) { // column qualifiers for (int j = 0; j < colCount * 2; ++j) { // column qualifiers
String row = String.format(localFormatter, i); String row = String.format(localFormatter, i);
String col = String.format(localFormatter, j); String col = String.format(localFormatter, j);
TreeSet<byte[]> columns = new TreeSet<>(Bytes.BYTES_COMPARATOR); TreeSet<byte[]> columns = new TreeSet<>(Bytes.BYTES_COMPARATOR);
columns.add(Bytes.toBytes("col" + col)); columns.add(Bytes.toBytes("col" + col));
Scan scan = new Scan().withStartRow(Bytes.toBytes(row)) Scan scan =
.withStopRow(Bytes.toBytes(row), true); new Scan().withStartRow(Bytes.toBytes(row)).withStopRow(Bytes.toBytes(row), true);
scan.addColumn(Bytes.toBytes("family"), Bytes.toBytes(("col"+col))); scan.addColumn(Bytes.toBytes("family"), Bytes.toBytes(("col" + col)));
boolean exists = boolean exists = scanner.shouldUseScanner(scan, store, Long.MIN_VALUE);
scanner.shouldUseScanner(scan, store, Long.MIN_VALUE);
boolean shouldRowExist = i % 2 == 0; boolean shouldRowExist = i % 2 == 0;
boolean shouldColExist = j % 2 == 0; boolean shouldColExist = j % 2 == 0;
shouldColExist = shouldColExist || bt[x] == BloomType.ROW; shouldColExist = shouldColExist || bt[x] == BloomType.ROW;
@ -817,25 +796,24 @@ public class TestHStoreFile extends HBaseTestCase {
System.out.println(" False negatives: " + falseNeg); System.out.println(" False negatives: " + falseNeg);
System.out.println(" False positives: " + falsePos); System.out.println(" False positives: " + falsePos);
assertEquals(0, falseNeg); assertEquals(0, falseNeg);
assertTrue(falsePos < 2*expErr[x]); assertTrue(falsePos < 2 * expErr[x]);
} }
} }
@Test @Test
public void testSeqIdComparator() { public void testSeqIdComparator() {
assertOrdering(StoreFileComparators.SEQ_ID, mockStoreFile(true, 100, 1000, -1, "/foo/123"), assertOrdering(StoreFileComparators.SEQ_ID, mockStoreFile(true, 100, 1000, -1, "/foo/123"),
mockStoreFile(true, 100, 1000, -1, "/foo/124"), mockStoreFile(true, 100, 1000, -1, "/foo/124"), mockStoreFile(true, 99, 1000, -1, "/foo/126"),
mockStoreFile(true, 99, 1000, -1, "/foo/126"), mockStoreFile(true, 98, 2000, -1, "/foo/126"), mockStoreFile(false, 3453, -1, 1, "/foo/1"),
mockStoreFile(true, 98, 2000, -1, "/foo/126"), mockStoreFile(false, 3453, -1, 1, "/foo/1"), mockStoreFile(false, 2, -1, 3, "/foo/2"), mockStoreFile(false, 1000, -1, 5, "/foo/2"),
mockStoreFile(false, 2, -1, 3, "/foo/2"), mockStoreFile(false, 1000, -1, 5, "/foo/2"), mockStoreFile(false, 76, -1, 5, "/foo/3"));
mockStoreFile(false, 76, -1, 5, "/foo/3"));
} }
/** /**
* Assert that the given comparator orders the given storefiles in the * Assert that the given comparator orders the given storefiles in the same way that they're
* same way that they're passed. * passed.
*/ */
private void assertOrdering(Comparator<? super HStoreFile> comparator, HStoreFile ... sfs) { private void assertOrdering(Comparator<? super HStoreFile> comparator, HStoreFile... sfs) {
ArrayList<HStoreFile> sorted = Lists.newArrayList(sfs); ArrayList<HStoreFile> sorted = Lists.newArrayList(sfs);
Collections.shuffle(sorted); Collections.shuffle(sorted);
Collections.sort(sorted, comparator); Collections.sort(sorted, comparator);
@ -847,11 +825,8 @@ public class TestHStoreFile extends HBaseTestCase {
/** /**
* Create a mock StoreFile with the given attributes. * Create a mock StoreFile with the given attributes.
*/ */
private HStoreFile mockStoreFile(boolean bulkLoad, private HStoreFile mockStoreFile(boolean bulkLoad, long size, long bulkTimestamp, long seqId,
long size, String path) {
long bulkTimestamp,
long seqId,
String path) {
HStoreFile mock = Mockito.mock(HStoreFile.class); HStoreFile mock = Mockito.mock(HStoreFile.class);
StoreFileReader reader = Mockito.mock(StoreFileReader.class); StoreFileReader reader = Mockito.mock(StoreFileReader.class);
@ -862,10 +837,8 @@ public class TestHStoreFile extends HBaseTestCase {
Mockito.doReturn(OptionalLong.of(bulkTimestamp)).when(mock).getBulkLoadTimestamp(); Mockito.doReturn(OptionalLong.of(bulkTimestamp)).when(mock).getBulkLoadTimestamp();
Mockito.doReturn(seqId).when(mock).getMaxSequenceId(); Mockito.doReturn(seqId).when(mock).getMaxSequenceId();
Mockito.doReturn(new Path(path)).when(mock).getPath(); Mockito.doReturn(new Path(path)).when(mock).getPath();
String name = "mock storefile, bulkLoad=" + bulkLoad + String name = "mock storefile, bulkLoad=" + bulkLoad + " bulkTimestamp=" + bulkTimestamp +
" bulkTimestamp=" + bulkTimestamp + " seqId=" + seqId + " path=" + path;
" seqId=" + seqId +
" path=" + path;
Mockito.doReturn(name).when(mock).toString(); Mockito.doReturn(name).when(mock).toString();
return mock; return mock;
} }
@ -874,14 +847,13 @@ public class TestHStoreFile extends HBaseTestCase {
* Generate a list of KeyValues for testing based on given parameters * Generate a list of KeyValues for testing based on given parameters
* @return the rows key-value list * @return the rows key-value list
*/ */
List<KeyValue> getKeyValueSet(long[] timestamps, int numRows, List<KeyValue> getKeyValueSet(long[] timestamps, int numRows, byte[] qualifier, byte[] family) {
byte[] qualifier, byte[] family) {
List<KeyValue> kvList = new ArrayList<>(); List<KeyValue> kvList = new ArrayList<>();
for (int i=1;i<=numRows;i++) { for (int i = 1; i <= numRows; i++) {
byte[] b = Bytes.toBytes(i) ; byte[] b = Bytes.toBytes(i);
LOG.info(Bytes.toString(b)); LOG.info(Bytes.toString(b));
LOG.info(Bytes.toString(b)); LOG.info(Bytes.toString(b));
for (long timestamp: timestamps) { for (long timestamp : timestamps) {
kvList.add(new KeyValue(b, family, qualifier, timestamp, b)); kvList.add(new KeyValue(b, family, qualifier, timestamp, b));
} }
} }
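The helper above is consumed a few lines further down; a minimal usage sketch, with the same family, qualifier and timestamps as this test and a writer built as elsewhere in the file:

      // Build 10 rows x 4 timestamps of KeyValues and feed them to a store file writer.
      long[] timestamps = new long[] { 20, 10, 5, 1 };
      List<KeyValue> kvList = getKeyValueSet(timestamps, 10, Bytes.toBytes("qualifier"),
        Bytes.toBytes("familyname"));
      for (KeyValue kv : kvList) {
        writer.append(kv);
      }
      writer.appendMetadata(0, false);
      writer.close();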
@ -896,7 +868,7 @@ public class TestHStoreFile extends HBaseTestCase {
byte[] family = Bytes.toBytes("familyname"); byte[] family = Bytes.toBytes("familyname");
byte[] qualifier = Bytes.toBytes("qualifier"); byte[] qualifier = Bytes.toBytes("qualifier");
int numRows = 10; int numRows = 10;
long[] timestamps = new long[] {20,10,5,1}; long[] timestamps = new long[] { 20, 10, 5, 1 };
Scan scan = new Scan(); Scan scan = new Scan();
// Make up a directory hierarchy that has a regiondir ("7e0102") and familyname. // Make up a directory hierarchy that has a regiondir ("7e0102") and familyname.
@ -905,12 +877,9 @@ public class TestHStoreFile extends HBaseTestCase {
HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build(); HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build();
// Make a store file and write data to it. // Make a store file and write data to it.
StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs) StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs)
.withOutputDir(dir) .withOutputDir(dir).withFileContext(meta).build();
.withFileContext(meta)
.build();
List<KeyValue> kvList = getKeyValueSet(timestamps,numRows, List<KeyValue> kvList = getKeyValueSet(timestamps, numRows, qualifier, family);
qualifier, family);
for (KeyValue kv : kvList) { for (KeyValue kv : kvList) {
writer.append(kv); writer.append(kv);
@ -918,8 +887,8 @@ public class TestHStoreFile extends HBaseTestCase {
writer.appendMetadata(0, false); writer.appendMetadata(0, false);
writer.close(); writer.close();
HStoreFile hsf = new HStoreFile(this.fs, writer.getPath(), conf, cacheConf, HStoreFile hsf =
BloomType.NONE, true); new HStoreFile(this.fs, writer.getPath(), conf, cacheConf, BloomType.NONE, true);
HStore store = mock(HStore.class); HStore store = mock(HStore.class);
when(store.getColumnFamilyDescriptor()).thenReturn(ColumnFamilyDescriptorBuilder.of(family)); when(store.getColumnFamilyDescriptor()).thenReturn(ColumnFamilyDescriptorBuilder.of(family));
hsf.initReader(); hsf.initReader();
@ -958,7 +927,7 @@ public class TestHStoreFile extends HBaseTestCase {
Configuration conf = this.conf; Configuration conf = this.conf;
// Find a home for our files (regiondir ("7e0102") and familyname). // Find a home for our files (regiondir ("7e0102") and familyname).
Path baseDir = new Path(new Path(testDir, "7e0102"),"twoCOWEOC"); Path baseDir = new Path(new Path(testDir, "7e0102"), "twoCOWEOC");
// Grab the block cache and get the initial hit/miss counts // Grab the block cache and get the initial hit/miss counts
BlockCache bc = BlockCacheFactory.createBlockCache(conf); BlockCache bc = BlockCacheFactory.createBlockCache(conf);
@ -973,8 +942,8 @@ public class TestHStoreFile extends HBaseTestCase {
CacheConfig cacheConf = new CacheConfig(conf, bc); CacheConfig cacheConf = new CacheConfig(conf, bc);
Path pathCowOff = new Path(baseDir, "123456789"); Path pathCowOff = new Path(baseDir, "123456789");
StoreFileWriter writer = writeStoreFile(conf, cacheConf, pathCowOff, 3); StoreFileWriter writer = writeStoreFile(conf, cacheConf, pathCowOff, 3);
HStoreFile hsf = new HStoreFile(this.fs, writer.getPath(), conf, cacheConf, HStoreFile hsf =
BloomType.NONE, true); new HStoreFile(this.fs, writer.getPath(), conf, cacheConf, BloomType.NONE, true);
LOG.debug(hsf.getPath().toString()); LOG.debug(hsf.getPath().toString());
// Read this file, we should see 3 misses // Read this file, we should see 3 misses
@ -998,8 +967,7 @@ public class TestHStoreFile extends HBaseTestCase {
cacheConf = new CacheConfig(conf, bc); cacheConf = new CacheConfig(conf, bc);
Path pathCowOn = new Path(baseDir, "123456788"); Path pathCowOn = new Path(baseDir, "123456788");
writer = writeStoreFile(conf, cacheConf, pathCowOn, 3); writer = writeStoreFile(conf, cacheConf, pathCowOn, 3);
hsf = new HStoreFile(this.fs, writer.getPath(), conf, cacheConf, hsf = new HStoreFile(this.fs, writer.getPath(), conf, cacheConf, BloomType.NONE, true);
BloomType.NONE, true);
// Read this file, we should see 3 hits // Read this file, we should see 3 hits
hsf.initReader(); hsf.initReader();
@ -1036,12 +1004,10 @@ public class TestHStoreFile extends HBaseTestCase {
assertTrue(kv1.equals(kv2)); assertTrue(kv1.equals(kv2));
KeyValue keyv1 = KeyValueUtil.ensureKeyValue(kv1); KeyValue keyv1 = KeyValueUtil.ensureKeyValue(kv1);
KeyValue keyv2 = KeyValueUtil.ensureKeyValue(kv2); KeyValue keyv2 = KeyValueUtil.ensureKeyValue(kv2);
assertTrue(Bytes.compareTo( assertTrue(Bytes.compareTo(keyv1.getBuffer(), keyv1.getKeyOffset(), keyv1.getKeyLength(),
keyv1.getBuffer(), keyv1.getKeyOffset(), keyv1.getKeyLength(), keyv2.getBuffer(), keyv2.getKeyOffset(), keyv2.getKeyLength()) == 0);
keyv2.getBuffer(), keyv2.getKeyOffset(), keyv2.getKeyLength()) == 0); assertTrue(Bytes.compareTo(kv1.getValueArray(), kv1.getValueOffset(), kv1.getValueLength(),
assertTrue(Bytes.compareTo( kv2.getValueArray(), kv2.getValueOffset(), kv2.getValueLength()) == 0);
kv1.getValueArray(), kv1.getValueOffset(), kv1.getValueLength(),
kv2.getValueArray(), kv2.getValueOffset(), kv2.getValueLength()) == 0);
} }
assertNull(scannerTwo.next()); assertNull(scannerTwo.next());
assertEquals(startHit + 6, cs.getHitCount()); assertEquals(startHit + 6, cs.getHitCount());
@ -1082,9 +1048,8 @@ public class TestHStoreFile extends HBaseTestCase {
} }
private Path splitStoreFile(final HRegionFileSystem regionFs, final RegionInfo hri, private Path splitStoreFile(final HRegionFileSystem regionFs, final RegionInfo hri,
final String family, final HStoreFile sf, final byte[] splitKey, boolean isTopRef) final String family, final HStoreFile sf, final byte[] splitKey, boolean isTopRef)
throws IOException { throws IOException {
FileSystem fs = regionFs.getFileSystem();
Path path = regionFs.splitStoreFile(hri, family, sf, splitKey, isTopRef, null); Path path = regionFs.splitStoreFile(hri, family, sf, splitKey, isTopRef, null);
if (null == path) { if (null == path) {
return null; return null;
@ -1094,31 +1059,26 @@ public class TestHStoreFile extends HBaseTestCase {
} }
private StoreFileWriter writeStoreFile(Configuration conf, CacheConfig cacheConf, Path path, private StoreFileWriter writeStoreFile(Configuration conf, CacheConfig cacheConf, Path path,
int numBlocks) throws IOException { int numBlocks) throws IOException {
// Let's put ~5 small KVs in each block, so let's make 5*numBlocks KVs // Let's put ~5 small KVs in each block, so let's make 5*numBlocks KVs
int numKVs = 5 * numBlocks; int numKVs = 5 * numBlocks;
List<KeyValue> kvs = new ArrayList<>(numKVs); List<KeyValue> kvs = new ArrayList<>(numKVs);
byte [] b = Bytes.toBytes("x"); byte[] b = Bytes.toBytes("x");
int totalSize = 0; int totalSize = 0;
for (int i=numKVs;i>0;i--) { for (int i = numKVs; i > 0; i--) {
KeyValue kv = new KeyValue(b, b, b, i, b); KeyValue kv = new KeyValue(b, b, b, i, b);
kvs.add(kv); kvs.add(kv);
// kv has memstoreTS 0, which takes 1 byte to store. // kv has memstoreTS 0, which takes 1 byte to store.
totalSize += kv.getLength() + 1; totalSize += kv.getLength() + 1;
} }
int blockSize = totalSize / numBlocks; int blockSize = totalSize / numBlocks;
HFileContext meta = new HFileContextBuilder().withBlockSize(blockSize) HFileContext meta = new HFileContextBuilder().withBlockSize(blockSize).withChecksumType(CKTYPE)
.withChecksumType(CKTYPE) .withBytesPerCheckSum(CKBYTES).build();
.withBytesPerCheckSum(CKBYTES)
.build();
// Make a store file and write data to it. // Make a store file and write data to it.
StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs) StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs)
.withFilePath(path) .withFilePath(path).withMaxKeyCount(2000).withFileContext(meta).build();
.withMaxKeyCount(2000)
.withFileContext(meta)
.build();
// We'll write N-1 KVs to ensure we don't write an extra block // We'll write N-1 KVs to ensure we don't write an extra block
kvs.remove(kvs.size()-1); kvs.remove(kvs.size() - 1);
for (KeyValue kv : kvs) { for (KeyValue kv : kvs) {
writer.append(kv); writer.append(kv);
} }
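The sizing logic in this helper is easiest to follow with concrete numbers; a worked sketch (numBlocks is illustrative):

      // For numBlocks = 3: 15 small KVs are sized, the block size is the total divided by 3,
      // and only 14 KVs are written so no extra block is started.
      int numBlocks = 3;
      int numKVs = 5 * numBlocks; // 15
      byte[] b = Bytes.toBytes("x");
      int totalSize = 0;
      for (int i = numKVs; i > 0; i--) {
        totalSize += new KeyValue(b, b, b, i, b).getLength() + 1; // +1 byte for memstoreTS 0
      }
      int blockSize = totalSize / numBlocks;
      int kvsToWrite = numKVs - 1; // write N-1 KVs to avoid an extra block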
@ -1128,8 +1088,7 @@ public class TestHStoreFile extends HBaseTestCase {
} }
/** /**
* Check if data block encoding information is saved correctly in HFile's * Check if data block encoding information is saved correctly in HFile's file info.
* file info.
*/ */
@Test @Test
public void testDataBlockEncodingMetaData() throws IOException { public void testDataBlockEncodingMetaData() throws IOException {
@ -1137,32 +1096,23 @@ public class TestHStoreFile extends HBaseTestCase {
Path dir = new Path(new Path(testDir, "7e0102"), "familyname"); Path dir = new Path(new Path(testDir, "7e0102"), "familyname");
Path path = new Path(dir, "1234567890"); Path path = new Path(dir, "1234567890");
DataBlockEncoding dataBlockEncoderAlgo = DataBlockEncoding dataBlockEncoderAlgo = DataBlockEncoding.FAST_DIFF;
DataBlockEncoding.FAST_DIFF;
HFileDataBlockEncoder dataBlockEncoder =
new HFileDataBlockEncoderImpl(
dataBlockEncoderAlgo);
cacheConf = new CacheConfig(conf); cacheConf = new CacheConfig(conf);
HFileContext meta = new HFileContextBuilder().withBlockSize(BLOCKSIZE_SMALL) HFileContext meta =
.withChecksumType(CKTYPE) new HFileContextBuilder().withBlockSize(BLOCKSIZE_SMALL).withChecksumType(CKTYPE)
.withBytesPerCheckSum(CKBYTES) .withBytesPerCheckSum(CKBYTES).withDataBlockEncoding(dataBlockEncoderAlgo).build();
.withDataBlockEncoding(dataBlockEncoderAlgo)
.build();
// Make a store file and write data to it. // Make a store file and write data to it.
StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs) StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs)
.withFilePath(path) .withFilePath(path).withMaxKeyCount(2000).withFileContext(meta).build();
.withMaxKeyCount(2000)
.withFileContext(meta)
.build();
writer.close(); writer.close();
HStoreFile storeFile = HStoreFile storeFile =
new HStoreFile(fs, writer.getPath(), conf, cacheConf, BloomType.NONE, true); new HStoreFile(fs, writer.getPath(), conf, cacheConf, BloomType.NONE, true);
storeFile.initReader(); storeFile.initReader();
StoreFileReader reader = storeFile.getReader(); StoreFileReader reader = storeFile.getReader();
Map<byte[], byte[]> fileInfo = reader.loadFileInfo(); Map<byte[], byte[]> fileInfo = reader.loadFileInfo();
byte[] value = fileInfo.get(HFileDataBlockEncoder.DATA_BLOCK_ENCODING); byte[] value = fileInfo.get(HFileDataBlockEncoder.DATA_BLOCK_ENCODING);
assertEquals(dataBlockEncoderAlgo.getNameInBytes(), value); assertArrayEquals(dataBlockEncoderAlgo.getNameInBytes(), value);
} }
} }
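The encoding check in the last hunk boils down to reading one file-info key back from the reader. A minimal sketch, assuming a store file written with FAST_DIFF as above:

      // Verify the data block encoding recorded in the HFile's file info.
      storeFile.initReader();
      Map<byte[], byte[]> fileInfo = storeFile.getReader().loadFileInfo();
      byte[] value = fileInfo.get(HFileDataBlockEncoder.DATA_BLOCK_ENCODING);
      assertArrayEquals(DataBlockEncoding.FAST_DIFF.getNameInBytes(), value);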


@ -17,6 +17,10 @@
*/ */
package org.apache.hadoop.hbase.regionserver; package org.apache.hadoop.hbase.regionserver;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThrows;
import static org.junit.Assert.assertTrue;
import java.io.IOException; import java.io.IOException;
import java.util.ArrayList; import java.util.ArrayList;
import java.util.Arrays; import java.util.Arrays;
@ -24,23 +28,21 @@ import java.util.List;
import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparatorImpl; import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestCase;
import org.apache.hadoop.hbase.KeyValue; import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.testclassification.RegionServerTests; import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.testclassification.SmallTests; import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.CollectionBackedScanner; import org.apache.hadoop.hbase.util.CollectionBackedScanner;
import org.junit.Before;
import org.junit.ClassRule; import org.junit.ClassRule;
import org.junit.Test; import org.junit.Test;
import org.junit.experimental.categories.Category; import org.junit.experimental.categories.Category;
@Category({RegionServerTests.class, SmallTests.class}) @Category({ RegionServerTests.class, SmallTests.class })
public class TestKeyValueHeap extends HBaseTestCase { public class TestKeyValueHeap {
@ClassRule @ClassRule
public static final HBaseClassTestRule CLASS_RULE = public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestKeyValueHeap.class); HBaseClassTestRule.forClass(TestKeyValueHeap.class);
private byte[] row1 = Bytes.toBytes("row1"); private byte[] row1 = Bytes.toBytes("row1");
private byte[] fam1 = Bytes.toBytes("fam1"); private byte[] fam1 = Bytes.toBytes("fam1");
@ -74,67 +76,59 @@ public class TestKeyValueHeap extends HBaseTestCase {
List<KeyValueScanner> scanners = new ArrayList<>(Arrays.asList(s1, s2, s3)); List<KeyValueScanner> scanners = new ArrayList<>(Arrays.asList(s1, s2, s3));
  /*
   * Uses {@code scanners} to build a KeyValueHeap, iterates over it and asserts that the returned
   * Cells are the same as {@code expected}.
   * @return List of Cells returned from scanners.
   */
public List<Cell> assertCells(List<Cell> expected, List<KeyValueScanner> scanners) public List<Cell> assertCells(List<Cell> expected, List<KeyValueScanner> scanners)
throws IOException { throws IOException {
//Creating KeyValueHeap // Creating KeyValueHeap
KeyValueHeap kvh = new KeyValueHeap(scanners, CellComparatorImpl.COMPARATOR); try (KeyValueHeap kvh = new KeyValueHeap(scanners, CellComparatorImpl.COMPARATOR)) {
List<Cell> actual = new ArrayList<>();
while (kvh.peek() != null) {
actual.add(kvh.next());
}
List<Cell> actual = new ArrayList<>(); assertEquals(expected, actual);
while(kvh.peek() != null){ return actual;
actual.add(kvh.next());
} }
assertEquals(expected, actual);
return actual;
}
@Override
@Before
public void setUp() throws Exception {
super.setUp();
} }
@Test @Test
public void testSorted() throws IOException{ public void testSorted() throws IOException {
//Cases that need to be checked are: // Cases that need to be checked are:
//1. The "smallest" Cell is in the same scanners as current // 1. The "smallest" Cell is in the same scanners as current
//2. Current scanner gets empty // 2. Current scanner gets empty
List<Cell> expected = Arrays.asList( List<Cell> expected =
kv111, kv112, kv113, kv114, kv115, kv121, kv122, kv211, kv212, kv213); Arrays.asList(kv111, kv112, kv113, kv114, kv115, kv121, kv122, kv211, kv212, kv213);
List<Cell> actual = assertCells(expected, scanners); List<Cell> actual = assertCells(expected, scanners);
//Check if result is sorted according to Comparator // Check if result is sorted according to Comparator
for(int i=0; i<actual.size()-1; i++){ for (int i = 0; i < actual.size() - 1; i++) {
int ret = CellComparatorImpl.COMPARATOR.compare(actual.get(i), actual.get(i+1)); int ret = CellComparatorImpl.COMPARATOR.compare(actual.get(i), actual.get(i + 1));
assertTrue(ret < 0); assertTrue(ret < 0);
} }
} }
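The pattern adopted throughout this file is to drain the heap inside try-with-resources so the underlying scanners are always released. A minimal sketch, for any List<KeyValueScanner> scanners, inside a test method that declares throws IOException:

      // Iterate a KeyValueHeap and collect its cells, closing it deterministically.
      List<Cell> actual = new ArrayList<>();
      try (KeyValueHeap kvh = new KeyValueHeap(scanners, CellComparatorImpl.COMPARATOR)) {
        while (kvh.peek() != null) {
          actual.add(kvh.next());
        }
      }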
@Test @Test
public void testSeek() throws IOException { public void testSeek() throws IOException {
//Cases: // Cases:
//1. Seek Cell that is not in scanner // 1. Seek Cell that is not in scanner
//2. Check that smallest that is returned from a seek is correct // 2. Check that smallest that is returned from a seek is correct
List<Cell> expected = Arrays.asList(kv211); List<Cell> expected = Arrays.asList(kv211);
//Creating KeyValueHeap // Creating KeyValueHeap
KeyValueHeap kvh = try (KeyValueHeap kvh = new KeyValueHeap(scanners, CellComparatorImpl.COMPARATOR)) {
new KeyValueHeap(scanners, CellComparatorImpl.COMPARATOR); Cell seekKv = new KeyValue(row2, fam1, null, null);
kvh.seek(seekKv);
Cell seekKv = new KeyValue(row2, fam1, null, null); List<Cell> actual = Arrays.asList(kvh.peek());
kvh.seek(seekKv);
List<Cell> actual = Arrays.asList(kvh.peek()); assertEquals("Expected = " + Arrays.toString(expected.toArray()) + "\n Actual = " +
Arrays.toString(actual.toArray()), expected, actual);
assertEquals("Expected = " + Arrays.toString(expected.toArray()) }
+ "\n Actual = " + Arrays.toString(actual.toArray()), expected, actual);
} }
@Test @Test
@ -144,20 +138,25 @@ public class TestKeyValueHeap extends HBaseTestCase {
TestScanner s4 = new TestScanner(new ArrayList<>()); TestScanner s4 = new TestScanner(new ArrayList<>());
scanners.add(s4); scanners.add(s4);
//Creating KeyValueHeap // Creating KeyValueHeap
KeyValueHeap kvh = new KeyValueHeap(scanners, CellComparatorImpl.COMPARATOR); try (KeyValueHeap kvh = new KeyValueHeap(scanners, CellComparatorImpl.COMPARATOR)) {
for (;;) {
if (kvh.next() == null) {
break;
}
}
// Once the internal scanners go out of Cells, those will be removed from KVHeap's priority
// queue and added to a Set for lazy close. The actual close will happen only on
// KVHeap#close()
assertEquals(4, kvh.scannersForDelayedClose.size());
assertTrue(kvh.scannersForDelayedClose.contains(s1));
assertTrue(kvh.scannersForDelayedClose.contains(s2));
assertTrue(kvh.scannersForDelayedClose.contains(s3));
assertTrue(kvh.scannersForDelayedClose.contains(s4));
}
while(kvh.next() != null); for (KeyValueScanner scanner : scanners) {
// Once the internal scanners go out of Cells, those will be removed from KVHeap's priority assertTrue(((TestScanner) scanner).isClosed());
// queue and added to a Set for lazy close. The actual close will happen only on KVHeap#close()
assertEquals(4, kvh.scannersForDelayedClose.size());
assertTrue(kvh.scannersForDelayedClose.contains(s1));
assertTrue(kvh.scannersForDelayedClose.contains(s2));
assertTrue(kvh.scannersForDelayedClose.contains(s3));
assertTrue(kvh.scannersForDelayedClose.contains(s4));
kvh.close();
for(KeyValueScanner scanner : scanners) {
assertTrue(((TestScanner)scanner).isClosed());
} }
} }
@ -173,19 +172,19 @@ public class TestKeyValueHeap extends HBaseTestCase {
List<KeyValueScanner> scanners = new ArrayList<>(Arrays.asList(s1, s2, s3, s4)); List<KeyValueScanner> scanners = new ArrayList<>(Arrays.asList(s1, s2, s3, s4));
// Creating KeyValueHeap // Creating KeyValueHeap
KeyValueHeap kvh = new KeyValueHeap(scanners, CellComparatorImpl.COMPARATOR); try (KeyValueHeap kvh = new KeyValueHeap(scanners, CellComparatorImpl.COMPARATOR)) {
try {
for (KeyValueScanner scanner : scanners) { for (KeyValueScanner scanner : scanners) {
((SeekTestScanner) scanner).setRealSeekDone(false); ((SeekTestScanner) scanner).setRealSeekDone(false);
} }
while (kvh.next() != null);
// The pollRealKV should throw IOE. // The pollRealKV should throw IOE.
assertTrue(false); assertThrows(IOException.class, () -> {
} catch (IOException ioe) { for (;;) {
kvh.close(); if (kvh.next() == null) {
break;
}
}
});
} }
// It implies there is no NPE thrown from kvh.close() if getting here // It implies there is no NPE thrown from kvh.close() if getting here
for (KeyValueScanner scanner : scanners) { for (KeyValueScanner scanner : scanners) {
// Verify that close is called and only called once for each scanner // Verify that close is called and only called once for each scanner
@ -198,18 +197,15 @@ public class TestKeyValueHeap extends HBaseTestCase {
public void testPriorityId() throws IOException { public void testPriorityId() throws IOException {
Cell kv113A = new KeyValue(row1, fam1, col3, Bytes.toBytes("aaa")); Cell kv113A = new KeyValue(row1, fam1, col3, Bytes.toBytes("aaa"));
Cell kv113B = new KeyValue(row1, fam1, col3, Bytes.toBytes("bbb")); Cell kv113B = new KeyValue(row1, fam1, col3, Bytes.toBytes("bbb"));
{ TestScanner scan1 = new TestScanner(Arrays.asList(kv111, kv112, kv113A), 1);
TestScanner scan1 = new TestScanner(Arrays.asList(kv111, kv112, kv113A), 1); TestScanner scan2 = new TestScanner(Arrays.asList(kv113B), 2);
TestScanner scan2 = new TestScanner(Arrays.asList(kv113B), 2); List<Cell> expected = Arrays.asList(kv111, kv112, kv113B, kv113A);
List<Cell> expected = Arrays.asList(kv111, kv112, kv113B, kv113A); assertCells(expected, Arrays.asList(scan1, scan2));
assertCells(expected, new ArrayList<>(Arrays.asList(scan1, scan2)));
} scan1 = new TestScanner(Arrays.asList(kv111, kv112, kv113A), 2);
{ scan2 = new TestScanner(Arrays.asList(kv113B), 1);
TestScanner scan1 = new TestScanner(Arrays.asList(kv111, kv112, kv113A), 2); expected = Arrays.asList(kv111, kv112, kv113A, kv113B);
TestScanner scan2 = new TestScanner(Arrays.asList(kv113B), 1); assertCells(expected, Arrays.asList(scan1, scan2));
List<Cell> expected = Arrays.asList(kv111, kv112, kv113A, kv113B);
assertCells(expected, new ArrayList<>(Arrays.asList(scan1, scan2)));
}
} }
private static class TestScanner extends CollectionBackedScanner { private static class TestScanner extends CollectionBackedScanner {
@ -231,7 +227,7 @@ public class TestKeyValueHeap extends HBaseTestCase {
} }
@Override @Override
public void close(){ public void close() {
closed = true; closed = true;
} }


@ -37,11 +37,11 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseClassTestRule; import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestCase;
import org.apache.hadoop.hbase.HBaseTestingUtility; import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.HTestConst;
import org.apache.hadoop.hbase.KeepDeletedCells; import org.apache.hadoop.hbase.KeepDeletedCells;
import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Delete;
@ -76,28 +76,30 @@ import org.slf4j.LoggerFactory;
/** /**
* Test major compactions * Test major compactions
*/ */
@Category({RegionServerTests.class, LargeTests.class}) @Category({ RegionServerTests.class, LargeTests.class })
@RunWith(Parameterized.class) @RunWith(Parameterized.class)
public class TestMajorCompaction { public class TestMajorCompaction {
@ClassRule @ClassRule
public static final HBaseClassTestRule CLASS_RULE = public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestMajorCompaction.class); HBaseClassTestRule.forClass(TestMajorCompaction.class);
@Parameterized.Parameters @Parameterized.Parameters
public static Object[] data() { public static Object[] data() {
return new Object[] { "NONE", "BASIC", "EAGER" }; return new Object[] { "NONE", "BASIC", "EAGER" };
} }
@Rule public TestName name;
@Rule
public TestName name;
private static final Logger LOG = LoggerFactory.getLogger(TestMajorCompaction.class.getName()); private static final Logger LOG = LoggerFactory.getLogger(TestMajorCompaction.class.getName());
private static final HBaseTestingUtility UTIL = new HBaseTestingUtility(); private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
protected Configuration conf = UTIL.getConfiguration(); protected Configuration conf = UTIL.getConfiguration();
private HRegion r = null; private HRegion r = null;
private HTableDescriptor htd = null; private HTableDescriptor htd = null;
private static final byte [] COLUMN_FAMILY = fam1; private static final byte[] COLUMN_FAMILY = fam1;
private final byte [] STARTROW = Bytes.toBytes(START_KEY); private final byte[] STARTROW = Bytes.toBytes(START_KEY);
private static final byte [] COLUMN_FAMILY_TEXT = COLUMN_FAMILY; private static final byte[] COLUMN_FAMILY_TEXT = COLUMN_FAMILY;
private int compactionThreshold; private int compactionThreshold;
private byte[] secondRowBytes, thirdRowBytes; private byte[] secondRowBytes, thirdRowBytes;
private static final long MAX_FILES_TO_COMPACT = 10; private static final long MAX_FILES_TO_COMPACT = 10;
@ -107,7 +109,7 @@ public class TestMajorCompaction {
super(); super();
name = new TestName(); name = new TestName();
// Set cache flush size to 1MB // Set cache flush size to 1MB
conf.setInt(HConstants.HREGION_MEMSTORE_FLUSH_SIZE, 1024*1024); conf.setInt(HConstants.HREGION_MEMSTORE_FLUSH_SIZE, 1024 * 1024);
conf.setInt(HConstants.HREGION_MEMSTORE_BLOCK_MULTIPLIER, 100); conf.setInt(HConstants.HREGION_MEMSTORE_BLOCK_MULTIPLIER, 100);
compactionThreshold = conf.getInt("hbase.hstore.compactionThreshold", 3); compactionThreshold = conf.getInt("hbase.hstore.compactionThreshold", 3);
conf.set(CompactingMemStore.COMPACTING_MEMSTORE_TYPE_KEY, String.valueOf(compType)); conf.set(CompactingMemStore.COMPACTING_MEMSTORE_TYPE_KEY, String.valueOf(compType));
@ -117,13 +119,13 @@ public class TestMajorCompaction {
secondRowBytes[START_KEY_BYTES.length - 1]++; secondRowBytes[START_KEY_BYTES.length - 1]++;
thirdRowBytes = START_KEY_BYTES.clone(); thirdRowBytes = START_KEY_BYTES.clone();
thirdRowBytes[START_KEY_BYTES.length - 1] = thirdRowBytes[START_KEY_BYTES.length - 1] =
(byte) (thirdRowBytes[START_KEY_BYTES.length - 1] + 2); (byte) (thirdRowBytes[START_KEY_BYTES.length - 1] + 2);
} }
@Before @Before
public void setUp() throws Exception { public void setUp() throws Exception {
this.htd = UTIL.createTableDescriptor( this.htd = UTIL.createTableDescriptor(
TableName.valueOf(name.getMethodName().replace('[','i').replace(']','i')), TableName.valueOf(name.getMethodName().replace('[', 'i').replace(']', 'i')),
HColumnDescriptor.DEFAULT_MIN_VERSIONS, 3, HConstants.FOREVER, HColumnDescriptor.DEFAULT_MIN_VERSIONS, 3, HConstants.FOREVER,
HColumnDescriptor.DEFAULT_KEEP_DELETED); HColumnDescriptor.DEFAULT_KEEP_DELETED);
this.r = UTIL.createLocalHRegion(htd, null, null); this.r = UTIL.createLocalHRegion(htd, null, null);
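This setUp/tearDown pair is the replacement for the old HBaseTestCase plumbing: the region comes from HBaseTestingUtility and the WAL is closed explicitly. A condensed sketch of that lifecycle, with descriptor arguments as in this test and an illustrative table name:

    // Create a local region for the test, then close region and WAL afterwards.
    HTableDescriptor htd = UTIL.createTableDescriptor(
      TableName.valueOf("TestMajorCompactionExample"), // illustrative name
      HColumnDescriptor.DEFAULT_MIN_VERSIONS, 3, HConstants.FOREVER,
      HColumnDescriptor.DEFAULT_KEEP_DELETED);
    HRegion r = UTIL.createLocalHRegion(htd, null, null);
    try {
      // ... exercise the region ...
    } finally {
      WAL wal = r.getWAL();
      r.close();
      wal.close();
    }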
@ -131,15 +133,15 @@ public class TestMajorCompaction {
@After @After
public void tearDown() throws Exception { public void tearDown() throws Exception {
WAL wal = ((HRegion)r).getWAL(); WAL wal = ((HRegion) r).getWAL();
((HRegion)r).close(); ((HRegion) r).close();
wal.close(); wal.close();
} }
/** /**
   * Test that on a major compaction, if all cells are expired or deleted, then we'll end up with no
   * product. Make sure a scanner over the region returns the right answer in this case, and that it
   * just basically works.
* @throws IOException exception encountered * @throws IOException exception encountered
*/ */
@Test @Test
@ -157,8 +159,7 @@ public class TestMajorCompaction {
} }
/** /**
   * Run compaction while flushing memstore. Assert that deletes get cleaned up.
* @throws Exception * @throws Exception
*/ */
@Test @Test
@ -176,23 +177,21 @@ public class TestMajorCompaction {
majorCompactionWithDataBlockEncoding(false); majorCompactionWithDataBlockEncoding(false);
} }
public void majorCompactionWithDataBlockEncoding(boolean inCacheOnly) public void majorCompactionWithDataBlockEncoding(boolean inCacheOnly) throws Exception {
throws Exception {
Map<HStore, HFileDataBlockEncoder> replaceBlockCache = new HashMap<>(); Map<HStore, HFileDataBlockEncoder> replaceBlockCache = new HashMap<>();
for (HStore store : r.getStores()) { for (HStore store : r.getStores()) {
HFileDataBlockEncoder blockEncoder = store.getDataBlockEncoder(); HFileDataBlockEncoder blockEncoder = store.getDataBlockEncoder();
replaceBlockCache.put(store, blockEncoder); replaceBlockCache.put(store, blockEncoder);
final DataBlockEncoding inCache = DataBlockEncoding.PREFIX; final DataBlockEncoding inCache = DataBlockEncoding.PREFIX;
final DataBlockEncoding onDisk = inCacheOnly ? DataBlockEncoding.NONE : final DataBlockEncoding onDisk = inCacheOnly ? DataBlockEncoding.NONE : inCache;
inCache; ((HStore) store).setDataBlockEncoderInTest(new HFileDataBlockEncoderImpl(onDisk));
((HStore)store).setDataBlockEncoderInTest(new HFileDataBlockEncoderImpl(onDisk));
} }
majorCompaction(); majorCompaction();
// restore settings // restore settings
for (Entry<HStore, HFileDataBlockEncoder> entry : replaceBlockCache.entrySet()) { for (Entry<HStore, HFileDataBlockEncoder> entry : replaceBlockCache.entrySet()) {
((HStore)entry.getKey()).setDataBlockEncoderInTest(entry.getValue()); ((HStore) entry.getKey()).setDataBlockEncoderInTest(entry.getValue());
} }
} }
@ -202,7 +201,7 @@ public class TestMajorCompaction {
createStoreFile(r); createStoreFile(r);
} }
// Add more content. // Add more content.
HBaseTestCase.addContent(new RegionAsTable(r), Bytes.toString(COLUMN_FAMILY)); HTestConst.addContent(new RegionAsTable(r), Bytes.toString(COLUMN_FAMILY));
    // Now there are about 5 versions of each column.
    // The default is that only 3 (MAXVERSIONS) versions are allowed per column.
@ -223,7 +222,7 @@ public class TestMajorCompaction {
int storeCount = 0; int storeCount = 0;
for (HStore store : r.getStores()) { for (HStore store : r.getStores()) {
CompactionProgress progress = store.getCompactionProgress(); CompactionProgress progress = store.getCompactionProgress();
if( progress != null ) { if (progress != null) {
++storeCount; ++storeCount;
assertTrue(progress.currentCompactedKVs > 0); assertTrue(progress.currentCompactedKVs > 0);
assertTrue(progress.getTotalCompactingKVs() > 0); assertTrue(progress.getTotalCompactingKVs() > 0);
@ -233,25 +232,24 @@ public class TestMajorCompaction {
// look at the second row // look at the second row
// Increment the least significant character so we get to next row. // Increment the least significant character so we get to next row.
byte [] secondRowBytes = START_KEY_BYTES.clone(); byte[] secondRowBytes = START_KEY_BYTES.clone();
secondRowBytes[START_KEY_BYTES.length - 1]++; secondRowBytes[START_KEY_BYTES.length - 1]++;
// Always 3 versions if that is what max versions is. // Always 3 versions if that is what max versions is.
result = r.get(new Get(secondRowBytes).addFamily(COLUMN_FAMILY_TEXT).readVersions(100)); result = r.get(new Get(secondRowBytes).addFamily(COLUMN_FAMILY_TEXT).readVersions(100));
LOG.debug("Row " + Bytes.toStringBinary(secondRowBytes) + " after " + LOG.debug(
"initial compaction: " + result); "Row " + Bytes.toStringBinary(secondRowBytes) + " after " + "initial compaction: " + result);
assertEquals("Invalid number of versions of row " assertEquals("Invalid number of versions of row " + Bytes.toStringBinary(secondRowBytes) + ".",
+ Bytes.toStringBinary(secondRowBytes) + ".", compactionThreshold, compactionThreshold, result.size());
result.size());
// Now add deletes to memstore and then flush it. // Now add deletes to memstore and then flush it.
// That will put us over // That will put us over
// the compaction threshold of 3 store files. Compacting these store files // the compaction threshold of 3 store files. Compacting these store files
// should result in a compacted store file that has no references to the // should result in a compacted store file that has no references to the
// deleted row. // deleted row.
LOG.debug("Adding deletes to memstore and flushing"); LOG.debug("Adding deletes to memstore and flushing");
Delete delete = new Delete(secondRowBytes, System.currentTimeMillis()); Delete delete = new Delete(secondRowBytes, System.currentTimeMillis());
byte [][] famAndQf = {COLUMN_FAMILY, null}; byte[][] famAndQf = { COLUMN_FAMILY, null };
delete.addFamily(famAndQf[0]); delete.addFamily(famAndQf[0]);
r.delete(delete); r.delete(delete);
@ -264,7 +262,7 @@ public class TestMajorCompaction {
result = r.get(new Get(secondRowBytes).addFamily(COLUMN_FAMILY_TEXT).readVersions(100)); result = r.get(new Get(secondRowBytes).addFamily(COLUMN_FAMILY_TEXT).readVersions(100));
assertTrue("Second row should have been deleted", result.isEmpty()); assertTrue("Second row should have been deleted", result.isEmpty());
// Add a bit of data and flush. Start adding at 'bbb'. // Add a bit of data and flush. Start adding at 'bbb'.
createSmallerStoreFile(this.r); createSmallerStoreFile(this.r);
r.flush(true); r.flush(true);
// Assert that the second row is still deleted. // Assert that the second row is still deleted.
@ -281,7 +279,7 @@ public class TestMajorCompaction {
// Make sure the store files do have some 'aaa' keys in them -- exactly 3. // Make sure the store files do have some 'aaa' keys in them -- exactly 3.
// Also, that compacted store files do not have any secondRowBytes because // Also, that compacted store files do not have any secondRowBytes because
// they were deleted. // they were deleted.
verifyCounts(3,0); verifyCounts(3, 0);
// Multiple versions allowed for an entry, so the delete isn't enough // Multiple versions allowed for an entry, so the delete isn't enough
// Lower TTL and expire to ensure that all our entries have been wiped // Lower TTL and expire to ensure that all our entries have been wiped
@ -319,8 +317,8 @@ public class TestMajorCompaction {
assertEquals(2, s.getStorefilesCount()); assertEquals(2, s.getStorefilesCount());
// ensure that major compaction time is deterministic // ensure that major compaction time is deterministic
RatioBasedCompactionPolicy RatioBasedCompactionPolicy c =
c = (RatioBasedCompactionPolicy)s.storeEngine.getCompactionPolicy(); (RatioBasedCompactionPolicy) s.storeEngine.getCompactionPolicy();
Collection<HStoreFile> storeFiles = s.getStorefiles(); Collection<HStoreFile> storeFiles = s.getStorefiles();
long mcTime = c.getNextMajorCompactTime(storeFiles); long mcTime = c.getNextMajorCompactTime(storeFiles);
for (int i = 0; i < 10; ++i) { for (int i = 0; i < 10; ++i) {
@ -339,7 +337,7 @@ public class TestMajorCompaction {
assertEquals(1, s.getStorefilesCount()); assertEquals(1, s.getStorefilesCount());
} finally { } finally {
// reset the timed compaction settings // reset the timed compaction settings
conf.setLong(HConstants.MAJOR_COMPACTION_PERIOD, 1000*60*60*24); conf.setLong(HConstants.MAJOR_COMPACTION_PERIOD, 1000 * 60 * 60 * 24);
conf.setFloat("hbase.hregion.majorcompaction.jitter", 0.20F); conf.setFloat("hbase.hregion.majorcompaction.jitter", 0.20F);
// run a major to reset the cache // run a major to reset the cache
createStoreFile(r); createStoreFile(r);
@ -351,33 +349,32 @@ public class TestMajorCompaction {
private void verifyCounts(int countRow1, int countRow2) throws Exception { private void verifyCounts(int countRow1, int countRow2) throws Exception {
int count1 = 0; int count1 = 0;
int count2 = 0; int count2 = 0;
for (HStoreFile f: r.getStore(COLUMN_FAMILY_TEXT).getStorefiles()) { for (HStoreFile f : r.getStore(COLUMN_FAMILY_TEXT).getStorefiles()) {
HFileScanner scanner = f.getReader().getScanner(false, false); HFileScanner scanner = f.getReader().getScanner(false, false);
scanner.seekTo(); scanner.seekTo();
do { do {
byte [] row = CellUtil.cloneRow(scanner.getCell()); byte[] row = CellUtil.cloneRow(scanner.getCell());
if (Bytes.equals(row, STARTROW)) { if (Bytes.equals(row, STARTROW)) {
count1++; count1++;
} else if(Bytes.equals(row, secondRowBytes)) { } else if (Bytes.equals(row, secondRowBytes)) {
count2++; count2++;
} }
} while(scanner.next()); } while (scanner.next());
} }
assertEquals(countRow1,count1); assertEquals(countRow1, count1);
assertEquals(countRow2,count2); assertEquals(countRow2, count2);
} }
private int count() throws IOException { private int count() throws IOException {
int count = 0; int count = 0;
for (HStoreFile f: r.getStore(COLUMN_FAMILY_TEXT).getStorefiles()) { for (HStoreFile f : r.getStore(COLUMN_FAMILY_TEXT).getStorefiles()) {
HFileScanner scanner = f.getReader().getScanner(false, false); HFileScanner scanner = f.getReader().getScanner(false, false);
if (!scanner.seekTo()) { if (!scanner.seekTo()) {
continue; continue;
} }
do { do {
count++; count++;
} while(scanner.next()); } while (scanner.next());
} }
return count; return count;
} }
@ -388,14 +385,13 @@ public class TestMajorCompaction {
private void createStoreFile(final HRegion region, String family) throws IOException { private void createStoreFile(final HRegion region, String family) throws IOException {
Table loader = new RegionAsTable(region); Table loader = new RegionAsTable(region);
HBaseTestCase.addContent(loader, family); HTestConst.addContent(loader, family);
region.flush(true); region.flush(true);
} }
private void createSmallerStoreFile(final HRegion region) throws IOException { private void createSmallerStoreFile(final HRegion region) throws IOException {
Table loader = new RegionAsTable(region); Table loader = new RegionAsTable(region);
HBaseTestCase.addContent(loader, Bytes.toString(COLUMN_FAMILY), Bytes.toBytes("" + HTestConst.addContent(loader, Bytes.toString(COLUMN_FAMILY), Bytes.toBytes("" + "bbb"), null);
"bbb"), null);
region.flush(true); region.flush(true);
} }
@ -414,8 +410,7 @@ public class TestMajorCompaction {
CompactionRequestImpl request = store.requestCompaction().get().getRequest(); CompactionRequestImpl request = store.requestCompaction().get().getRequest();
assertNotNull("Expected to receive a compaction request", request); assertNotNull("Expected to receive a compaction request", request);
assertEquals( assertEquals(
"System-requested major compaction should not occur if there are too many store files", "System-requested major compaction should not occur if there are too many store files", false,
false,
request.isMajor()); request.isMajor());
} }
@ -423,21 +418,19 @@ public class TestMajorCompaction {
* Test for HBASE-5920 * Test for HBASE-5920
*/ */
@Test @Test
public void testUserMajorCompactionRequest() throws IOException{ public void testUserMajorCompactionRequest() throws IOException {
HStore store = r.getStore(COLUMN_FAMILY); HStore store = r.getStore(COLUMN_FAMILY);
createStoreFile(r); createStoreFile(r);
for (int i = 0; i < MAX_FILES_TO_COMPACT + 1; i++) { for (int i = 0; i < MAX_FILES_TO_COMPACT + 1; i++) {
createStoreFile(r); createStoreFile(r);
} }
store.triggerMajorCompaction(); store.triggerMajorCompaction();
CompactionRequestImpl request = CompactionRequestImpl request = store
store.requestCompaction(PRIORITY_USER, CompactionLifeCycleTracker.DUMMY, null).get() .requestCompaction(PRIORITY_USER, CompactionLifeCycleTracker.DUMMY, null).get().getRequest();
.getRequest();
assertNotNull("Expected to receive a compaction request", request); assertNotNull("Expected to receive a compaction request", request);
assertEquals( assertEquals(
"User-requested major compaction should always occur, even if there are too many store files", "User-requested major compaction should always occur, even if there are too many store files",
true, true, request.isMajor());
request.isMajor());
} }
/** /**
@ -487,7 +480,7 @@ public class TestMajorCompaction {
} }
private void testMajorCompactingWithDeletes(KeepDeletedCells keepDeletedCells) private void testMajorCompactingWithDeletes(KeepDeletedCells keepDeletedCells)
throws IOException { throws IOException {
createStoreFile(r); createStoreFile(r);
for (int i = 0; i < compactionThreshold; i++) { for (int i = 0; i < compactionThreshold; i++) {
createStoreFile(r); createStoreFile(r);

View File

@ -22,13 +22,14 @@ import static org.apache.hadoop.hbase.HBaseTestingUtility.fam1;
import static org.apache.hadoop.hbase.HBaseTestingUtility.fam2;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseClassTestRule;
-import org.apache.hadoop.hbase.HBaseTestCase;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.HTestConst;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
@ -174,13 +175,13 @@ public class TestMinorCompaction {
throws Exception {
Table loader = new RegionAsTable(r);
for (int i = 0; i < compactionThreshold + 1; i++) {
-HBaseTestCase.addContent(loader, Bytes.toString(fam1), Bytes.toString(col1), firstRowBytes,
+HTestConst.addContent(loader, Bytes.toString(fam1), Bytes.toString(col1), firstRowBytes,
thirdRowBytes, i);
-HBaseTestCase.addContent(loader, Bytes.toString(fam1), Bytes.toString(col2), firstRowBytes,
+HTestConst.addContent(loader, Bytes.toString(fam1), Bytes.toString(col2), firstRowBytes,
thirdRowBytes, i);
-HBaseTestCase.addContent(loader, Bytes.toString(fam2), Bytes.toString(col1), firstRowBytes,
+HTestConst.addContent(loader, Bytes.toString(fam2), Bytes.toString(col1), firstRowBytes,
thirdRowBytes, i);
-HBaseTestCase.addContent(loader, Bytes.toString(fam2), Bytes.toString(col2), firstRowBytes,
+HTestConst.addContent(loader, Bytes.toString(fam2), Bytes.toString(col2), firstRowBytes,
thirdRowBytes, i);
r.flush(true);
}

View File

@ -32,10 +32,10 @@ import java.util.List;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseClassTestRule;
-import org.apache.hadoop.hbase.HBaseTestCase;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.HTestConst;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.UnknownScannerException;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
@ -133,7 +133,7 @@ public class TestScanner {
byte [] stoprow = Bytes.toBytes("ccc");
try {
this.region = TEST_UTIL.createLocalHRegion(TESTTABLEDESC, null, null);
-HBaseTestCase.addContent(this.region, HConstants.CATALOG_FAMILY);
+HTestConst.addContent(this.region, HConstants.CATALOG_FAMILY);
List<Cell> results = new ArrayList<>();
// Do simple test of getting one row only first.
Scan scan = new Scan().withStartRow(Bytes.toBytes("abc"))
@ -207,7 +207,7 @@ public class TestScanner {
public void testFilters() throws IOException {
try {
this.region = TEST_UTIL.createLocalHRegion(TESTTABLEDESC, null, null);
-HBaseTestCase.addContent(this.region, HConstants.CATALOG_FAMILY);
+HTestConst.addContent(this.region, HConstants.CATALOG_FAMILY);
byte [] prefix = Bytes.toBytes("ab");
Filter newFilter = new PrefixFilter(prefix);
Scan scan = new Scan();
@ -233,7 +233,7 @@ public class TestScanner {
public void testRaceBetweenClientAndTimeout() throws Exception {
try {
this.region = TEST_UTIL.createLocalHRegion(TESTTABLEDESC, null, null);
-HBaseTestCase.addContent(this.region, HConstants.CATALOG_FAMILY);
+HTestConst.addContent(this.region, HConstants.CATALOG_FAMILY);
Scan scan = new Scan();
InternalScanner s = region.getScanner(scan);
List<Cell> results = new ArrayList<>();
@ -465,7 +465,7 @@ public class TestScanner {
Table hri = new RegionAsTable(region);
try {
LOG.info("Added: " +
-HBaseTestCase.addContent(hri, Bytes.toString(HConstants.CATALOG_FAMILY),
+HTestConst.addContent(hri, Bytes.toString(HConstants.CATALOG_FAMILY),
Bytes.toString(HConstants.REGIONINFO_QUALIFIER)));
int count = count(hri, -1, false);
assertEquals(count, count(hri, 100, false)); // do a sync flush.
@ -487,7 +487,7 @@ public class TestScanner {
Table hri = new RegionAsTable(region);
try {
LOG.info("Added: " +
-HBaseTestCase.addContent(hri, Bytes.toString(HConstants.CATALOG_FAMILY),
+HTestConst.addContent(hri, Bytes.toString(HConstants.CATALOG_FAMILY),
Bytes.toString(HConstants.REGIONINFO_QUALIFIER)));
int count = count(hri, -1, false);
assertEquals(count, count(hri, 100, true)); // do a true concurrent background thread flush
@ -513,9 +513,9 @@ public class TestScanner {
Table hri = new RegionAsTable(region);
try {
-HBaseTestCase.addContent(hri, Bytes.toString(fam1), Bytes.toString(col1),
+HTestConst.addContent(hri, Bytes.toString(fam1), Bytes.toString(col1),
firstRowBytes, secondRowBytes);
-HBaseTestCase.addContent(hri, Bytes.toString(fam2), Bytes.toString(col1),
+HTestConst.addContent(hri, Bytes.toString(fam2), Bytes.toString(col1),
firstRowBytes, secondRowBytes);
Delete dc = new Delete(firstRowBytes);
@ -524,9 +524,9 @@ public class TestScanner {
region.delete(dc);
region.flush(true);
-HBaseTestCase.addContent(hri, Bytes.toString(fam1), Bytes.toString(col1),
+HTestConst.addContent(hri, Bytes.toString(fam1), Bytes.toString(col1),
secondRowBytes, thirdRowBytes);
-HBaseTestCase.addContent(hri, Bytes.toString(fam2), Bytes.toString(col1),
+HTestConst.addContent(hri, Bytes.toString(fam2), Bytes.toString(col1),
secondRowBytes, thirdRowBytes);
region.flush(true);

View File

@ -17,61 +17,87 @@
*/
package org.apache.hadoop.hbase.regionserver;
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
-import java.util.Random;
+import java.util.concurrent.ThreadLocalRandom;
+import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseClassTestRule;
-import org.apache.hadoop.hbase.HBaseTestCase;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.RegionInfo;
+import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
-@Category({RegionServerTests.class, SmallTests.class})
-public class TestWideScanner extends HBaseTestCase {
+@Category({ RegionServerTests.class, SmallTests.class })
+public class TestWideScanner {
@ClassRule
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestWideScanner.class);
+private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
private static final Logger LOG = LoggerFactory.getLogger(TestWideScanner.class);
-static final byte[] A = Bytes.toBytes("A");
-static final byte[] B = Bytes.toBytes("B");
-static final byte[] C = Bytes.toBytes("C");
-static byte[][] COLUMNS = { A, B, C };
-static final Random rng = new Random();
-static final TableDescriptorBuilder.ModifyableTableDescriptor TESTTABLEDESC =
-new TableDescriptorBuilder.ModifyableTableDescriptor(TableName.valueOf("testwidescan"));
+private static final byte[] A = Bytes.toBytes("A");
+private static final byte[] B = Bytes.toBytes("B");
+private static final byte[] C = Bytes.toBytes("C");
+private static byte[][] COLUMNS = { A, B, C };
+private static final TableDescriptor TESTTABLEDESC;
static {
+TableDescriptorBuilder builder =
+TableDescriptorBuilder.newBuilder(TableName.valueOf("testwidescan"));
for (byte[] cfName : new byte[][] { A, B, C }) {
-TESTTABLEDESC.setColumnFamily(
-new ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor(cfName)
-// Keep versions to help debugging.
-.setMaxVersions(100)
-.setBlocksize(8 * 1024)
-);
+// Keep versions to help debugging.
+builder.setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(cfName).setMaxVersions(100)
+.setBlocksize(8 * 1024).build());
}
+TESTTABLEDESC = builder.build();
}
/** HRegionInfo for root region */
-HRegion r;
+private static HRegion REGION;
+@BeforeClass
+public static void setUp() throws IOException {
+Path testDir = UTIL.getDataTestDir();
+RegionInfo hri = RegionInfoBuilder.newBuilder(TESTTABLEDESC.getTableName()).build();
+REGION =
+HBaseTestingUtility.createRegionAndWAL(hri, testDir, UTIL.getConfiguration(), TESTTABLEDESC);
+}
+@AfterClass
+public static void tearDown() throws IOException {
+if (REGION != null) {
+HBaseTestingUtility.closeRegionAndWAL(REGION);
+REGION = null;
+}
+UTIL.cleanupTestDir();
+}
private int addWideContent(HRegion region) throws IOException {
int count = 0;
@ -85,7 +111,7 @@ public class TestWideScanner extends HBaseTestCase {
Put put = new Put(row);
put.setDurability(Durability.SKIP_WAL);
long ts1 = ++ts;
-put.addColumn(COLUMNS[rng.nextInt(COLUMNS.length)], b, ts1, b);
+put.addColumn(COLUMNS[ThreadLocalRandom.current().nextInt(COLUMNS.length)], b, ts1, b);
region.put(put);
count++;
}
@ -97,17 +123,15 @@ public class TestWideScanner extends HBaseTestCase {
@Test
public void testWideScanBatching() throws IOException {
final int batch = 256;
-try {
-this.r = createNewHRegion(TESTTABLEDESC, null, null);
-int inserted = addWideContent(this.r);
-List<Cell> results = new ArrayList<>();
-Scan scan = new Scan();
-scan.addFamily(A);
-scan.addFamily(B);
-scan.addFamily(C);
-scan.readVersions(100);
-scan.setBatch(batch);
-InternalScanner s = r.getScanner(scan);
+int inserted = addWideContent(REGION);
+List<Cell> results = new ArrayList<>();
+Scan scan = new Scan();
+scan.addFamily(A);
+scan.addFamily(B);
+scan.addFamily(C);
+scan.readVersions(100);
+scan.setBatch(batch);
+try (InternalScanner s = REGION.getScanner(scan)) {
int total = 0;
int i = 0;
boolean more;
@ -124,7 +148,7 @@ public class TestWideScanner extends HBaseTestCase {
if (results.size() > 0) {
// assert that all results are from the same row
byte[] row = CellUtil.cloneRow(results.get(0));
-for (Cell kv: results) {
+for (Cell kv : results) {
assertTrue(Bytes.equals(row, CellUtil.cloneRow(kv)));
}
}
@ -133,22 +157,16 @@ public class TestWideScanner extends HBaseTestCase {
// trigger ChangedReadersObservers
Iterator<KeyValueScanner> scanners =
-((HRegion.RegionScannerImpl)s).storeHeap.getHeap().iterator();
+((HRegion.RegionScannerImpl) s).storeHeap.getHeap().iterator();
while (scanners.hasNext()) {
-StoreScanner ss = (StoreScanner)scanners.next();
-ss.updateReaders(Collections.EMPTY_LIST, Collections.EMPTY_LIST);
+StoreScanner ss = (StoreScanner) scanners.next();
+ss.updateReaders(Collections.emptyList(), Collections.emptyList());
}
} while (more);
// assert that the scanner returned all values
LOG.info("inserted " + inserted + ", scanned " + total);
assertEquals(total, inserted);
-s.close();
-} finally {
-HBaseTestingUtility.closeRegionAndWAL(this.r);
}
}
}