HBASE-24510 Remove HBaseTestCase and GenericTestUtils (#1859)
Signed-off-by: Michael Stack <stack@apache.org>
commit 16116fa35e
parent 89b7b5a7f9
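
Most of the change is mechanical: the static helpers that tests pulled off the deprecated JUnit3 base class move to the existing HTestConst holder, and call sites swap the owning class. A minimal before/after sketch of that call-site migration, using names that appear in the hunks below:

    // Before: static helper hosted on the deprecated JUnit3 base class.
    HBaseTestCase.addContent(region, fam3);

    // After: the same helper, relocated to the HTestConst utility class.
    HTestConst.addContent(region, fam3);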
Deleted: GenericTestUtils.java (org.apache.hadoop.hbase)
@@ -1,321 +0,0 @@
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase;

import java.io.File;
import java.io.IOException;
import java.lang.management.ManagementFactory;
import java.lang.management.ThreadInfo;
import java.lang.management.ThreadMXBean;
import java.lang.reflect.InvocationTargetException;
import java.util.Arrays;
import java.util.Random;
import java.util.Set;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.regex.Pattern;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.util.Time;
import org.junit.Assert;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import org.slf4j.Logger;

import org.apache.hbase.thirdparty.com.google.common.base.Joiner;
import org.apache.hbase.thirdparty.com.google.common.base.Supplier;
import org.apache.hbase.thirdparty.com.google.common.collect.Sets;

/**
 * Provides some very generic helpers which might be used across the tests.
 */
public abstract class GenericTestUtils {

  private static final AtomicInteger sequence = new AtomicInteger();

  /**
   * Extracts the name of the method where the invocation has happened.
   * @return String name of the invoking method
   */
  public static String getMethodName() {
    return Thread.currentThread().getStackTrace()[2].getMethodName();
  }

  /**
   * Generates a process-wide unique sequence number.
   * @return a unique sequence number
   */
  public static int uniqueSequenceId() {
    return sequence.incrementAndGet();
  }

  /**
   * Assert that a given file exists.
   */
  public static void assertExists(File f) {
    Assert.assertTrue("File " + f + " should exist", f.exists());
  }

  /**
   * List all of the files in 'dir' that match the regex 'pattern'.
   * Then check that this list is identical to 'expectedMatches'.
   * @throws IOException if the dir is inaccessible
   */
  public static void assertGlobEquals(File dir, String pattern,
      String... expectedMatches) throws IOException {

    Set<String> found = Sets.newTreeSet();
    for (File f : FileUtil.listFiles(dir)) {
      if (f.getName().matches(pattern)) {
        found.add(f.getName());
      }
    }
    Set<String> expectedSet = Sets.newTreeSet(
        Arrays.asList(expectedMatches));
    Assert.assertEquals("Bad files matching " + pattern + " in " + dir,
        Joiner.on(",").join(expectedSet),
        Joiner.on(",").join(found));
  }

  public static void waitFor(Supplier<Boolean> check,
      int checkEveryMillis, int waitForMillis)
      throws TimeoutException, InterruptedException {
    long st = Time.now();
    do {
      boolean result = check.get();
      if (result) {
        return;
      }

      Thread.sleep(checkEveryMillis);
    } while (Time.now() - st < waitForMillis);

    throw new TimeoutException("Timed out waiting for condition. " +
        "Thread diagnostics:\n" +
        TimedOutTestsListener.buildThreadDiagnosticString());
  }

  /**
   * Mockito answer helper that triggers one latch as soon as the
   * method is called, then waits on another before continuing.
   */
  public static class DelayAnswer implements Answer<Object> {
    private final Logger LOG;

    private final CountDownLatch fireLatch = new CountDownLatch(1);
    private final CountDownLatch waitLatch = new CountDownLatch(1);
    private final CountDownLatch resultLatch = new CountDownLatch(1);

    private final AtomicInteger fireCounter = new AtomicInteger(0);
    private final AtomicInteger resultCounter = new AtomicInteger(0);

    // Result fields set after proceed() is called.
    private volatile Throwable thrown;
    private volatile Object returnValue;

    public DelayAnswer(Logger log) {
      this.LOG = log;
    }

    /**
     * Wait until the method is called.
     */
    public void waitForCall() throws InterruptedException {
      fireLatch.await();
    }

    /**
     * Tell the method to proceed.
     * This should only be called after waitForCall().
     */
    public void proceed() {
      waitLatch.countDown();
    }

    @Override
    public Object answer(InvocationOnMock invocation) throws Throwable {
      LOG.info("DelayAnswer firing fireLatch");
      fireCounter.getAndIncrement();
      fireLatch.countDown();
      try {
        LOG.info("DelayAnswer waiting on waitLatch");
        waitLatch.await();
        LOG.info("DelayAnswer delay complete");
      } catch (InterruptedException ie) {
        throw new IOException("Interrupted waiting on latch", ie);
      }
      return passThrough(invocation);
    }

    protected Object passThrough(InvocationOnMock invocation) throws Throwable {
      try {
        Object ret = invocation.callRealMethod();
        returnValue = ret;
        return ret;
      } catch (Throwable t) {
        thrown = t;
        throw t;
      } finally {
        resultCounter.incrementAndGet();
        resultLatch.countDown();
      }
    }

    /**
     * After calling proceed(), this will wait until the call has
     * completed and a result has been returned to the caller.
     */
    public void waitForResult() throws InterruptedException {
      resultLatch.await();
    }

    /**
     * After the call has gone through, return any exception that
     * was thrown, or null if no exception was thrown.
     */
    public Throwable getThrown() {
      return thrown;
    }

    /**
     * After the call has gone through, return the call's return value,
     * or null in case it was void or an exception was thrown.
     */
    public Object getReturnValue() {
      return returnValue;
    }

    public int getFireCount() {
      return fireCounter.get();
    }

    public int getResultCount() {
      return resultCounter.get();
    }
  }

  /**
   * An Answer implementation that simply forwards all calls through
   * to a delegate.
   *
   * This is useful as the default Answer for a mock object, to create
   * something like a spy on an RPC proxy. For example:
   * <code>
   *    NamenodeProtocol origNNProxy = secondary.getNameNode();
   *    NamenodeProtocol spyNNProxy = Mockito.mock(NamenodeProtocol.class,
   *        new DelegateAnswer(origNNProxy));
   *    doThrow(...).when(spyNNProxy).getBlockLocations(...);
   *    ...
   * </code>
   */
  public static class DelegateAnswer implements Answer<Object> {
    private final Object delegate;
    private final Logger log;

    public DelegateAnswer(Object delegate) {
      this(null, delegate);
    }

    public DelegateAnswer(Logger log, Object delegate) {
      this.log = log;
      this.delegate = delegate;
    }

    @Override
    public Object answer(InvocationOnMock invocation) throws Throwable {
      try {
        if (log != null) {
          log.info("Call to " + invocation + " on " + delegate,
              new Exception("TRACE"));
        }
        return invocation.getMethod().invoke(
            delegate, invocation.getArguments());
      } catch (InvocationTargetException ite) {
        throw ite.getCause();
      }
    }
  }

  /**
   * An Answer implementation which sleeps for a random number of milliseconds
   * between 0 and a configurable value before delegating to the real
   * implementation of the method. This can be useful for drawing out race
   * conditions.
   */
  public static class SleepAnswer implements Answer<Object> {
    private final int maxSleepTime;
    private static Random r = new Random();

    public SleepAnswer(int maxSleepTime) {
      this.maxSleepTime = maxSleepTime;
    }

    @Override
    public Object answer(InvocationOnMock invocation) throws Throwable {
      boolean interrupted = false;
      try {
        Thread.sleep(r.nextInt(maxSleepTime));
      } catch (InterruptedException ie) {
        interrupted = true;
      }
      try {
        return invocation.callRealMethod();
      } finally {
        if (interrupted) {
          Thread.currentThread().interrupt();
        }
      }
    }
  }

  public static void assertMatches(String output, String pattern) {
    Assert.assertTrue("Expected output to match /" + pattern + "/" +
        " but got:\n" + output,
        Pattern.compile(pattern).matcher(output).find());
  }

  public static void assertValueNear(long expected, long actual, long allowedError) {
    assertValueWithinRange(expected - allowedError, expected + allowedError, actual);
  }

  public static void assertValueWithinRange(long expectedMin, long expectedMax,
      long actual) {
    Assert.assertTrue("Expected " + actual + " to be in range (" + expectedMin + ","
        + expectedMax + ")", expectedMin <= actual && actual <= expectedMax);
  }

  /**
   * Assert that there are no threads running whose name matches the
   * given regular expression.
   * @param regex the regex to match against
   */
  public static void assertNoThreadsMatching(String regex) {
    Pattern pattern = Pattern.compile(regex);
    ThreadMXBean threadBean = ManagementFactory.getThreadMXBean();

    ThreadInfo[] infos = threadBean.getThreadInfo(threadBean.getAllThreadIds(), 20);
    for (ThreadInfo info : infos) {
      if (info == null) continue;
      if (pattern.matcher(info.getThreadName()).matches()) {
        Assert.fail("Leaked thread: " + info + "\n" +
            Joiner.on("\n").join(info.getStackTrace()));
      }
    }
  }
}
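GenericTestUtils.waitFor has an in-house replacement: org.apache.hadoop.hbase.Waiter, which is already imported by the TestCompaction file touched below. A minimal sketch of the substitution, assuming the surrounding test supplies conf, regionCount() and expectedRegions (those three names are illustrative, not from this commit):

    // Poll the condition until it holds, failing the test after 30 seconds.
    Waiter.waitFor(conf, 30000, new Waiter.Predicate<Exception>() {
      @Override
      public boolean evaluate() throws Exception {
        return regionCount() == expectedRegions;
      }
    });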
Deleted: HBaseTestCase.java (org.apache.hadoop.hbase)
@@ -1,459 +0,0 @@
/**
 *
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase;

import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.NavigableMap;
import junit.framework.AssertionFailedError;
import junit.framework.TestCase;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.log.HBaseMarkers;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.Region;
import org.apache.hadoop.hbase.regionserver.RegionAsTable;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.CommonFSUtils;
import org.apache.hadoop.hbase.util.FSTableDescriptors;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Abstract HBase test class. Initializes a few things that can come in handy
 * like an HBaseConfiguration and filesystem.
 * @deprecated since 2.0.0 and will be removed in 3.0.0. Write junit4 unit tests using
 *   {@link HBaseTestingUtility}.
 * @see HBaseTestingUtility
 * @see <a href="https://issues.apache.org/jira/browse/HBASE-11912">HBASE-11912</a>
 */
@Deprecated
public abstract class HBaseTestCase extends TestCase {
  private static final Logger LOG = LoggerFactory.getLogger(HBaseTestCase.class);

  protected final static byte [] fam1 = Bytes.toBytes("colfamily11");
  protected final static byte [] fam2 = Bytes.toBytes("colfamily21");
  protected final static byte [] fam3 = Bytes.toBytes("colfamily31");

  protected static final byte [][] COLUMNS = {fam1, fam2, fam3};

  private boolean localfs = false;
  protected static Path testDir = null;
  protected FileSystem fs = null;
  protected HRegion meta = null;
  protected static final char FIRST_CHAR = 'a';
  protected static final char LAST_CHAR = 'z';
  protected static final String PUNCTUATION = "~`@#$%^&*()-_+=:;',.<>/?[]{}|";
  protected static final byte [] START_KEY_BYTES = {FIRST_CHAR, FIRST_CHAR, FIRST_CHAR};
  protected String START_KEY = new String(START_KEY_BYTES, HConstants.UTF8_CHARSET);
  protected static final int MAXVERSIONS = 3;

  protected final HBaseTestingUtility testUtil = new HBaseTestingUtility();

  public volatile Configuration conf = testUtil.getConfiguration();
  public final FSTableDescriptors fsTableDescriptors;
  {
    try {
      fsTableDescriptors = new FSTableDescriptors(conf);
    } catch (IOException e) {
      throw new RuntimeException("Failed to init descriptors", e);
    }
  }

  /** Default constructor. */
  public HBaseTestCase() {
    super();
  }

  /**
   * @param name name of the test
   */
  public HBaseTestCase(String name) {
    super(name);
  }

  /**
   * Note that this method must be called after the mini hdfs cluster has
   * started or we end up with a local file system.
   */
  @Override
  protected void setUp() throws Exception {
    super.setUp();
    localfs =
      (conf.get("fs.defaultFS", "file:///").compareTo("file:///") == 0);

    if (fs == null) {
      this.fs = FileSystem.get(conf);
    }
    try {
      if (localfs) {
        testDir = getUnitTestdir(getName());
        if (fs.exists(testDir)) {
          fs.delete(testDir, true);
        }
      } else {
        testDir = CommonFSUtils.getRootDir(conf);
      }
    } catch (Exception e) {
      LOG.error(HBaseMarkers.FATAL, "error during setup", e);
      throw e;
    }
  }

  @Override
  protected void tearDown() throws Exception {
    try {
      if (localfs) {
        if (this.fs.exists(testDir)) {
          this.fs.delete(testDir, true);
        }
      }
    } catch (Exception e) {
      LOG.error(HBaseMarkers.FATAL, "error during tear down", e);
    }
    super.tearDown();
  }

  /**
   * @see HBaseTestingUtility#getBaseTestDir
   * @param testName name of the test
   * @return directory to use for this test
   */
  protected Path getUnitTestdir(String testName) {
    return testUtil.getDataTestDir(testName);
  }

  /**
   * You must call close on the returned region and then close on the log file it created. Do
   * {@link HBaseTestingUtility#closeRegionAndWAL(HRegion)} to close both the region and the WAL.
   * @param tableDescriptor TableDescriptor
   * @param startKey Start Key
   * @param endKey End Key
   * @return An {@link HRegion}
   * @throws IOException If thrown by
   *   {@link #createNewHRegion(TableDescriptor, byte[], byte[], Configuration)}
   */
  public HRegion createNewHRegion(TableDescriptor tableDescriptor, byte [] startKey,
      byte [] endKey) throws IOException {
    return createNewHRegion(tableDescriptor, startKey, endKey, this.conf);
  }

  public HRegion createNewHRegion(TableDescriptor tableDescriptor, byte[] startKey, byte[] endKey,
      Configuration conf) throws IOException {
    RegionInfo hri = RegionInfoBuilder.newBuilder(tableDescriptor.getTableName())
        .setStartKey(startKey).setEndKey(endKey).build();
    return HBaseTestingUtility.createRegionAndWAL(hri, testDir, conf, tableDescriptor);
  }

  protected HRegion openClosedRegion(final HRegion closedRegion) throws IOException {
    return HRegion.openHRegion(closedRegion, null);
  }

  /**
   * Create a table of name {@code name} with {@link #COLUMNS} for families.
   * @param name Name to give table.
   * @return Table descriptor.
   */
  protected TableDescriptor createTableDescriptor(final String name) {
    return createTableDescriptor(name, MAXVERSIONS);
  }

  /**
   * Create a table of name {@code name} with {@link #COLUMNS} for families.
   * @param name Name to give table.
   * @param versions How many versions to allow per column.
   * @return Table descriptor.
   */
  protected TableDescriptor createTableDescriptor(final String name,
      final int versions) {
    return createTableDescriptor(name, HColumnDescriptor.DEFAULT_MIN_VERSIONS,
        versions, HConstants.FOREVER, HColumnDescriptor.DEFAULT_KEEP_DELETED);
  }

  /**
   * Create a table of name {@code name} with {@link #COLUMNS} for families.
   * @param name Name to give table.
   * @param versions How many versions to allow per column.
   * @return Table descriptor.
   */
  protected TableDescriptor createTableDescriptor(final String name,
      final int minVersions, final int versions, final int ttl, KeepDeletedCells keepDeleted) {
    TableDescriptorBuilder.ModifyableTableDescriptor tableDescriptor =
      new TableDescriptorBuilder.ModifyableTableDescriptor(TableName.valueOf(name));
    for (byte[] cfName : new byte[][]{ fam1, fam2, fam3 }) {
      tableDescriptor.setColumnFamily(
        new ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor(cfName)
          .setMinVersions(minVersions)
          .setMaxVersions(versions)
          .setKeepDeletedCells(keepDeleted)
          .setBlockCacheEnabled(false)
          .setTimeToLive(ttl)
      );
    }
    return tableDescriptor;
  }

  /**
   * Add content to region <code>r</code> on the passed column <code>column</code>.
   * Adds data of the form 'aaa', 'aab', etc where key and value are the same.
   * @return count of what we added.
   */
  public static long addContent(final Region r, final byte [] columnFamily, final byte[] column)
      throws IOException {
    byte [] startKey = r.getRegionInfo().getStartKey();
    byte [] endKey = r.getRegionInfo().getEndKey();
    byte [] startKeyBytes = startKey;
    if (startKeyBytes == null || startKeyBytes.length == 0) {
      startKeyBytes = START_KEY_BYTES;
    }
    return addContent(new RegionAsTable(r), Bytes.toString(columnFamily), Bytes.toString(column),
      startKeyBytes, endKey, -1);
  }

  public static long addContent(final Region r, final byte [] columnFamily) throws IOException {
    return addContent(r, columnFamily, null);
  }

  /**
   * Add content to region <code>r</code> on the passed column <code>column</code>.
   * Adds data of the form 'aaa', 'aab', etc where key and value are the same.
   * @return count of what we added.
   */
  public static long addContent(final Table updater,
      final String columnFamily) throws IOException {
    return addContent(updater, columnFamily, START_KEY_BYTES, null);
  }

  public static long addContent(final Table updater, final String family,
      final String column) throws IOException {
    return addContent(updater, family, column, START_KEY_BYTES, null);
  }

  /**
   * Add content to region <code>r</code> on the passed column <code>column</code>.
   * Adds data of the form 'aaa', 'aab', etc where key and value are the same.
   * @return count of what we added.
   */
  public static long addContent(final Table updater, final String columnFamily,
      final byte [] startKeyBytes, final byte [] endKey) throws IOException {
    return addContent(updater, columnFamily, null, startKeyBytes, endKey, -1);
  }

  public static long addContent(final Table updater, final String family, String column,
      final byte [] startKeyBytes, final byte [] endKey) throws IOException {
    return addContent(updater, family, column, startKeyBytes, endKey, -1);
  }

  /**
   * Add content to region <code>r</code> on the passed column <code>column</code>.
   * Adds data of the form 'aaa', 'aab', etc where key and value are the same.
   * @return count of what we added.
   */
  public static long addContent(final Table updater,
      final String columnFamily,
      final String column,
      final byte [] startKeyBytes, final byte [] endKey, final long ts) throws IOException {
    long count = 0;
    // Add rows of three characters. The first character starts with the
    // 'a' character and runs up to 'z'. Per first character, we run the
    // second character over same range. And same for the third so rows
    // (and values) look like this: 'aaa', 'aab', 'aac', etc.
    char secondCharStart = (char) startKeyBytes[1];
    char thirdCharStart = (char) startKeyBytes[2];
    EXIT: for (char c = (char) startKeyBytes[0]; c <= LAST_CHAR; c++) {
      for (char d = secondCharStart; d <= LAST_CHAR; d++) {
        for (char e = thirdCharStart; e <= LAST_CHAR; e++) {
          byte [] t = new byte [] {(byte) c, (byte) d, (byte) e};
          if (endKey != null && endKey.length > 0
              && Bytes.compareTo(endKey, t) <= 0) {
            break EXIT;
          }
          try {
            Put put;
            if (ts != -1) {
              put = new Put(t, ts);
            } else {
              put = new Put(t);
            }
            StringBuilder sb = new StringBuilder();
            if (column != null && column.contains(":")) {
              sb.append(column);
            } else {
              if (columnFamily != null) {
                sb.append(columnFamily);
                if (!columnFamily.endsWith(":")) {
                  sb.append(":");
                }
                if (column != null) {
                  sb.append(column);
                }
              }
            }
            byte[][] split =
              CellUtil.parseColumn(Bytes.toBytes(sb.toString()));
            if (split.length == 1) {
              byte[] qualifier = new byte[0];
              put.addColumn(split[0], qualifier, t);
            } else {
              put.addColumn(split[0], split[1], t);
            }
            put.setDurability(Durability.SKIP_WAL);
            updater.put(put);
            count++;
          } catch (RuntimeException | IOException ex) {
            ex.printStackTrace();
            throw ex;
          }
        }
        // Set start character back to FIRST_CHAR after we've done first loop.
        thirdCharStart = FIRST_CHAR;
      }
      secondCharStart = FIRST_CHAR;
    }
    return count;
  }

  protected void assertResultEquals(final HRegion region, final byte [] row,
      final byte [] family, final byte [] qualifier, final long timestamp,
      final byte [] value) throws IOException {
    Get get = new Get(row);
    get.setTimestamp(timestamp);
    Result res = region.get(get);
    NavigableMap<byte[], NavigableMap<byte[], NavigableMap<Long, byte[]>>> map =
      res.getMap();
    byte [] res_value = map.get(family).get(qualifier).get(timestamp);

    if (value == null) {
      assertEquals(Bytes.toString(family) + " " + Bytes.toString(qualifier) +
        " at timestamp " + timestamp, null, res_value);
    } else {
      if (res_value == null) {
        fail(Bytes.toString(family) + " " + Bytes.toString(qualifier) +
          " at timestamp " + timestamp + " was expected to be \"" +
          Bytes.toStringBinary(value) + "\" but was null");
      }
      if (res_value != null) {
        assertEquals(Bytes.toString(family) + " " + Bytes.toString(qualifier) +
          " at timestamp " + timestamp,
          Bytes.toString(value), new String(res_value, StandardCharsets.UTF_8));
      }
    }
  }

  /**
   * Common method to close down a MiniDFSCluster and the associated file system.
   */
  public static void shutdownDfs(MiniDFSCluster cluster) {
    if (cluster != null) {
      LOG.info("Shutting down Mini DFS ");
      try {
        cluster.shutdown();
      } catch (Exception e) {
        // Can get a java.lang.reflect.UndeclaredThrowableException thrown
        // here because of an InterruptedException. Don't let exceptions in
        // here be the cause of test failure.
      }
      try {
        FileSystem fs = cluster.getFileSystem();
        if (fs != null) {
          LOG.info("Shutting down FileSystem");
          fs.close();
        }
        FileSystem.closeAll();
      } catch (IOException e) {
        LOG.error("error closing file system", e);
      }
    }
  }

  /**
   * You must call {@link #closeRootAndMeta()} when done after calling this method. It does cleanup.
   */
  protected void createMetaRegion() throws IOException {
    FSTableDescriptors fsTableDescriptors = new FSTableDescriptors(conf);
    meta = HBaseTestingUtility.createRegionAndWAL(RegionInfoBuilder.FIRST_META_REGIONINFO, testDir,
      conf, fsTableDescriptors.get(TableName.META_TABLE_NAME));
  }

  protected void closeRootAndMeta() throws IOException {
    HBaseTestingUtility.closeRegionAndWAL(meta);
  }

  public static void assertByteEquals(byte[] expected,
      byte[] actual) {
    if (Bytes.compareTo(expected, actual) != 0) {
      throw new AssertionFailedError("expected:<" +
        Bytes.toString(expected) + "> but was:<" +
        Bytes.toString(actual) + ">");
    }
  }

  public static void assertEquals(byte[] expected,
      byte[] actual) {
    if (Bytes.compareTo(expected, actual) != 0) {
      throw new AssertionFailedError("expected:<" +
        Bytes.toStringBinary(expected) + "> but was:<" +
        Bytes.toStringBinary(actual) + ">");
    }
  }
}
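The deprecation note above names the migration path: JUnit4 annotations plus an HBaseTestingUtility instance in place of inherited state. A minimal sketch of the target shape, mirroring what the TestHStoreFile hunks at the end of this commit actually do; the class TestSomething and its test body are illustrative only:

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.hbase.HBaseTestingUtility;
    import org.junit.AfterClass;
    import org.junit.Before;
    import org.junit.Rule;
    import org.junit.Test;
    import org.junit.rules.TestName;

    public class TestSomething {
      private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();

      // JUnit4 replacement for JUnit3's getName().
      @Rule
      public TestName name = new TestName();

      private Configuration conf;
      private Path testDir;

      @Before
      public void setUp() throws IOException {
        conf = TEST_UTIL.getConfiguration();
        testDir = TEST_UTIL.getDataTestDir(name.getMethodName());
      }

      @AfterClass
      public static void tearDownAfterClass() {
        TEST_UTIL.cleanupTestDir();
      }

      @Test
      public void testSomething() throws IOException {
        // Exercise the code under test against conf and testDir here.
      }
    }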
Modified: HTestConst.java (org.apache.hadoop.hbase)
@@ -16,16 +16,21 @@
  */
 package org.apache.hadoop.hbase;
 
+import java.io.IOException;
 import java.util.Arrays;
 import java.util.HashSet;
 import java.util.Set;
 import java.util.Collections;
+import org.apache.hadoop.hbase.client.Durability;
+import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.regionserver.Region;
+import org.apache.hadoop.hbase.regionserver.RegionAsTable;
 import org.apache.hadoop.hbase.util.Bytes;
 
 /**
- * Similar to {@link HConstants} but for tests. Also provides some simple
- * static utility functions to generate test data.
+ * Similar to {@link HConstants} but for tests. Also provides some simple static utility functions
+ * to generate test data.
  */
 public class HTestConst {
 
@@ -34,15 +39,13 @@ public class HTestConst {
 
   public static final String DEFAULT_TABLE_STR = "MyTestTable";
   public static final byte[] DEFAULT_TABLE_BYTES = Bytes.toBytes(DEFAULT_TABLE_STR);
-  public static final TableName DEFAULT_TABLE =
-    TableName.valueOf(DEFAULT_TABLE_BYTES);
+  public static final TableName DEFAULT_TABLE = TableName.valueOf(DEFAULT_TABLE_BYTES);
 
   public static final String DEFAULT_CF_STR = "MyDefaultCF";
   public static final byte[] DEFAULT_CF_BYTES = Bytes.toBytes(DEFAULT_CF_STR);
 
   public static final Set<String> DEFAULT_CF_STR_SET =
-    Collections.unmodifiableSet(new HashSet<>(
-      Arrays.asList(new String[] { DEFAULT_CF_STR })));
+    Collections.unmodifiableSet(new HashSet<>(Arrays.asList(new String[] { DEFAULT_CF_STR })));
 
   public static final String DEFAULT_ROW_STR = "MyTestRow";
   public static final byte[] DEFAULT_ROW_BYTES = Bytes.toBytes(DEFAULT_ROW_STR);
@@ -53,9 +56,13 @@ public class HTestConst {
   public static String DEFAULT_VALUE_STR = "MyTestValue";
   public static byte[] DEFAULT_VALUE_BYTES = Bytes.toBytes(DEFAULT_VALUE_STR);
 
+  private static final char FIRST_CHAR = 'a';
+  private static final char LAST_CHAR = 'z';
+  private static final byte[] START_KEY_BYTES = { FIRST_CHAR, FIRST_CHAR, FIRST_CHAR };
+
   /**
-   * Generate the given number of unique byte sequences by appending numeric
-   * suffixes (ASCII representations of decimal numbers).
+   * Generate the given number of unique byte sequences by appending numeric suffixes (ASCII
+   * representations of decimal numbers).
    */
   public static byte[][] makeNAscii(byte[] base, int n) {
     byte[][] ret = new byte[n][];
@@ -66,4 +73,112 @@ public class HTestConst {
     return ret;
   }
 
+  /**
+   * Add content to region <code>r</code> on the passed column <code>column</code>. Adds data of
+   * the form 'aaa', 'aab', etc where key and value are the same.
+   * @return count of what we added.
+   */
+  public static long addContent(final Region r, final byte[] columnFamily, final byte[] column)
+    throws IOException {
+    byte[] startKey = r.getRegionInfo().getStartKey();
+    byte[] endKey = r.getRegionInfo().getEndKey();
+    byte[] startKeyBytes = startKey;
+    if (startKeyBytes == null || startKeyBytes.length == 0) {
+      startKeyBytes = START_KEY_BYTES;
+    }
+    return addContent(new RegionAsTable(r), Bytes.toString(columnFamily), Bytes.toString(column),
+      startKeyBytes, endKey, -1);
+  }
+
+  public static long addContent(final Region r, final byte[] columnFamily) throws IOException {
+    return addContent(r, columnFamily, null);
+  }
+
+  /**
+   * Add content to region <code>r</code> on the passed column <code>column</code>. Adds data of
+   * the form 'aaa', 'aab', etc where key and value are the same.
+   * @return count of what we added.
+   */
+  public static long addContent(Table updater, String columnFamily) throws IOException {
+    return addContent(updater, columnFamily, START_KEY_BYTES, null);
+  }
+
+  public static long addContent(Table updater, String family, String column) throws IOException {
+    return addContent(updater, family, column, START_KEY_BYTES, null);
+  }
+
+  /**
+   * Add content to region <code>r</code> on the passed column <code>column</code>. Adds data of
+   * the form 'aaa', 'aab', etc where key and value are the same.
+   * @return count of what we added.
+   */
+  public static long addContent(Table updater, String columnFamily, byte[] startKeyBytes,
+    byte[] endKey) throws IOException {
+    return addContent(updater, columnFamily, null, startKeyBytes, endKey, -1);
+  }
+
+  public static long addContent(Table updater, String family, String column, byte[] startKeyBytes,
+    byte[] endKey) throws IOException {
+    return addContent(updater, family, column, startKeyBytes, endKey, -1);
+  }
+
+  /**
+   * Add content to region <code>r</code> on the passed column <code>column</code>. Adds data of
+   * the form 'aaa', 'aab', etc where key and value are the same.
+   * @return count of what we added.
+   */
+  public static long addContent(Table updater, String columnFamily, String column,
+    byte[] startKeyBytes, byte[] endKey, long ts) throws IOException {
+    long count = 0;
+    // Add rows of three characters. The first character starts with the
+    // 'a' character and runs up to 'z'. Per first character, we run the
+    // second character over same range. And same for the third so rows
+    // (and values) look like this: 'aaa', 'aab', 'aac', etc.
+    char secondCharStart = (char) startKeyBytes[1];
+    char thirdCharStart = (char) startKeyBytes[2];
+    EXIT: for (char c = (char) startKeyBytes[0]; c <= LAST_CHAR; c++) {
+      for (char d = secondCharStart; d <= LAST_CHAR; d++) {
+        for (char e = thirdCharStart; e <= LAST_CHAR; e++) {
+          byte[] t = new byte[] { (byte) c, (byte) d, (byte) e };
+          if (endKey != null && endKey.length > 0 && Bytes.compareTo(endKey, t) <= 0) {
+            break EXIT;
+          }
+          Put put;
+          if (ts != -1) {
+            put = new Put(t, ts);
+          } else {
+            put = new Put(t);
+          }
+          StringBuilder sb = new StringBuilder();
+          if (column != null && column.contains(":")) {
+            sb.append(column);
+          } else {
+            if (columnFamily != null) {
+              sb.append(columnFamily);
+              if (!columnFamily.endsWith(":")) {
+                sb.append(":");
+              }
+              if (column != null) {
+                sb.append(column);
+              }
+            }
+          }
+          byte[][] split = CellUtil.parseColumn(Bytes.toBytes(sb.toString()));
+          if (split.length == 1) {
+            byte[] qualifier = new byte[0];
+            put.addColumn(split[0], qualifier, t);
+          } else {
+            put.addColumn(split[0], split[1], t);
+          }
+          put.setDurability(Durability.SKIP_WAL);
+          updater.put(put);
+          count++;
+        }
+        // Set start character back to FIRST_CHAR after we've done first loop.
+        thirdCharStart = FIRST_CHAR;
+      }
+      secondCharStart = FIRST_CHAR;
+    }
+    return count;
+  }
 }
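Call sites use the relocated helpers exactly as before, only qualified with the new owner. The store-file setup idiom that recurs in TestCompaction below reduces to this sketch (region and COLUMN_FAMILY are assumed to come from the surrounding test):

    // Fill one column family with the standard 'aaa'..'zzz' rows, then flush
    // so the data lands in a new store file.
    Table loader = new RegionAsTable(region);
    HTestConst.addContent(loader, Bytes.toString(COLUMN_FAMILY));
    region.flush(true);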
Modified: TestResult.java (org.apache.hadoop.hbase.client)
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hbase.client;
 
-import static org.apache.hadoop.hbase.HBaseTestCase.assertByteEquals;
+import static org.junit.Assert.assertArrayEquals;
 
 import java.io.IOException;
 import java.nio.ByteBuffer;
@@ -158,7 +158,7 @@ public class TestResult extends TestCase {
     for (int i = 0; i < 100; ++i) {
       final byte[] qf = Bytes.toBytes(i);
 
-      assertByteEquals(Bytes.add(value, Bytes.toBytes(i)), r.getValue(family, qf));
+      assertArrayEquals(Bytes.add(value, Bytes.toBytes(i)), r.getValue(family, qf));
       assertTrue(r.containsColumn(family, qf));
     }
   }
@@ -177,7 +177,7 @@ public class TestResult extends TestCase {
     for (int i = 0; i < 100; ++i) {
       final byte[] qf = Bytes.toBytes(i);
 
-      assertByteEquals(Bytes.add(value, Bytes.toBytes(i)), r.getValue(family, qf));
+      assertArrayEquals(Bytes.add(value, Bytes.toBytes(i)), r.getValue(family, qf));
       assertTrue(r.containsColumn(family, qf));
     }
   }
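Nothing is lost by dropping the custom helper: JUnit's stock assertArrayEquals does the same byte-for-byte comparison and additionally reports the first differing index on failure, as in the replacement line above:

    // Drop-in replacement for the removed HBaseTestCase.assertByteEquals.
    assertArrayEquals(Bytes.add(value, Bytes.toBytes(i)), r.getValue(family, qf));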
Modified: TestCoprocessorInterface.java
@@ -38,9 +38,9 @@ import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.Coprocessor;
 import org.apache.hadoop.hbase.CoprocessorEnvironment;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
-import org.apache.hadoop.hbase.HBaseTestCase;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HTestConst;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
 import org.apache.hadoop.hbase.client.Get;
@@ -295,7 +295,7 @@ public class TestCoprocessorInterface {
     HRegion region = initHRegion(tableName, name.getMethodName(), hc, new Class<?>[]{}, families);
 
     for (int i = 0; i < 3; i++) {
-      HBaseTestCase.addContent(region, fam3);
+      HTestConst.addContent(region, fam3);
       region.flush(true);
     }
 
@@ -357,7 +357,7 @@ public class TestCoprocessorInterface {
     HRegion region = initHRegion(tableName, name.getMethodName(), hc,
         new Class<?>[]{CoprocessorImpl.class}, families);
     for (int i = 0; i < 3; i++) {
-      HBaseTestCase.addContent(region, fam3);
+      HTestConst.addContent(region, fam3);
       region.flush(true);
     }
 
Modified: a regionserver test (file name not preserved in this excerpt)
@@ -17,12 +17,11 @@
  */
 package org.apache.hadoop.hbase.regionserver;
 
-import static org.apache.hadoop.hbase.HBaseTestCase.addContent;
+import static org.apache.hadoop.hbase.HTestConst.addContent;
 import static org.junit.Assert.assertEquals;
 
 import java.util.ArrayList;
 import java.util.List;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Cell;
Modified: TestCompaction.java
@@ -27,7 +27,7 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
-import static org.mockito.Matchers.any;
+import static org.mockito.ArgumentMatchers.any;
 import static org.mockito.Mockito.doAnswer;
 import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.spy;
@@ -48,9 +48,9 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.ChoreService;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.HBaseTestCase;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HTestConst;
 import org.apache.hadoop.hbase.Waiter;
 import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
 import org.apache.hadoop.hbase.client.Delete;
@@ -173,7 +173,7 @@ public class TestCompaction {
       for (int j = 0; j < jmax; j++) {
         p.addColumn(COLUMN_FAMILY, Bytes.toBytes(j), pad);
       }
-      HBaseTestCase.addContent(loader, Bytes.toString(COLUMN_FAMILY));
+      HTestConst.addContent(loader, Bytes.toString(COLUMN_FAMILY));
       loader.put(p);
       r.flush(true);
     }
@@ -249,7 +249,7 @@ public class TestCompaction {
       for (int j = 0; j < jmax; j++) {
         p.addColumn(COLUMN_FAMILY, Bytes.toBytes(j), pad);
       }
-      HBaseTestCase.addContent(loader, Bytes.toString(COLUMN_FAMILY));
+      HTestConst.addContent(loader, Bytes.toString(COLUMN_FAMILY));
       loader.put(p);
       r.flush(true);
     }
@@ -329,7 +329,7 @@ public class TestCompaction {
 
   private void createStoreFile(final HRegion region, String family) throws IOException {
     Table loader = new RegionAsTable(region);
-    HBaseTestCase.addContent(loader, family);
+    HTestConst.addContent(loader, family);
     region.flush(true);
   }
 
@@ -503,7 +503,7 @@ public class TestCompaction {
       for (int j = 0; j < jmax; j++) {
         p.addColumn(COLUMN_FAMILY, Bytes.toBytes(j), pad);
       }
-      HBaseTestCase.addContent(loader, Bytes.toString(COLUMN_FAMILY));
+      HTestConst.addContent(loader, Bytes.toString(COLUMN_FAMILY));
       loader.put(p);
       r.flush(true);
     }
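A small unrelated modernization rides along in TestCompaction: org.mockito.Matchers has been deprecated since Mockito 2 in favor of the equivalent org.mockito.ArgumentMatchers, so only the static import moves; matcher call sites such as any() are untouched:

    // Before (deprecated since Mockito 2):
    import static org.mockito.Matchers.any;
    // After (same matchers, new home):
    import static org.mockito.ArgumentMatchers.any;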
@ -17,6 +17,13 @@
|
|||||||
*/
|
*/
|
||||||
package org.apache.hadoop.hbase.regionserver;
|
package org.apache.hadoop.hbase.regionserver;
|
||||||
|
|
||||||
|
import static org.junit.Assert.assertArrayEquals;
|
||||||
|
import static org.junit.Assert.assertEquals;
|
||||||
|
import static org.junit.Assert.assertFalse;
|
||||||
|
import static org.junit.Assert.assertNotNull;
|
||||||
|
import static org.junit.Assert.assertNull;
|
||||||
|
import static org.junit.Assert.assertTrue;
|
||||||
|
import static org.junit.Assert.fail;
|
||||||
import static org.mockito.Mockito.mock;
|
import static org.mockito.Mockito.mock;
|
||||||
import static org.mockito.Mockito.when;
|
import static org.mockito.Mockito.when;
|
||||||
|
|
||||||
@ -37,7 +44,6 @@ import org.apache.hadoop.fs.Path;
|
|||||||
import org.apache.hadoop.hbase.Cell;
|
import org.apache.hadoop.hbase.Cell;
|
||||||
import org.apache.hadoop.hbase.CellUtil;
|
import org.apache.hadoop.hbase.CellUtil;
|
||||||
import org.apache.hadoop.hbase.HBaseClassTestRule;
|
import org.apache.hadoop.hbase.HBaseClassTestRule;
|
||||||
import org.apache.hadoop.hbase.HBaseTestCase;
|
|
||||||
import org.apache.hadoop.hbase.HBaseTestingUtility;
|
import org.apache.hadoop.hbase.HBaseTestingUtility;
|
||||||
import org.apache.hadoop.hbase.HConstants;
|
import org.apache.hadoop.hbase.HConstants;
|
||||||
import org.apache.hadoop.hbase.KeyValue;
|
import org.apache.hadoop.hbase.KeyValue;
|
||||||
@ -59,7 +65,6 @@ import org.apache.hadoop.hbase.io.hfile.CacheStats;
|
|||||||
import org.apache.hadoop.hbase.io.hfile.HFileContext;
|
import org.apache.hadoop.hbase.io.hfile.HFileContext;
|
||||||
import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
|
import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
|
||||||
import org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoder;
|
import org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoder;
|
||||||
import org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoderImpl;
|
|
||||||
import org.apache.hadoop.hbase.io.hfile.HFileInfo;
|
import org.apache.hadoop.hbase.io.hfile.HFileInfo;
|
||||||
import org.apache.hadoop.hbase.io.hfile.HFileScanner;
|
import org.apache.hadoop.hbase.io.hfile.HFileScanner;
|
||||||
import org.apache.hadoop.hbase.io.hfile.ReaderContext;
|
import org.apache.hadoop.hbase.io.hfile.ReaderContext;
|
||||||
@ -70,11 +75,13 @@ import org.apache.hadoop.hbase.util.BloomFilterFactory;
|
|||||||
import org.apache.hadoop.hbase.util.Bytes;
|
import org.apache.hadoop.hbase.util.Bytes;
|
||||||
import org.apache.hadoop.hbase.util.ChecksumType;
|
import org.apache.hadoop.hbase.util.ChecksumType;
|
||||||
import org.apache.hadoop.hbase.util.CommonFSUtils;
|
import org.apache.hadoop.hbase.util.CommonFSUtils;
|
||||||
import org.junit.After;
|
import org.junit.AfterClass;
|
||||||
import org.junit.Before;
|
import org.junit.Before;
|
||||||
import org.junit.ClassRule;
|
import org.junit.ClassRule;
|
||||||
|
import org.junit.Rule;
|
||||||
import org.junit.Test;
|
import org.junit.Test;
|
||||||
import org.junit.experimental.categories.Category;
|
import org.junit.experimental.categories.Category;
|
||||||
|
import org.junit.rules.TestName;
|
||||||
import org.mockito.Mockito;
|
import org.mockito.Mockito;
|
||||||
import org.slf4j.Logger;
|
import org.slf4j.Logger;
|
||||||
import org.slf4j.LoggerFactory;
|
import org.slf4j.LoggerFactory;
|
||||||
@ -87,7 +94,7 @@ import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
|
|||||||
* Test HStoreFile
|
* Test HStoreFile
|
||||||
*/
|
*/
|
||||||
@Category({ RegionServerTests.class, MediumTests.class })
|
@Category({ RegionServerTests.class, MediumTests.class })
|
||||||
public class TestHStoreFile extends HBaseTestCase {
|
public class TestHStoreFile {
|
||||||
|
|
||||||
@ClassRule
|
@ClassRule
|
||||||
public static final HBaseClassTestRule CLASS_RULE =
|
public static final HBaseClassTestRule CLASS_RULE =
|
||||||
@ -100,22 +107,31 @@ public class TestHStoreFile extends HBaseTestCase {
|
|||||||
private static final ChecksumType CKTYPE = ChecksumType.CRC32C;
|
private static final ChecksumType CKTYPE = ChecksumType.CRC32C;
|
||||||
private static final int CKBYTES = 512;
|
private static final int CKBYTES = 512;
|
||||||
private static String TEST_FAMILY = "cf";
|
private static String TEST_FAMILY = "cf";
|
||||||
|
private static final char FIRST_CHAR = 'a';
|
||||||
|
private static final char LAST_CHAR = 'z';
|
||||||
|
|
||||||
|
@Rule
|
||||||
|
public TestName name = new TestName();
|
||||||
|
|
||||||
|
private Configuration conf;
|
||||||
|
private Path testDir;
|
||||||
|
private FileSystem fs;
|
||||||
|
|
||||||
@Override
|
|
||||||
@Before
|
@Before
|
||||||
public void setUp() throws Exception {
|
public void setUp() throws IOException {
|
||||||
super.setUp();
|
conf = TEST_UTIL.getConfiguration();
|
||||||
|
testDir = TEST_UTIL.getDataTestDir(name.getMethodName());
|
||||||
|
fs = testDir.getFileSystem(conf);
|
||||||
}
|
}
|
||||||
|
|
||||||
@Override
|
@AfterClass
|
||||||
@After
|
public static void tearDownAfterClass() {
|
||||||
public void tearDown() throws Exception {
|
TEST_UTIL.cleanupTestDir();
|
||||||
super.tearDown();
|
|
||||||
}
|
}
|
||||||
|
|
||||||
 /**
- * Write a file and then assert that we can read from top and bottom halves
- * using two HalfMapFiles.
+ * Write a file and then assert that we can read from top and bottom halves using two
+ * HalfMapFiles.
  */
 @Test
 public void testBasicHalfMapFile() throws Exception {
@@ -126,9 +142,7 @@ public class TestHStoreFile extends HBaseTestCase {

 HFileContext meta = new HFileContextBuilder().withBlockSize(2 * 1024).build();
 StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs)
-.withFilePath(regionFs.createTempName())
-.withFileContext(meta)
-.build();
+.withFilePath(regionFs.createTempName()).withFileContext(meta).build();
 writeStoreFile(writer);

 Path sfPath = regionFs.commitStoreFile(TEST_FAMILY, writer.getPath());
@@ -137,15 +151,15 @@ public class TestHStoreFile extends HBaseTestCase {
 }

 private void writeStoreFile(final StoreFileWriter writer) throws IOException {
-writeStoreFile(writer, Bytes.toBytes(getName()), Bytes.toBytes(getName()));
+writeStoreFile(writer, Bytes.toBytes(name.getMethodName()),
+Bytes.toBytes(name.getMethodName()));
 }

 // pick an split point (roughly halfway)
 byte[] SPLITKEY = new byte[] { (LAST_CHAR + FIRST_CHAR) / 2, FIRST_CHAR };

 /*
- * Writes HStoreKey and ImmutableBytes data to passed writer and
- * then closes it.
+ * Writes HStoreKey and ImmutableBytes data to passed writer and then closes it.
  * @param writer
  * @throws IOException
  */
@@ -165,8 +179,8 @@ public class TestHStoreFile extends HBaseTestCase {
 }

 /**
- * Test that our mechanism of writing store files in one region to reference
- * store files in other regions works.
+ * Test that our mechanism of writing store files in one region to reference store files in other
+ * regions works.
  */
 @Test
 public void testReference() throws IOException {
@@ -178,9 +192,7 @@ public class TestHStoreFile extends HBaseTestCase {
 HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build();
 // Make a store file and write data to it.
 StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs)
-.withFilePath(regionFs.createTempName())
-.withFileContext(meta)
-.build();
+.withFilePath(regionFs.createTempName()).withFileContext(meta).build();
 writeStoreFile(writer);

 Path hsfPath = regionFs.commitStoreFile(TEST_FAMILY, writer.getPath());
@@ -252,12 +264,13 @@ public class TestHStoreFile extends HBaseTestCase {
 byte[] cf = Bytes.toBytes("ty");
 ColumnFamilyDescriptor cfd = ColumnFamilyDescriptorBuilder.of(cf);
 when(store.getColumnFamilyDescriptor()).thenReturn(cfd);
-StoreFileScanner scanner =
-new StoreFileScanner(reader, mock(HFileScanner.class), false, false, 0, 0, true);
+try (StoreFileScanner scanner =
+new StoreFileScanner(reader, mock(HFileScanner.class), false, false, 0, 0, true)) {
 Scan scan = new Scan();
 scan.setColumnFamilyTimeRange(cf, 0, 1);
 assertFalse(scanner.shouldUseScanner(scan, store, 0));
 }
+}
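The scanner hunk just above is one instance of a pattern repeated throughout this commit: a scanner that was previously left unclosed if the assertion threw is now wrapped in try-with-resources. A self-contained sketch of why that matters (TrackingScanner is a made-up stand-in for any AutoCloseable scanner such as StoreFileScanner or KeyValueHeap):

import java.util.concurrent.atomic.AtomicBoolean;

public class TryWithResourcesSketch {
  // Made-up stand-in for an AutoCloseable scanner.
  static class TrackingScanner implements AutoCloseable {
    final AtomicBoolean closed = new AtomicBoolean(false);

    @Override
    public void close() {
      closed.set(true);
    }
  }

  public static void main(String[] args) {
    TrackingScanner scanner = new TrackingScanner();
    try {
      try (TrackingScanner s = scanner) {
        // A failed assertion here throws, but close() still runs.
        throw new AssertionError("simulated test failure");
      }
    } catch (AssertionError expected) {
      // fall through to verify cleanup happened anyway
    }
    System.out.println("closed = " + scanner.closed.get()); // closed = true
  }
}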

 @Test
 public void testHFileLink() throws IOException {
@@ -266,22 +279,20 @@ public class TestHStoreFile extends HBaseTestCase {
 // force temp data in hbase/target/test-data instead of /tmp/hbase-xxxx/
 Configuration testConf = new Configuration(this.conf);
 CommonFSUtils.setRootDir(testConf, testDir);
-HRegionFileSystem regionFs = HRegionFileSystem.createRegionOnFileSystem(
-testConf, fs, CommonFSUtils.getTableDir(testDir, hri.getTable()), hri);
+HRegionFileSystem regionFs = HRegionFileSystem.createRegionOnFileSystem(testConf, fs,
+CommonFSUtils.getTableDir(testDir, hri.getTable()), hri);
 HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build();

 // Make a store file and write data to it.
 StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs)
-.withFilePath(regionFs.createTempName())
-.withFileContext(meta)
-.build();
+.withFilePath(regionFs.createTempName()).withFileContext(meta).build();
 writeStoreFile(writer);

 Path storeFilePath = regionFs.commitStoreFile(TEST_FAMILY, writer.getPath());
 Path dstPath = new Path(regionFs.getTableDir(), new Path("test-region", TEST_FAMILY));
 HFileLink.create(testConf, this.fs, dstPath, hri, storeFilePath.getName());
-Path linkFilePath = new Path(dstPath,
-HFileLink.createHFileLinkName(hri, storeFilePath.getName()));
+Path linkFilePath =
+new Path(dstPath, HFileLink.createHFileLinkName(hri, storeFilePath.getName()));

 // Try to open store file from link
 StoreFileInfo storeFileInfo = new StoreFileInfo(testConf, this.fs, linkFilePath, true);
@@ -300,8 +311,8 @@ public class TestHStoreFile extends HBaseTestCase {
 }

 /**
- * This test creates an hfile and then the dir structures and files to verify that references
- * to hfilelinks (created by snapshot clones) can be properly interpreted.
+ * This test creates an hfile and then the dir structures and files to verify that references to
+ * hfilelinks (created by snapshot clones) can be properly interpreted.
  */
 @Test
 public void testReferenceToHFileLink() throws IOException {
@@ -317,21 +328,18 @@ public class TestHStoreFile extends HBaseTestCase {
 HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build();
 // Make a store file and write data to it. <root>/<tablename>/<rgn>/<cf>/<file>
 StoreFileWriter writer = new StoreFileWriter.Builder(testConf, cacheConf, this.fs)
-.withFilePath(regionFs.createTempName())
-.withFileContext(meta)
-.build();
+.withFilePath(regionFs.createTempName()).withFileContext(meta).build();
 writeStoreFile(writer);
 Path storeFilePath = regionFs.commitStoreFile(TEST_FAMILY, writer.getPath());

 // create link to store file. <root>/clone/region/<cf>/<hfile>-<region>-<table>
 RegionInfo hriClone = RegionInfoBuilder.newBuilder(TableName.valueOf("clone")).build();
-HRegionFileSystem cloneRegionFs = HRegionFileSystem.createRegionOnFileSystem(
-testConf, fs, CommonFSUtils.getTableDir(testDir, hri.getTable()),
-hriClone);
+HRegionFileSystem cloneRegionFs = HRegionFileSystem.createRegionOnFileSystem(testConf, fs,
+CommonFSUtils.getTableDir(testDir, hri.getTable()), hriClone);
 Path dstPath = cloneRegionFs.getStoreDir(TEST_FAMILY);
 HFileLink.create(testConf, this.fs, dstPath, hri, storeFilePath.getName());
-Path linkFilePath = new Path(dstPath,
-HFileLink.createHFileLinkName(hri, storeFilePath.getName()));
+Path linkFilePath =
+new Path(dstPath, HFileLink.createHFileLinkName(hri, storeFilePath.getName()));

 // create splits of the link.
 // <root>/clone/splitA/<cf>/<reftohfilelink>,
@@ -419,8 +427,7 @@ public class TestHStoreFile extends HBaseTestCase {

 if ((PrivateCellUtil.compare(topScanner.getReader().getComparator(), midKV, key.array(),
 key.arrayOffset(), key.limit())) > 0) {
-fail("key=" + Bytes.toStringBinary(key) + " < midkey=" +
-midkey);
+fail("key=" + Bytes.toStringBinary(key) + " < midkey=" + midkey);
 }
 if (first) {
 first = false;
@@ -431,14 +438,12 @@ public class TestHStoreFile extends HBaseTestCase {

 first = true;
 HFileScanner bottomScanner = bottom.getScanner(false, false);
-while ((!bottomScanner.isSeeked() && bottomScanner.seekTo()) ||
-bottomScanner.next()) {
+while ((!bottomScanner.isSeeked() && bottomScanner.seekTo()) || bottomScanner.next()) {
 previous = ByteBuffer.wrap(((KeyValue) bottomScanner.getKey()).getKey());
 key = ByteBuffer.wrap(((KeyValue) bottomScanner.getKey()).getKey());
 if (first) {
 first = false;
-LOG.info("First in bottom: " +
-Bytes.toString(Bytes.toBytes(previous)));
+LOG.info("First in bottom: " + Bytes.toString(Bytes.toBytes(previous)));
 }
 assertTrue(key.compareTo(bbMidkeyBytes) < 0);
 }
@@ -466,8 +471,7 @@ public class TestHStoreFile extends HBaseTestCase {
 first = true;
 topScanner = top.getScanner(false, false);
 KeyValue.KeyOnlyKeyValue keyOnlyKV = new KeyValue.KeyOnlyKeyValue();
-while ((!topScanner.isSeeked() && topScanner.seekTo()) ||
-topScanner.next()) {
+while ((!topScanner.isSeeked() && topScanner.seekTo()) || topScanner.next()) {
 key = ByteBuffer.wrap(((KeyValue) topScanner.getKey()).getKey());
 keyOnlyKV.setKey(key.array(), 0 + key.arrayOffset(), key.limit());
 assertTrue(PrivateCellUtil.compare(topScanner.getReader().getComparator(), keyOnlyKV,
@@ -504,8 +508,7 @@ public class TestHStoreFile extends HBaseTestCase {
 bottom = bottomF.getReader();
 first = true;
 bottomScanner = bottom.getScanner(false, false);
-while ((!bottomScanner.isSeeked() && bottomScanner.seekTo()) ||
-bottomScanner.next()) {
+while ((!bottomScanner.isSeeked() && bottomScanner.seekTo()) || bottomScanner.next()) {
 key = ByteBuffer.wrap(((KeyValue) bottomScanner.getKey()).getKey());
 if (first) {
 first = false;
@@ -547,8 +550,8 @@ public class TestHStoreFile extends HBaseTestCase {
 long now = System.currentTimeMillis();
 for (int i = 0; i < 2000; i += 2) {
 String row = String.format(localFormatter, i);
-KeyValue kv = new KeyValue(Bytes.toBytes(row), Bytes.toBytes("family"),
-Bytes.toBytes("col"), now, Bytes.toBytes("value"));
+KeyValue kv = new KeyValue(Bytes.toBytes(row), Bytes.toBytes("family"), Bytes.toBytes("col"),
+now, Bytes.toBytes("value"));
 writer.append(kv);
 }
 writer.close();
@@ -570,8 +573,7 @@ public class TestHStoreFile extends HBaseTestCase {
 TreeSet<byte[]> columns = new TreeSet<>(Bytes.BYTES_COMPARATOR);
 columns.add(Bytes.toBytes("family:col"));

-Scan scan = new Scan().withStartRow(Bytes.toBytes(row))
-.withStopRow(Bytes.toBytes(row), true);
+Scan scan = new Scan().withStartRow(Bytes.toBytes(row)).withStopRow(Bytes.toBytes(row), true);
 scan.addColumn(Bytes.toBytes("family"), Bytes.toBytes("family:col"));
 HStore store = mock(HStore.class);
 when(store.getColumnFamilyDescriptor())
@@ -591,60 +593,48 @@ public class TestHStoreFile extends HBaseTestCase {
 fs.delete(f, true);
 assertEquals("False negatives: " + falseNeg, 0, falseNeg);
 int maxFalsePos = (int) (2 * 2000 * err);
-assertTrue("Too many false positives: " + falsePos + " (err=" + err + ", expected no more than "
-+ maxFalsePos + ")", falsePos <= maxFalsePos);
+assertTrue("Too many false positives: " + falsePos + " (err=" + err +
+", expected no more than " + maxFalsePos + ")", falsePos <= maxFalsePos);
 }
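A note on the bound being re-wrapped above: err is the bloom filter error rate configured by these tests (0.01), so over the roughly 2000 row/column probes the checking loop performs, the expected number of false positives is about 2000 * 0.01 = 20. The assertion allows twice that, maxFalsePos = 2 * 2000 * 0.01 = 40, presumably so an unlucky run does not fail spuriously. False negatives, by contrast, are impossible for a correct bloom filter, hence the exact assertEquals(0, falseNeg) just before it.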

 private static final int BLOCKSIZE_SMALL = 8192;

 @Test
 public void testBloomFilter() throws Exception {
-FileSystem fs = FileSystem.getLocal(conf);
 conf.setFloat(BloomFilterFactory.IO_STOREFILE_BLOOM_ERROR_RATE, (float) 0.01);
 conf.setBoolean(BloomFilterFactory.IO_STOREFILE_BLOOM_ENABLED, true);

 // write the file
-Path f = new Path(ROOT_DIR, getName());
+Path f = new Path(ROOT_DIR, name.getMethodName());
 HFileContext meta = new HFileContextBuilder().withBlockSize(BLOCKSIZE_SMALL)
-.withChecksumType(CKTYPE)
-.withBytesPerCheckSum(CKBYTES).build();
+.withChecksumType(CKTYPE).withBytesPerCheckSum(CKBYTES).build();
 // Make a store file and write data to it.
-StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs)
-.withFilePath(f)
-.withBloomType(BloomType.ROW)
-.withMaxKeyCount(2000)
-.withFileContext(meta)
-.build();
+StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs).withFilePath(f)
+.withBloomType(BloomType.ROW).withMaxKeyCount(2000).withFileContext(meta).build();
 bloomWriteRead(writer, fs);
 }

 @Test
 public void testDeleteFamilyBloomFilter() throws Exception {
-FileSystem fs = FileSystem.getLocal(conf);
 conf.setFloat(BloomFilterFactory.IO_STOREFILE_BLOOM_ERROR_RATE, (float) 0.01);
 conf.setBoolean(BloomFilterFactory.IO_STOREFILE_BLOOM_ENABLED, true);
 float err = conf.getFloat(BloomFilterFactory.IO_STOREFILE_BLOOM_ERROR_RATE, 0);

 // write the file
-Path f = new Path(ROOT_DIR, getName());
+Path f = new Path(ROOT_DIR, name.getMethodName());

-HFileContext meta = new HFileContextBuilder()
-.withBlockSize(BLOCKSIZE_SMALL)
-.withChecksumType(CKTYPE)
-.withBytesPerCheckSum(CKBYTES).build();
+HFileContext meta = new HFileContextBuilder().withBlockSize(BLOCKSIZE_SMALL)
+.withChecksumType(CKTYPE).withBytesPerCheckSum(CKBYTES).build();
 // Make a store file and write data to it.
-StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs)
-.withFilePath(f)
-.withMaxKeyCount(2000)
-.withFileContext(meta)
-.build();
+StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs).withFilePath(f)
+.withMaxKeyCount(2000).withFileContext(meta).build();

 // add delete family
 long now = System.currentTimeMillis();
 for (int i = 0; i < 2000; i += 2) {
 String row = String.format(localFormatter, i);
-KeyValue kv = new KeyValue(Bytes.toBytes(row), Bytes.toBytes("family"),
-Bytes.toBytes("col"), now, KeyValue.Type.DeleteFamily, Bytes.toBytes("value"));
+KeyValue kv = new KeyValue(Bytes.toBytes(row), Bytes.toBytes("family"), Bytes.toBytes("col"),
+now, KeyValue.Type.DeleteFamily, Bytes.toBytes("value"));
 writer.append(kv);
 }
 writer.close();
@@ -679,8 +669,8 @@ public class TestHStoreFile extends HBaseTestCase {
 fs.delete(f, true);
 assertEquals("False negatives: " + falseNeg, 0, falseNeg);
 int maxFalsePos = (int) (2 * 2000 * err);
-assertTrue("Too many false positives: " + falsePos + " (err=" + err
-+ ", expected no more than " + maxFalsePos, falsePos <= maxFalsePos);
+assertTrue("Too many false positives: " + falsePos + " (err=" + err +
+", expected no more than " + maxFalsePos, falsePos <= maxFalsePos);
 }

 /**
@@ -689,13 +679,11 @@ public class TestHStoreFile extends HBaseTestCase {
 @Test
 public void testReseek() throws Exception {
 // write the file
-Path f = new Path(ROOT_DIR, getName());
+Path f = new Path(ROOT_DIR, name.getMethodName());
 HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build();
 // Make a store file and write data to it.
-StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs)
-.withFilePath(f)
-.withFileContext(meta)
-.build();
+StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs).withFilePath(f)
+.withFileContext(meta).build();

 writeStoreFile(writer);
 writer.close();
@@ -737,17 +725,12 @@ public class TestHStoreFile extends HBaseTestCase {

 for (int x : new int[] { 0, 1 }) {
 // write the file
-Path f = new Path(ROOT_DIR, getName() + x);
+Path f = new Path(ROOT_DIR, name.getMethodName() + x);
 HFileContext meta = new HFileContextBuilder().withBlockSize(BLOCKSIZE_SMALL)
-.withChecksumType(CKTYPE)
-.withBytesPerCheckSum(CKBYTES).build();
+.withChecksumType(CKTYPE).withBytesPerCheckSum(CKBYTES).build();
 // Make a store file and write data to it.
-StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs)
-.withFilePath(f)
-.withBloomType(bt[x])
-.withMaxKeyCount(expKeys[x])
-.withFileContext(meta)
-.build();
+StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs).withFilePath(f)
+.withBloomType(bt[x]).withMaxKeyCount(expKeys[x]).withFileContext(meta).build();

 long now = System.currentTimeMillis();
 for (int i = 0; i < rowCount * 2; i += 2) { // rows
@@ -763,12 +746,9 @@ public class TestHStoreFile extends HBaseTestCase {
 }
 writer.close();

-ReaderContext context = new ReaderContextBuilder()
-.withFilePath(f)
-.withFileSize(fs.getFileStatus(f).getLen())
-.withFileSystem(fs)
-.withInputStreamWrapper(new FSDataInputStreamWrapper(fs, f))
-.build();
+ReaderContext context =
+new ReaderContextBuilder().withFilePath(f).withFileSize(fs.getFileStatus(f).getLen())
+.withFileSystem(fs).withInputStreamWrapper(new FSDataInputStreamWrapper(fs, f)).build();
 HFileInfo fileInfo = new HFileInfo(context, conf);
 StoreFileReader reader =
 new StoreFileReader(context, fileInfo, cacheConf, new AtomicInteger(0), conf);
@@ -791,12 +771,11 @@ public class TestHStoreFile extends HBaseTestCase {
 TreeSet<byte[]> columns = new TreeSet<>(Bytes.BYTES_COMPARATOR);
 columns.add(Bytes.toBytes("col" + col));

-Scan scan = new Scan().withStartRow(Bytes.toBytes(row))
-.withStopRow(Bytes.toBytes(row), true);
+Scan scan =
+new Scan().withStartRow(Bytes.toBytes(row)).withStopRow(Bytes.toBytes(row), true);
 scan.addColumn(Bytes.toBytes("family"), Bytes.toBytes(("col" + col)));

-boolean exists =
-scanner.shouldUseScanner(scan, store, Long.MIN_VALUE);
+boolean exists = scanner.shouldUseScanner(scan, store, Long.MIN_VALUE);
 boolean shouldRowExist = i % 2 == 0;
 boolean shouldColExist = j % 2 == 0;
 shouldColExist = shouldColExist || bt[x] == BloomType.ROW;
@@ -824,16 +803,15 @@ public class TestHStoreFile extends HBaseTestCase {
 @Test
 public void testSeqIdComparator() {
 assertOrdering(StoreFileComparators.SEQ_ID, mockStoreFile(true, 100, 1000, -1, "/foo/123"),
-mockStoreFile(true, 100, 1000, -1, "/foo/124"),
-mockStoreFile(true, 99, 1000, -1, "/foo/126"),
+mockStoreFile(true, 100, 1000, -1, "/foo/124"), mockStoreFile(true, 99, 1000, -1, "/foo/126"),
 mockStoreFile(true, 98, 2000, -1, "/foo/126"), mockStoreFile(false, 3453, -1, 1, "/foo/1"),
 mockStoreFile(false, 2, -1, 3, "/foo/2"), mockStoreFile(false, 1000, -1, 5, "/foo/2"),
 mockStoreFile(false, 76, -1, 5, "/foo/3"));
 }

 /**
- * Assert that the given comparator orders the given storefiles in the
- * same way that they're passed.
+ * Assert that the given comparator orders the given storefiles in the same way that they're
+ * passed.
  */
 private void assertOrdering(Comparator<? super HStoreFile> comparator, HStoreFile... sfs) {
 ArrayList<HStoreFile> sorted = Lists.newArrayList(sfs);
@@ -847,10 +825,7 @@ public class TestHStoreFile extends HBaseTestCase {
 /**
  * Create a mock StoreFile with the given attributes.
  */
-private HStoreFile mockStoreFile(boolean bulkLoad,
-long size,
-long bulkTimestamp,
-long seqId,
+private HStoreFile mockStoreFile(boolean bulkLoad, long size, long bulkTimestamp, long seqId,
 String path) {
 HStoreFile mock = Mockito.mock(HStoreFile.class);
 StoreFileReader reader = Mockito.mock(StoreFileReader.class);
@@ -862,10 +837,8 @@ public class TestHStoreFile extends HBaseTestCase {
 Mockito.doReturn(OptionalLong.of(bulkTimestamp)).when(mock).getBulkLoadTimestamp();
 Mockito.doReturn(seqId).when(mock).getMaxSequenceId();
 Mockito.doReturn(new Path(path)).when(mock).getPath();
-String name = "mock storefile, bulkLoad=" + bulkLoad +
-" bulkTimestamp=" + bulkTimestamp +
-" seqId=" + seqId +
-" path=" + path;
+String name = "mock storefile, bulkLoad=" + bulkLoad + " bulkTimestamp=" + bulkTimestamp +
+" seqId=" + seqId + " path=" + path;
 Mockito.doReturn(name).when(mock).toString();
 return mock;
 }
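One non-obvious detail preserved by the reflowed mockStoreFile() above: toString() is stubbed with Mockito.doReturn(...).when(mock).toString() rather than when(mock.toString()).thenReturn(...). The when(...) form evaluates mock.toString() while the stubbing is being recorded, which Mockito cannot intercept cleanly, so doReturn is the documented way to stub toString(). A self-contained sketch (the interface is made up):

import org.mockito.Mockito;

public class ToStringStubSketch {
  // Made-up type to mock; any interface or non-final class works.
  interface StoreFileLike {
  }

  public static void main(String[] args) {
    StoreFileLike mock = Mockito.mock(StoreFileLike.class);
    // doReturn..when stubs toString() without invoking it during recording.
    Mockito.doReturn("mock storefile, seqId=5").when(mock).toString();
    System.out.println(mock); // prints: mock storefile, seqId=5
  }
}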
@@ -874,8 +847,7 @@ public class TestHStoreFile extends HBaseTestCase {
 * Generate a list of KeyValues for testing based on given parameters
 * @return the rows key-value list
 */
-List<KeyValue> getKeyValueSet(long[] timestamps, int numRows,
-byte[] qualifier, byte[] family) {
+List<KeyValue> getKeyValueSet(long[] timestamps, int numRows, byte[] qualifier, byte[] family) {
 List<KeyValue> kvList = new ArrayList<>();
 for (int i = 1; i <= numRows; i++) {
 byte[] b = Bytes.toBytes(i);
@@ -905,12 +877,9 @@ public class TestHStoreFile extends HBaseTestCase {
 HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build();
 // Make a store file and write data to it.
 StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs)
-.withOutputDir(dir)
-.withFileContext(meta)
-.build();
+.withOutputDir(dir).withFileContext(meta).build();

-List<KeyValue> kvList = getKeyValueSet(timestamps,numRows,
-qualifier, family);
+List<KeyValue> kvList = getKeyValueSet(timestamps, numRows, qualifier, family);

 for (KeyValue kv : kvList) {
 writer.append(kv);
@@ -918,8 +887,8 @@ public class TestHStoreFile extends HBaseTestCase {
 writer.appendMetadata(0, false);
 writer.close();

-HStoreFile hsf = new HStoreFile(this.fs, writer.getPath(), conf, cacheConf,
-BloomType.NONE, true);
+HStoreFile hsf =
+new HStoreFile(this.fs, writer.getPath(), conf, cacheConf, BloomType.NONE, true);
 HStore store = mock(HStore.class);
 when(store.getColumnFamilyDescriptor()).thenReturn(ColumnFamilyDescriptorBuilder.of(family));
 hsf.initReader();
@@ -973,8 +942,8 @@ public class TestHStoreFile extends HBaseTestCase {
 CacheConfig cacheConf = new CacheConfig(conf, bc);
 Path pathCowOff = new Path(baseDir, "123456789");
 StoreFileWriter writer = writeStoreFile(conf, cacheConf, pathCowOff, 3);
-HStoreFile hsf = new HStoreFile(this.fs, writer.getPath(), conf, cacheConf,
-BloomType.NONE, true);
+HStoreFile hsf =
+new HStoreFile(this.fs, writer.getPath(), conf, cacheConf, BloomType.NONE, true);
 LOG.debug(hsf.getPath().toString());

 // Read this file, we should see 3 misses
@@ -998,8 +967,7 @@ public class TestHStoreFile extends HBaseTestCase {
 cacheConf = new CacheConfig(conf, bc);
 Path pathCowOn = new Path(baseDir, "123456788");
 writer = writeStoreFile(conf, cacheConf, pathCowOn, 3);
-hsf = new HStoreFile(this.fs, writer.getPath(), conf, cacheConf,
-BloomType.NONE, true);
+hsf = new HStoreFile(this.fs, writer.getPath(), conf, cacheConf, BloomType.NONE, true);

 // Read this file, we should see 3 hits
 hsf.initReader();
@@ -1036,11 +1004,9 @@ public class TestHStoreFile extends HBaseTestCase {
 assertTrue(kv1.equals(kv2));
 KeyValue keyv1 = KeyValueUtil.ensureKeyValue(kv1);
 KeyValue keyv2 = KeyValueUtil.ensureKeyValue(kv2);
-assertTrue(Bytes.compareTo(
-keyv1.getBuffer(), keyv1.getKeyOffset(), keyv1.getKeyLength(),
+assertTrue(Bytes.compareTo(keyv1.getBuffer(), keyv1.getKeyOffset(), keyv1.getKeyLength(),
 keyv2.getBuffer(), keyv2.getKeyOffset(), keyv2.getKeyLength()) == 0);
-assertTrue(Bytes.compareTo(
-kv1.getValueArray(), kv1.getValueOffset(), kv1.getValueLength(),
+assertTrue(Bytes.compareTo(kv1.getValueArray(), kv1.getValueOffset(), kv1.getValueLength(),
 kv2.getValueArray(), kv2.getValueOffset(), kv2.getValueLength()) == 0);
 }
 assertNull(scannerTwo.next());
@@ -1084,7 +1050,6 @@ public class TestHStoreFile extends HBaseTestCase {
 private Path splitStoreFile(final HRegionFileSystem regionFs, final RegionInfo hri,
 final String family, final HStoreFile sf, final byte[] splitKey, boolean isTopRef)
 throws IOException {
-FileSystem fs = regionFs.getFileSystem();
 Path path = regionFs.splitStoreFile(hri, family, sf, splitKey, isTopRef, null);
 if (null == path) {
 return null;
@@ -1107,16 +1072,11 @@ public class TestHStoreFile extends HBaseTestCase {
 totalSize += kv.getLength() + 1;
 }
 int blockSize = totalSize / numBlocks;
-HFileContext meta = new HFileContextBuilder().withBlockSize(blockSize)
-.withChecksumType(CKTYPE)
-.withBytesPerCheckSum(CKBYTES)
-.build();
+HFileContext meta = new HFileContextBuilder().withBlockSize(blockSize).withChecksumType(CKTYPE)
+.withBytesPerCheckSum(CKBYTES).build();
 // Make a store file and write data to it.
 StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs)
-.withFilePath(path)
-.withMaxKeyCount(2000)
-.withFileContext(meta)
-.build();
+.withFilePath(path).withMaxKeyCount(2000).withFileContext(meta).build();
 // We'll write N-1 KVs to ensure we don't write an extra block
 kvs.remove(kvs.size() - 1);
 for (KeyValue kv : kvs) {
@@ -1128,8 +1088,7 @@ public class TestHStoreFile extends HBaseTestCase {
 }

 /**
- * Check if data block encoding information is saved correctly in HFile's
- * file info.
+ * Check if data block encoding information is saved correctly in HFile's file info.
 */
 @Test
 public void testDataBlockEncodingMetaData() throws IOException {
@@ -1137,23 +1096,14 @@ public class TestHStoreFile extends HBaseTestCase {
 Path dir = new Path(new Path(testDir, "7e0102"), "familyname");
 Path path = new Path(dir, "1234567890");

-DataBlockEncoding dataBlockEncoderAlgo =
-DataBlockEncoding.FAST_DIFF;
-HFileDataBlockEncoder dataBlockEncoder =
-new HFileDataBlockEncoderImpl(
-dataBlockEncoderAlgo);
+DataBlockEncoding dataBlockEncoderAlgo = DataBlockEncoding.FAST_DIFF;
 cacheConf = new CacheConfig(conf);
-HFileContext meta = new HFileContextBuilder().withBlockSize(BLOCKSIZE_SMALL)
-.withChecksumType(CKTYPE)
-.withBytesPerCheckSum(CKBYTES)
-.withDataBlockEncoding(dataBlockEncoderAlgo)
-.build();
+HFileContext meta =
+new HFileContextBuilder().withBlockSize(BLOCKSIZE_SMALL).withChecksumType(CKTYPE)
+.withBytesPerCheckSum(CKBYTES).withDataBlockEncoding(dataBlockEncoderAlgo).build();
 // Make a store file and write data to it.
 StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs)
-.withFilePath(path)
-.withMaxKeyCount(2000)
-.withFileContext(meta)
-.build();
+.withFilePath(path).withMaxKeyCount(2000).withFileContext(meta).build();
 writer.close();

 HStoreFile storeFile =
@@ -1163,6 +1113,6 @@ public class TestHStoreFile extends HBaseTestCase {

 Map<byte[], byte[]> fileInfo = reader.loadFileInfo();
 byte[] value = fileInfo.get(HFileDataBlockEncoder.DATA_BLOCK_ENCODING);
-assertEquals(dataBlockEncoderAlgo.getNameInBytes(), value);
+assertArrayEquals(dataBlockEncoderAlgo.getNameInBytes(), value);
 }
 }
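The final change in this file is a correctness fix rather than a reflow: JUnit's assertEquals(Object, Object) falls back to Object.equals, which for arrays is reference identity, so the old assertion on two byte[] values could only pass when both sides were the very same array instance. assertArrayEquals compares contents element by element. A minimal illustration:

import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;

public class ByteArrayAssertSketch {
  public static void main(String[] args) {
    byte[] expected = { 1, 2, 3 };
    byte[] actual = { 1, 2, 3 };
    assertArrayEquals(expected, actual); // passes: element-wise comparison
    try {
      assertEquals(expected, actual); // fails: distinct array objects
    } catch (AssertionError e) {
      System.out.println("assertEquals on arrays compares references");
    }
  }
}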

@@ -17,6 +17,10 @@
 */
 package org.apache.hadoop.hbase.regionserver;

+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertThrows;
+import static org.junit.Assert.assertTrue;

 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Arrays;
@@ -24,19 +28,17 @@ import java.util.List;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellComparatorImpl;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
-import org.apache.hadoop.hbase.HBaseTestCase;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.testclassification.RegionServerTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.CollectionBackedScanner;
-import org.junit.Before;
 import org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;

 @Category({ RegionServerTests.class, SmallTests.class })
-public class TestKeyValueHeap extends HBaseTestCase {
+public class TestKeyValueHeap {

 @ClassRule
 public static final HBaseClassTestRule CLASS_RULE =
@@ -74,15 +76,14 @@ public class TestKeyValueHeap extends HBaseTestCase {
 List<KeyValueScanner> scanners = new ArrayList<>(Arrays.asList(s1, s2, s3));

 /*
- * Uses {@code scanners} to build a KeyValueHeap, iterates over it and asserts that returned
- * Cells are same as {@code expected}.
+ * Uses {@code scanners} to build a KeyValueHeap, iterates over it and asserts that returned Cells
+ * are same as {@code expected}.
  * @return List of Cells returned from scanners.
  */
 public List<Cell> assertCells(List<Cell> expected, List<KeyValueScanner> scanners)
 throws IOException {
 // Creating KeyValueHeap
-KeyValueHeap kvh = new KeyValueHeap(scanners, CellComparatorImpl.COMPARATOR);
+try (KeyValueHeap kvh = new KeyValueHeap(scanners, CellComparatorImpl.COMPARATOR)) {

 List<Cell> actual = new ArrayList<>();
 while (kvh.peek() != null) {
 actual.add(kvh.next());
@@ -91,11 +92,6 @@ public class TestKeyValueHeap extends HBaseTestCase {
 assertEquals(expected, actual);
 return actual;
 }

-@Override
-@Before
-public void setUp() throws Exception {
-super.setUp();
 }

 @Test
@@ -104,8 +100,8 @@ public class TestKeyValueHeap extends HBaseTestCase {
 // 1. The "smallest" Cell is in the same scanners as current
 // 2. Current scanner gets empty

-List<Cell> expected = Arrays.asList(
-kv111, kv112, kv113, kv114, kv115, kv121, kv122, kv211, kv212, kv213);
+List<Cell> expected =
+Arrays.asList(kv111, kv112, kv113, kv114, kv115, kv121, kv122, kv211, kv212, kv213);

 List<Cell> actual = assertCells(expected, scanners);

@@ -121,20 +117,18 @@ public class TestKeyValueHeap extends HBaseTestCase {
 // Cases:
 // 1. Seek Cell that is not in scanner
 // 2. Check that smallest that is returned from a seek is correct

 List<Cell> expected = Arrays.asList(kv211);

 // Creating KeyValueHeap
-KeyValueHeap kvh =
-new KeyValueHeap(scanners, CellComparatorImpl.COMPARATOR);
+try (KeyValueHeap kvh = new KeyValueHeap(scanners, CellComparatorImpl.COMPARATOR)) {

 Cell seekKv = new KeyValue(row2, fam1, null, null);
 kvh.seek(seekKv);

 List<Cell> actual = Arrays.asList(kvh.peek());

-assertEquals("Expected = " + Arrays.toString(expected.toArray())
-+ "\n Actual = " + Arrays.toString(actual.toArray()), expected, actual);
+assertEquals("Expected = " + Arrays.toString(expected.toArray()) + "\n Actual = " +
+Arrays.toString(actual.toArray()), expected, actual);
+}
 }

 @Test
@@ -145,17 +139,22 @@ public class TestKeyValueHeap extends HBaseTestCase {
 scanners.add(s4);

 // Creating KeyValueHeap
-KeyValueHeap kvh = new KeyValueHeap(scanners, CellComparatorImpl.COMPARATOR);
-while(kvh.next() != null);
+try (KeyValueHeap kvh = new KeyValueHeap(scanners, CellComparatorImpl.COMPARATOR)) {
+for (;;) {
+if (kvh.next() == null) {
+break;
+}
+}
 // Once the internal scanners go out of Cells, those will be removed from KVHeap's priority
-// queue and added to a Set for lazy close. The actual close will happen only on KVHeap#close()
+// queue and added to a Set for lazy close. The actual close will happen only on
+// KVHeap#close()
 assertEquals(4, kvh.scannersForDelayedClose.size());
 assertTrue(kvh.scannersForDelayedClose.contains(s1));
 assertTrue(kvh.scannersForDelayedClose.contains(s2));
 assertTrue(kvh.scannersForDelayedClose.contains(s3));
 assertTrue(kvh.scannersForDelayedClose.contains(s4));
-kvh.close();
+}

 for (KeyValueScanner scanner : scanners) {
 assertTrue(((TestScanner) scanner).isClosed());
 }
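The loop rewrite just above is behavior-preserving: `while (kvh.next() != null);` drains the heap with an empty statement, which is easy to misread (and is flagged by common checkstyle configurations), while the for (;;) form gives the drain an explicit body. A self-contained sketch of the equivalence, using an iterator as a stand-in for the heap:

import java.util.Arrays;
import java.util.Iterator;

public class DrainLoopSketch {
  // Stand-in for KeyValueHeap.next(): returns null once exhausted.
  static String next(Iterator<String> it) {
    return it.hasNext() ? it.next() : null;
  }

  public static void main(String[] args) {
    Iterator<String> it = Arrays.asList("a", "b", "c").iterator();
    // Equivalent to `while (next(it) != null);` but with a visible body.
    for (;;) {
      if (next(it) == null) {
        break;
      }
    }
    System.out.println("drained: " + !it.hasNext()); // drained: true
  }
}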
@@ -173,19 +172,19 @@ public class TestKeyValueHeap extends HBaseTestCase {
 List<KeyValueScanner> scanners = new ArrayList<>(Arrays.asList(s1, s2, s3, s4));

 // Creating KeyValueHeap
-KeyValueHeap kvh = new KeyValueHeap(scanners, CellComparatorImpl.COMPARATOR);
+try (KeyValueHeap kvh = new KeyValueHeap(scanners, CellComparatorImpl.COMPARATOR)) {

-try {
 for (KeyValueScanner scanner : scanners) {
 ((SeekTestScanner) scanner).setRealSeekDone(false);
 }
-while (kvh.next() != null);
 // The pollRealKV should throw IOE.
-assertTrue(false);
-} catch (IOException ioe) {
-kvh.close();
+assertThrows(IOException.class, () -> {
+for (;;) {
+if (kvh.next() == null) {
+break;
+}
+}
+});
 }

 // It implies there is no NPE thrown from kvh.close() if getting here
 for (KeyValueScanner scanner : scanners) {
 // Verify that close is called and only called once for each scanner
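The rewrite above replaces the JUnit 3 era try/fail/catch idiom with assertThrows (available since JUnit 4.13), which both asserts that the expected exception is thrown and leaves resource cleanup to the surrounding try-with-resources. A minimal sketch of the same conversion:

import static org.junit.Assert.assertThrows;
import static org.junit.Assert.fail;

import java.io.IOException;

public class AssertThrowsSketch {
  // Stand-in for the operation under test; always fails.
  static void poll() throws IOException {
    throw new IOException("boom");
  }

  public static void demoOldStyle() {
    try {
      poll();
      fail("expected IOException"); // reached only if poll() does not throw
    } catch (IOException expected) {
      // expected path; cleanup used to be duplicated here
    }
  }

  public static void demoNewStyle() {
    // Fails the test if nothing (or the wrong type) is thrown.
    assertThrows(IOException.class, AssertThrowsSketch::poll);
  }
}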
@@ -198,18 +197,15 @@
 public void testPriorityId() throws IOException {
 Cell kv113A = new KeyValue(row1, fam1, col3, Bytes.toBytes("aaa"));
 Cell kv113B = new KeyValue(row1, fam1, col3, Bytes.toBytes("bbb"));
-{
 TestScanner scan1 = new TestScanner(Arrays.asList(kv111, kv112, kv113A), 1);
 TestScanner scan2 = new TestScanner(Arrays.asList(kv113B), 2);
 List<Cell> expected = Arrays.asList(kv111, kv112, kv113B, kv113A);
-assertCells(expected, new ArrayList<>(Arrays.asList(scan1, scan2)));
-}
-{
-TestScanner scan1 = new TestScanner(Arrays.asList(kv111, kv112, kv113A), 2);
-TestScanner scan2 = new TestScanner(Arrays.asList(kv113B), 1);
-List<Cell> expected = Arrays.asList(kv111, kv112, kv113A, kv113B);
-assertCells(expected, new ArrayList<>(Arrays.asList(scan1, scan2)));
-}
+assertCells(expected, Arrays.asList(scan1, scan2));
+scan1 = new TestScanner(Arrays.asList(kv111, kv112, kv113A), 2);
+scan2 = new TestScanner(Arrays.asList(kv113B), 1);
+expected = Arrays.asList(kv111, kv112, kv113A, kv113B);
+assertCells(expected, Arrays.asList(scan1, scan2));
 }

 private static class TestScanner extends CollectionBackedScanner {

@@ -37,11 +37,11 @@ import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
-import org.apache.hadoop.hbase.HBaseTestCase;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.HTestConst;
 import org.apache.hadoop.hbase.KeepDeletedCells;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Delete;
@@ -88,7 +88,9 @@ public class TestMajorCompaction {
 public static Object[] data() {
 return new Object[] { "NONE", "BASIC", "EAGER" };
 }
-@Rule public TestName name;
+@Rule
+public TestName name;
 private static final Logger LOG = LoggerFactory.getLogger(TestMajorCompaction.class.getName());
 private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
 protected Configuration conf = UTIL.getConfiguration();
@@ -137,9 +139,9 @@ public class TestMajorCompaction {
 }

 /**
- * Test that on a major compaction, if all cells are expired or deleted, then
- * we'll end up with no product. Make sure scanner over region returns
- * right answer in this case - and that it just basically works.
+ * Test that on a major compaction, if all cells are expired or deleted, then we'll end up with no
+ * product. Make sure scanner over region returns right answer in this case - and that it just
+ * basically works.
  * @throws IOException exception encountered
  */
 @Test
@@ -157,8 +159,7 @@ public class TestMajorCompaction {
 }

 /**
- * Run compaction and flushing memstore
- * Assert deletes get cleaned up.
+ * Run compaction and flushing memstore Assert deletes get cleaned up.
  * @throws Exception
  */
 @Test
@@ -176,15 +177,13 @@ public class TestMajorCompaction {
 majorCompactionWithDataBlockEncoding(false);
 }

-public void majorCompactionWithDataBlockEncoding(boolean inCacheOnly)
-throws Exception {
+public void majorCompactionWithDataBlockEncoding(boolean inCacheOnly) throws Exception {
 Map<HStore, HFileDataBlockEncoder> replaceBlockCache = new HashMap<>();
 for (HStore store : r.getStores()) {
 HFileDataBlockEncoder blockEncoder = store.getDataBlockEncoder();
 replaceBlockCache.put(store, blockEncoder);
 final DataBlockEncoding inCache = DataBlockEncoding.PREFIX;
-final DataBlockEncoding onDisk = inCacheOnly ? DataBlockEncoding.NONE :
-inCache;
+final DataBlockEncoding onDisk = inCacheOnly ? DataBlockEncoding.NONE : inCache;
 ((HStore) store).setDataBlockEncoderInTest(new HFileDataBlockEncoderImpl(onDisk));
 }

@@ -202,7 +201,7 @@ public class TestMajorCompaction {
 createStoreFile(r);
 }
 // Add more content.
-HBaseTestCase.addContent(new RegionAsTable(r), Bytes.toString(COLUMN_FAMILY));
+HTestConst.addContent(new RegionAsTable(r), Bytes.toString(COLUMN_FAMILY));

 // Now there are about 5 versions of each column.
 // Default is that there only 3 (MAXVERSIONS) versions allowed per column.
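HTestConst.addContent is where the data-loading helpers land now that HBaseTestCase is gone; call sites in this and the following hunks change only in the owning class name. For orientation, a hedged sketch of what such a helper does (the real signatures live in HTestConst; the row keys, qualifier, and row count below are invented for illustration):

import java.io.IOException;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public final class AddContentSketch {
  private AddContentSketch() {
  }

  // Illustrative stand-in for HTestConst.addContent(Table, String):
  // seed a column family with one cell per row so flush/compaction
  // tests have store files to work against.
  public static void addContent(Table table, String family) throws IOException {
    byte[] fam = Bytes.toBytes(family);
    byte[] qualifier = Bytes.toBytes("q"); // invented qualifier
    for (char c = 'a'; c <= 'z'; c++) {
      byte[] row = Bytes.toBytes(String.valueOf(c));
      table.put(new Put(row).addColumn(fam, qualifier, row));
    }
  }
}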
@ -238,11 +237,10 @@ public class TestMajorCompaction {
|
|||||||
|
|
||||||
// Always 3 versions if that is what max versions is.
|
// Always 3 versions if that is what max versions is.
|
||||||
result = r.get(new Get(secondRowBytes).addFamily(COLUMN_FAMILY_TEXT).readVersions(100));
|
result = r.get(new Get(secondRowBytes).addFamily(COLUMN_FAMILY_TEXT).readVersions(100));
|
||||||
LOG.debug("Row " + Bytes.toStringBinary(secondRowBytes) + " after " +
|
LOG.debug(
|
||||||
"initial compaction: " + result);
|
"Row " + Bytes.toStringBinary(secondRowBytes) + " after " + "initial compaction: " + result);
|
||||||
assertEquals("Invalid number of versions of row "
|
assertEquals("Invalid number of versions of row " + Bytes.toStringBinary(secondRowBytes) + ".",
|
||||||
+ Bytes.toStringBinary(secondRowBytes) + ".", compactionThreshold,
|
compactionThreshold, result.size());
|
||||||
result.size());
|
|
||||||
|
|
||||||
// Now add deletes to memstore and then flush it.
|
// Now add deletes to memstore and then flush it.
|
||||||
// That will put us over
|
// That will put us over
|
||||||
@ -319,8 +317,8 @@ public class TestMajorCompaction {
|
|||||||
assertEquals(2, s.getStorefilesCount());
|
assertEquals(2, s.getStorefilesCount());
|
||||||
|
|
||||||
// ensure that major compaction time is deterministic
|
// ensure that major compaction time is deterministic
|
||||||
RatioBasedCompactionPolicy
|
RatioBasedCompactionPolicy c =
|
||||||
c = (RatioBasedCompactionPolicy)s.storeEngine.getCompactionPolicy();
|
(RatioBasedCompactionPolicy) s.storeEngine.getCompactionPolicy();
|
||||||
Collection<HStoreFile> storeFiles = s.getStorefiles();
|
Collection<HStoreFile> storeFiles = s.getStorefiles();
|
||||||
long mcTime = c.getNextMajorCompactTime(storeFiles);
|
long mcTime = c.getNextMajorCompactTime(storeFiles);
|
||||||
for (int i = 0; i < 10; ++i) {
|
for (int i = 0; i < 10; ++i) {
|
||||||
@ -367,7 +365,6 @@ public class TestMajorCompaction {
|
|||||||
assertEquals(countRow2, count2);
|
assertEquals(countRow2, count2);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
||||||
private int count() throws IOException {
|
private int count() throws IOException {
|
||||||
int count = 0;
|
int count = 0;
|
||||||
for (HStoreFile f : r.getStore(COLUMN_FAMILY_TEXT).getStorefiles()) {
|
for (HStoreFile f : r.getStore(COLUMN_FAMILY_TEXT).getStorefiles()) {
|
||||||
@ -388,14 +385,13 @@ public class TestMajorCompaction {
|
|||||||
|
|
||||||
private void createStoreFile(final HRegion region, String family) throws IOException {
|
private void createStoreFile(final HRegion region, String family) throws IOException {
|
||||||
Table loader = new RegionAsTable(region);
|
Table loader = new RegionAsTable(region);
|
||||||
HBaseTestCase.addContent(loader, family);
|
HTestConst.addContent(loader, family);
|
||||||
region.flush(true);
|
region.flush(true);
|
||||||
}
|
}
|
||||||
|
|
||||||
private void createSmallerStoreFile(final HRegion region) throws IOException {
|
private void createSmallerStoreFile(final HRegion region) throws IOException {
|
||||||
Table loader = new RegionAsTable(region);
|
Table loader = new RegionAsTable(region);
|
||||||
HBaseTestCase.addContent(loader, Bytes.toString(COLUMN_FAMILY), Bytes.toBytes("" +
|
HTestConst.addContent(loader, Bytes.toString(COLUMN_FAMILY), Bytes.toBytes("" + "bbb"), null);
|
||||||
"bbb"), null);
|
|
||||||
region.flush(true);
|
region.flush(true);
|
||||||
}
|
}
|
||||||
|
|
||||||
@@ -414,8 +410,7 @@ public class TestMajorCompaction {
     CompactionRequestImpl request = store.requestCompaction().get().getRequest();
     assertNotNull("Expected to receive a compaction request", request);
     assertEquals(
-      "System-requested major compaction should not occur if there are too many store files",
-      false,
+      "System-requested major compaction should not occur if there are too many store files", false,
       request.isMajor());
   }
 
@@ -430,14 +425,12 @@ public class TestMajorCompaction {
       createStoreFile(r);
     }
     store.triggerMajorCompaction();
-    CompactionRequestImpl request =
-      store.requestCompaction(PRIORITY_USER, CompactionLifeCycleTracker.DUMMY, null).get()
-        .getRequest();
+    CompactionRequestImpl request = store
+      .requestCompaction(PRIORITY_USER, CompactionLifeCycleTracker.DUMMY, null).get().getRequest();
     assertNotNull("Expected to receive a compaction request", request);
     assertEquals(
       "User-requested major compaction should always occur, even if there are too many store files",
-      true,
-      request.isMajor());
+      true, request.isMajor());
   }
 
   /**
@@ -22,13 +22,14 @@ import static org.apache.hadoop.hbase.HBaseTestingUtility.fam1;
 import static org.apache.hadoop.hbase.HBaseTestingUtility.fam2;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertTrue;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
-import org.apache.hadoop.hbase.HBaseTestCase;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.HTestConst;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Delete;
 import org.apache.hadoop.hbase.client.Get;
@@ -174,13 +175,13 @@ public class TestMinorCompaction {
       throws Exception {
     Table loader = new RegionAsTable(r);
     for (int i = 0; i < compactionThreshold + 1; i++) {
-      HBaseTestCase.addContent(loader, Bytes.toString(fam1), Bytes.toString(col1), firstRowBytes,
+      HTestConst.addContent(loader, Bytes.toString(fam1), Bytes.toString(col1), firstRowBytes,
         thirdRowBytes, i);
-      HBaseTestCase.addContent(loader, Bytes.toString(fam1), Bytes.toString(col2), firstRowBytes,
+      HTestConst.addContent(loader, Bytes.toString(fam1), Bytes.toString(col2), firstRowBytes,
         thirdRowBytes, i);
-      HBaseTestCase.addContent(loader, Bytes.toString(fam2), Bytes.toString(col1), firstRowBytes,
+      HTestConst.addContent(loader, Bytes.toString(fam2), Bytes.toString(col1), firstRowBytes,
         thirdRowBytes, i);
-      HBaseTestCase.addContent(loader, Bytes.toString(fam2), Bytes.toString(col2), firstRowBytes,
+      HTestConst.addContent(loader, Bytes.toString(fam2), Bytes.toString(col2), firstRowBytes,
         thirdRowBytes, i);
       r.flush(true);
     }
 
@@ -32,10 +32,10 @@ import java.util.List;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
-import org.apache.hadoop.hbase.HBaseTestCase;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.HTestConst;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.UnknownScannerException;
 import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
@@ -133,7 +133,7 @@ public class TestScanner {
     byte [] stoprow = Bytes.toBytes("ccc");
     try {
       this.region = TEST_UTIL.createLocalHRegion(TESTTABLEDESC, null, null);
-      HBaseTestCase.addContent(this.region, HConstants.CATALOG_FAMILY);
+      HTestConst.addContent(this.region, HConstants.CATALOG_FAMILY);
       List<Cell> results = new ArrayList<>();
       // Do simple test of getting one row only first.
       Scan scan = new Scan().withStartRow(Bytes.toBytes("abc"))
@@ -207,7 +207,7 @@ public class TestScanner {
   public void testFilters() throws IOException {
     try {
       this.region = TEST_UTIL.createLocalHRegion(TESTTABLEDESC, null, null);
-      HBaseTestCase.addContent(this.region, HConstants.CATALOG_FAMILY);
+      HTestConst.addContent(this.region, HConstants.CATALOG_FAMILY);
       byte [] prefix = Bytes.toBytes("ab");
       Filter newFilter = new PrefixFilter(prefix);
       Scan scan = new Scan();
@@ -233,7 +233,7 @@ public class TestScanner {
   public void testRaceBetweenClientAndTimeout() throws Exception {
     try {
       this.region = TEST_UTIL.createLocalHRegion(TESTTABLEDESC, null, null);
-      HBaseTestCase.addContent(this.region, HConstants.CATALOG_FAMILY);
+      HTestConst.addContent(this.region, HConstants.CATALOG_FAMILY);
       Scan scan = new Scan();
       InternalScanner s = region.getScanner(scan);
       List<Cell> results = new ArrayList<>();
@@ -465,7 +465,7 @@ public class TestScanner {
     Table hri = new RegionAsTable(region);
     try {
       LOG.info("Added: " +
-        HBaseTestCase.addContent(hri, Bytes.toString(HConstants.CATALOG_FAMILY),
+        HTestConst.addContent(hri, Bytes.toString(HConstants.CATALOG_FAMILY),
           Bytes.toString(HConstants.REGIONINFO_QUALIFIER)));
       int count = count(hri, -1, false);
       assertEquals(count, count(hri, 100, false)); // do a sync flush.
@@ -487,7 +487,7 @@ public class TestScanner {
     Table hri = new RegionAsTable(region);
     try {
       LOG.info("Added: " +
-        HBaseTestCase.addContent(hri, Bytes.toString(HConstants.CATALOG_FAMILY),
+        HTestConst.addContent(hri, Bytes.toString(HConstants.CATALOG_FAMILY),
           Bytes.toString(HConstants.REGIONINFO_QUALIFIER)));
       int count = count(hri, -1, false);
       assertEquals(count, count(hri, 100, true)); // do a true concurrent background thread flush
@@ -513,9 +513,9 @@ public class TestScanner {
     Table hri = new RegionAsTable(region);
 
     try {
-      HBaseTestCase.addContent(hri, Bytes.toString(fam1), Bytes.toString(col1),
+      HTestConst.addContent(hri, Bytes.toString(fam1), Bytes.toString(col1),
         firstRowBytes, secondRowBytes);
-      HBaseTestCase.addContent(hri, Bytes.toString(fam2), Bytes.toString(col1),
+      HTestConst.addContent(hri, Bytes.toString(fam2), Bytes.toString(col1),
         firstRowBytes, secondRowBytes);
 
       Delete dc = new Delete(firstRowBytes);
@@ -524,9 +524,9 @@ public class TestScanner {
       region.delete(dc);
       region.flush(true);
 
-      HBaseTestCase.addContent(hri, Bytes.toString(fam1), Bytes.toString(col1),
+      HTestConst.addContent(hri, Bytes.toString(fam1), Bytes.toString(col1),
         secondRowBytes, thirdRowBytes);
-      HBaseTestCase.addContent(hri, Bytes.toString(fam2), Bytes.toString(col1),
+      HTestConst.addContent(hri, Bytes.toString(fam2), Bytes.toString(col1),
         secondRowBytes, thirdRowBytes);
       region.flush(true);
 
@@ -17,26 +17,34 @@
  */
 package org.apache.hadoop.hbase.regionserver;
 
+import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
+
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Iterator;
 import java.util.List;
-import java.util.Random;
+import java.util.concurrent.ThreadLocalRandom;
+import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.HBaseClassTestRule;
-import org.apache.hadoop.hbase.HBaseTestCase;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
 import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
 import org.apache.hadoop.hbase.client.Durability;
 import org.apache.hadoop.hbase.client.Put;
+import org.apache.hadoop.hbase.client.RegionInfo;
+import org.apache.hadoop.hbase.client.RegionInfoBuilder;
 import org.apache.hadoop.hbase.client.Scan;
+import org.apache.hadoop.hbase.client.TableDescriptor;
 import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
 import org.apache.hadoop.hbase.testclassification.RegionServerTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.junit.AfterClass;
+import org.junit.BeforeClass;
 import org.junit.ClassRule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
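The next hunk also retires the mutable ModifyableTableDescriptor in favor of an immutable TableDescriptor assembled once in a static initializer. Condensed from that hunk, the builder shape looks like this (the 100 versions and 8 KB block size are the test table's own settings; nothing else is added):

    // Build the descriptor completely, then freeze it. TableDescriptor exposes
    // no setters, so the finished instance is safe to share as a constant.
    private static final TableDescriptor TESTTABLEDESC;
    static {
      TableDescriptorBuilder builder =
        TableDescriptorBuilder.newBuilder(TableName.valueOf("testwidescan"));
      for (byte[] cfName : new byte[][] { A, B, C }) {
        builder.setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(cfName)
          .setMaxVersions(100)     // keep versions around to help debugging
          .setBlocksize(8 * 1024)  // 8 KB blocks, as in the original table
          .build());
      }
      TESTTABLEDESC = builder.build();
    }
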
@@ -44,34 +52,52 @@ import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 
 @Category({ RegionServerTests.class, SmallTests.class })
-public class TestWideScanner extends HBaseTestCase {
+public class TestWideScanner {
 
   @ClassRule
   public static final HBaseClassTestRule CLASS_RULE =
     HBaseClassTestRule.forClass(TestWideScanner.class);
 
+  private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
+
   private static final Logger LOG = LoggerFactory.getLogger(TestWideScanner.class);
 
-  static final byte[] A = Bytes.toBytes("A");
-  static final byte[] B = Bytes.toBytes("B");
-  static final byte[] C = Bytes.toBytes("C");
-  static byte[][] COLUMNS = { A, B, C };
-  static final Random rng = new Random();
-  static final TableDescriptorBuilder.ModifyableTableDescriptor TESTTABLEDESC =
-      new TableDescriptorBuilder.ModifyableTableDescriptor(TableName.valueOf("testwidescan"));
+  private static final byte[] A = Bytes.toBytes("A");
+  private static final byte[] B = Bytes.toBytes("B");
+  private static final byte[] C = Bytes.toBytes("C");
+  private static byte[][] COLUMNS = { A, B, C };
+  private static final TableDescriptor TESTTABLEDESC;
   static {
+    TableDescriptorBuilder builder =
+      TableDescriptorBuilder.newBuilder(TableName.valueOf("testwidescan"));
     for (byte[] cfName : new byte[][] { A, B, C }) {
-      TESTTABLEDESC.setColumnFamily(
-          new ColumnFamilyDescriptorBuilder.ModifyableColumnFamilyDescriptor(cfName)
       // Keep versions to help debugging.
-          .setMaxVersions(100)
-          .setBlocksize(8 * 1024)
-      );
+      builder.setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(cfName).setMaxVersions(100)
+        .setBlocksize(8 * 1024).build());
     }
+    TESTTABLEDESC = builder.build();
   }
 
   /** HRegionInfo for root region */
-  HRegion r;
+  private static HRegion REGION;
+
+  @BeforeClass
+  public static void setUp() throws IOException {
+    Path testDir = UTIL.getDataTestDir();
+    RegionInfo hri = RegionInfoBuilder.newBuilder(TESTTABLEDESC.getTableName()).build();
+    REGION =
+      HBaseTestingUtility.createRegionAndWAL(hri, testDir, UTIL.getConfiguration(), TESTTABLEDESC);
+  }
+
+  @AfterClass
+  public static void tearDown() throws IOException {
+    if (REGION != null) {
+      HBaseTestingUtility.closeRegionAndWAL(REGION);
+      REGION = null;
+    }
+    UTIL.cleanupTestDir();
+  }
 
   private int addWideContent(HRegion region) throws IOException {
     int count = 0;
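With the superclass gone, region lifecycle management moves into explicit JUnit 4 class-level hooks, as the hunk above shows: one static region is created before any test runs and torn down, along with its WAL and the on-disk test directory, after the last one. The pattern in isolation (condensed from the hunk; nothing here is invented beyond the trimming):

    private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();
    private static HRegion REGION;

    @BeforeClass
    public static void setUp() throws IOException {
      RegionInfo hri = RegionInfoBuilder.newBuilder(TESTTABLEDESC.getTableName()).build();
      REGION = HBaseTestingUtility.createRegionAndWAL(hri, UTIL.getDataTestDir(),
        UTIL.getConfiguration(), TESTTABLEDESC);
    }

    @AfterClass
    public static void tearDown() throws IOException {
      if (REGION != null) {
        HBaseTestingUtility.closeRegionAndWAL(REGION); // closes region and WAL together
        REGION = null;                                 // guard against double close
      }
      UTIL.cleanupTestDir();                           // remove the data test dir
    }
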
@@ -85,7 +111,7 @@ public class TestWideScanner extends HBaseTestCase {
       Put put = new Put(row);
       put.setDurability(Durability.SKIP_WAL);
       long ts1 = ++ts;
-      put.addColumn(COLUMNS[rng.nextInt(COLUMNS.length)], b, ts1, b);
+      put.addColumn(COLUMNS[ThreadLocalRandom.current().nextInt(COLUMNS.length)], b, ts1, b);
       region.put(put);
       count++;
     }
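The rng field dropped in the earlier hunk is replaced at its only call site, above, by ThreadLocalRandom: a piece of shared mutable static state goes away, and the per-thread generator's seed updates never contend across threads. The one-line difference:

    // Before: one shared generator with static lifetime.
    //   static final Random rng = new Random();
    //   put.addColumn(COLUMNS[rng.nextInt(COLUMNS.length)], b, ts1, b);
    // After: a per-thread instance fetched on demand, no field to maintain.
    put.addColumn(COLUMNS[ThreadLocalRandom.current().nextInt(COLUMNS.length)], b, ts1, b);
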
@@ -97,9 +123,7 @@ public class TestWideScanner extends HBaseTestCase {
   @Test
   public void testWideScanBatching() throws IOException {
     final int batch = 256;
-    try {
-      this.r = createNewHRegion(TESTTABLEDESC, null, null);
-      int inserted = addWideContent(this.r);
+    int inserted = addWideContent(REGION);
     List<Cell> results = new ArrayList<>();
     Scan scan = new Scan();
     scan.addFamily(A);
@@ -107,7 +131,7 @@ public class TestWideScanner extends HBaseTestCase {
     scan.addFamily(C);
     scan.readVersions(100);
     scan.setBatch(batch);
-    InternalScanner s = r.getScanner(scan);
+    try (InternalScanner s = REGION.getScanner(scan)) {
       int total = 0;
       int i = 0;
       boolean more;
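Opening the scanner with try-with-resources here is what lets the next hunk drop both the explicit s.close() and the finally block: InternalScanner is a Closeable, so the scanner is released on every exit path, assertion failures included. The shape of the migrated loop (condensed; REGION and scan as in the hunks above):

    try (InternalScanner s = REGION.getScanner(scan)) {
      List<Cell> results = new ArrayList<>();
      boolean more;
      do {
        more = s.next(results); // returns true while rows remain
        results.clear();        // reuse the buffer between batches
      } while (more);
    } // scanner closed here, whatever happened inside the block
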
@@ -136,19 +160,13 @@ public class TestWideScanner extends HBaseTestCase {
           ((HRegion.RegionScannerImpl) s).storeHeap.getHeap().iterator();
         while (scanners.hasNext()) {
           StoreScanner ss = (StoreScanner) scanners.next();
-          ss.updateReaders(Collections.EMPTY_LIST, Collections.EMPTY_LIST);
+          ss.updateReaders(Collections.emptyList(), Collections.emptyList());
         }
       } while (more);
 
       // assert that the scanner returned all values
       LOG.info("inserted " + inserted + ", scanned " + total);
       assertEquals(total, inserted);
-
-      s.close();
-    } finally {
-      HBaseTestingUtility.closeRegionAndWAL(this.r);
     }
   }
 
 }
 
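One last cleanup inside the final hunk: Collections.EMPTY_LIST is the raw, pre-generics constant, so assigning it to a typed list compiles only with an unchecked-conversion warning, while Collections.emptyList() hands back the same shared immutable instance through a generic method that lets the compiler infer the element type. Illustrated with a placeholder element type:

    List<String> raw = Collections.EMPTY_LIST;    // raw type: unchecked warning
    List<String> typed = Collections.emptyList(); // inferred <String>: warning-free
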