HBASE-26724 Backport the UT changes in HBASE-24510 to branch-2.x (#4081)

Signed-off-by: Xin Sun <ddupgs@gmail.com>

parent 4e15101fa1
commit 12002e6a79
@@ -16,16 +16,21 @@
 */
package org.apache.hadoop.hbase;

import java.io.IOException;
import java.util.Arrays;
import java.util.HashSet;
import java.util.Set;
import java.util.Collections;

import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.regionserver.Region;
import org.apache.hadoop.hbase.regionserver.RegionAsTable;
import org.apache.hadoop.hbase.util.Bytes;

/**
 * Similar to {@link HConstants} but for tests. Also provides some simple
 * static utility functions to generate test data.
 * Similar to {@link HConstants} but for tests. Also provides some simple static utility functions
 * to generate test data.
 */
public class HTestConst {

@@ -34,15 +39,13 @@ public class HTestConst {

  public static final String DEFAULT_TABLE_STR = "MyTestTable";
  public static final byte[] DEFAULT_TABLE_BYTES = Bytes.toBytes(DEFAULT_TABLE_STR);
  public static final TableName DEFAULT_TABLE =
      TableName.valueOf(DEFAULT_TABLE_BYTES);
  public static final TableName DEFAULT_TABLE = TableName.valueOf(DEFAULT_TABLE_BYTES);

  public static final String DEFAULT_CF_STR = "MyDefaultCF";
  public static final byte[] DEFAULT_CF_BYTES = Bytes.toBytes(DEFAULT_CF_STR);

  public static final Set<String> DEFAULT_CF_STR_SET =
      Collections.unmodifiableSet(new HashSet<>(
          Arrays.asList(new String[] { DEFAULT_CF_STR })));
      Collections.unmodifiableSet(new HashSet<>(Arrays.asList(new String[] { DEFAULT_CF_STR })));

  public static final String DEFAULT_ROW_STR = "MyTestRow";
  public static final byte[] DEFAULT_ROW_BYTES = Bytes.toBytes(DEFAULT_ROW_STR);

@@ -53,12 +56,16 @@ public class HTestConst {
  public static String DEFAULT_VALUE_STR = "MyTestValue";
  public static byte[] DEFAULT_VALUE_BYTES = Bytes.toBytes(DEFAULT_VALUE_STR);

  private static final char FIRST_CHAR = 'a';
  private static final char LAST_CHAR = 'z';
  private static final byte[] START_KEY_BYTES = { FIRST_CHAR, FIRST_CHAR, FIRST_CHAR };

  /**
   * Generate the given number of unique byte sequences by appending numeric
   * suffixes (ASCII representations of decimal numbers).
   * Generate the given number of unique byte sequences by appending numeric suffixes (ASCII
   * representations of decimal numbers).
   */
  public static byte[][] makeNAscii(byte[] base, int n) {
    byte [][] ret = new byte[n][];
    byte[][] ret = new byte[n][];
    for (int i = 0; i < n; i++) {
      byte[] tail = Bytes.toBytes(Integer.toString(i));
      ret[i] = Bytes.add(base, tail);

@@ -66,4 +73,112 @@ public class HTestConst {
    return ret;
  }

  /**
   * Add content to region <code>r</code> on the passed column <code>column</code>. Adds data of the
   * from 'aaa', 'aab', etc where key and value are the same.
   * @return count of what we added.
   */
  public static long addContent(final Region r, final byte[] columnFamily, final byte[] column)
      throws IOException {
    byte[] startKey = r.getRegionInfo().getStartKey();
    byte[] endKey = r.getRegionInfo().getEndKey();
    byte[] startKeyBytes = startKey;
    if (startKeyBytes == null || startKeyBytes.length == 0) {
      startKeyBytes = START_KEY_BYTES;
    }
    return addContent(new RegionAsTable(r), Bytes.toString(columnFamily), Bytes.toString(column),
      startKeyBytes, endKey, -1);
  }

  public static long addContent(final Region r, final byte[] columnFamily) throws IOException {
    return addContent(r, columnFamily, null);
  }

  /**
   * Add content to region <code>r</code> on the passed column <code>column</code>. Adds data of the
   * from 'aaa', 'aab', etc where key and value are the same.
   * @return count of what we added.
   */
  public static long addContent(Table updater, String columnFamily) throws IOException {
    return addContent(updater, columnFamily, START_KEY_BYTES, null);
  }

  public static long addContent(Table updater, String family, String column) throws IOException {
    return addContent(updater, family, column, START_KEY_BYTES, null);
  }

  /**
   * Add content to region <code>r</code> on the passed column <code>column</code>. Adds data of the
   * from 'aaa', 'aab', etc where key and value are the same.
   * @return count of what we added.
   */
  public static long addContent(Table updater, String columnFamily, byte[] startKeyBytes,
      byte[] endKey) throws IOException {
    return addContent(updater, columnFamily, null, startKeyBytes, endKey, -1);
  }

  public static long addContent(Table updater, String family, String column, byte[] startKeyBytes,
      byte[] endKey) throws IOException {
    return addContent(updater, family, column, startKeyBytes, endKey, -1);
  }

  /**
   * Add content to region <code>r</code> on the passed column <code>column</code>. Adds data of the
   * from 'aaa', 'aab', etc where key and value are the same.
   * @return count of what we added.
   */
  public static long addContent(Table updater, String columnFamily, String column,
      byte[] startKeyBytes, byte[] endKey, long ts) throws IOException {
    long count = 0;
    // Add rows of three characters. The first character starts with the
    // 'a' character and runs up to 'z'. Per first character, we run the
    // second character over same range. And same for the third so rows
    // (and values) look like this: 'aaa', 'aab', 'aac', etc.
    char secondCharStart = (char) startKeyBytes[1];
    char thirdCharStart = (char) startKeyBytes[2];
    EXIT: for (char c = (char) startKeyBytes[0]; c <= LAST_CHAR; c++) {
      for (char d = secondCharStart; d <= LAST_CHAR; d++) {
        for (char e = thirdCharStart; e <= LAST_CHAR; e++) {
          byte[] t = new byte[] { (byte) c, (byte) d, (byte) e };
          if (endKey != null && endKey.length > 0 && Bytes.compareTo(endKey, t) <= 0) {
            break EXIT;
          }
          Put put;
          if (ts != -1) {
            put = new Put(t, ts);
          } else {
            put = new Put(t);
          }
          StringBuilder sb = new StringBuilder();
          if (column != null && column.contains(":")) {
            sb.append(column);
          } else {
            if (columnFamily != null) {
              sb.append(columnFamily);
              if (!columnFamily.endsWith(":")) {
                sb.append(":");
              }
              if (column != null) {
                sb.append(column);
              }
            }
          }
          byte[][] split = CellUtil.parseColumn(Bytes.toBytes(sb.toString()));
          if (split.length == 1) {
            byte[] qualifier = new byte[0];
            put.addColumn(split[0], qualifier, t);
          } else {
            put.addColumn(split[0], split[1], t);
          }
          put.setDurability(Durability.SKIP_WAL);
          updater.put(put);
          count++;
        }
        // Set start character back to FIRST_CHAR after we've done first loop.
        thirdCharStart = FIRST_CHAR;
      }
      secondCharStart = FIRST_CHAR;
    }
    return count;
  }
}
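The new HTestConst.addContent overloads above take over from the helpers that used to live on HBaseTestCase, and the remaining hunks in this commit simply repoint callers at them. A minimal sketch of how a test might drive the new helpers after this change (the class name and the region variable are illustrative, not part of the patch; the constants and overloads are the ones defined above):

```java
import java.io.IOException;

import org.apache.hadoop.hbase.HTestConst;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.RegionAsTable;

public class AddContentUsageSketch {
  // Loads rows 'aaa'..'zzz' (key == value) into the region, the same way
  // TestCoprocessorInterface and TestCompaction do after this change.
  static void loadRows(HRegion region) throws IOException {
    // Region-based overload: column family given as bytes.
    HTestConst.addContent(region, HTestConst.DEFAULT_CF_BYTES);

    // Table-based overload: also works against a RegionAsTable wrapper.
    Table loader = new RegionAsTable(region);
    HTestConst.addContent(loader, HTestConst.DEFAULT_CF_STR);
  }
}
```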
@@ -17,11 +17,11 @@
 */
package org.apache.hadoop.hbase.client;

import static org.apache.hadoop.hbase.HBaseTestCase.assertByteEquals;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.greaterThan;
import static org.hamcrest.Matchers.is;
import static org.hamcrest.Matchers.lessThan;
import static org.junit.Assert.assertArrayEquals;

import java.io.IOException;
import java.nio.ByteBuffer;

@@ -168,7 +168,7 @@ public class TestResult extends TestCase {
    for (int i = 0; i < 100; ++i) {
      final byte[] qf = Bytes.toBytes(i);

      assertByteEquals(Bytes.add(value, Bytes.toBytes(i)), r.getValue(family, qf));
      assertArrayEquals(Bytes.add(value, Bytes.toBytes(i)), r.getValue(family, qf));
      assertTrue(r.containsColumn(family, qf));
    }
  }

@@ -187,7 +187,7 @@ public class TestResult extends TestCase {
    for (int i = 0; i < 100; ++i) {
      final byte[] qf = Bytes.toBytes(i);

      assertByteEquals(Bytes.add(value, Bytes.toBytes(i)), r.getValue(family, qf));
      assertArrayEquals(Bytes.add(value, Bytes.toBytes(i)), r.getValue(family, qf));
      assertTrue(r.containsColumn(family, qf));
    }
  }
@@ -38,11 +38,11 @@ import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.Coprocessor;
import org.apache.hadoop.hbase.CoprocessorEnvironment;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestCase;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.HTestConst;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.RegionInfo;

@@ -292,7 +292,7 @@ public class TestCoprocessorInterface {
    HRegion region = initHRegion(tableName, name.getMethodName(), hc, new Class<?>[]{}, families);

    for (int i = 0; i < 3; i++) {
      HBaseTestCase.addContent(region, fam3);
      HTestConst.addContent(region, fam3);
      region.flush(true);
    }

@@ -354,7 +354,7 @@ public class TestCoprocessorInterface {
    HRegion region = initHRegion(tableName, name.getMethodName(), hc,
        new Class<?>[]{CoprocessorImpl.class}, families);
    for (int i = 0; i < 3; i++) {
      HBaseTestCase.addContent(region, fam3);
      HTestConst.addContent(region, fam3);
      region.flush(true);
    }
@@ -17,12 +17,11 @@
 */
package org.apache.hadoop.hbase.regionserver;

import static org.apache.hadoop.hbase.HBaseTestCase.addContent;
import static org.apache.hadoop.hbase.HTestConst.addContent;
import static org.junit.Assert.assertEquals;

import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
@@ -28,7 +28,7 @@ import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThrows;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.Matchers.any;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.spy;

@@ -50,11 +50,11 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.ChoreService;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HBaseTestCase;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.HTestConst;
import org.apache.hadoop.hbase.Waiter;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Durability;

@@ -175,7 +175,7 @@ public class TestCompaction {
      for (int j = 0; j < jmax; j++) {
        p.addColumn(COLUMN_FAMILY, Bytes.toBytes(j), pad);
      }
      HBaseTestCase.addContent(loader, Bytes.toString(COLUMN_FAMILY));
      HTestConst.addContent(loader, Bytes.toString(COLUMN_FAMILY));
      loader.put(p);
      r.flush(true);
    }

@@ -251,7 +251,7 @@
      for (int j = 0; j < jmax; j++) {
        p.addColumn(COLUMN_FAMILY, Bytes.toBytes(j), pad);
      }
      HBaseTestCase.addContent(loader, Bytes.toString(COLUMN_FAMILY));
      HTestConst.addContent(loader, Bytes.toString(COLUMN_FAMILY));
      loader.put(p);
      r.flush(true);
    }

@@ -330,7 +330,7 @@

  private void createStoreFile(final HRegion region, String family) throws IOException {
    Table loader = new RegionAsTable(region);
    HBaseTestCase.addContent(loader, family);
    HTestConst.addContent(loader, family);
    region.flush(true);
  }

@@ -494,7 +494,7 @@
      for (int j = 0; j < jmax; j++) {
        p.addColumn(COLUMN_FAMILY, Bytes.toBytes(j), pad);
      }
      HBaseTestCase.addContent(loader, Bytes.toString(COLUMN_FAMILY));
      HTestConst.addContent(loader, Bytes.toString(COLUMN_FAMILY));
      loader.put(p);
      r.flush(true);
    }
@@ -17,6 +17,14 @@
 */
package org.apache.hadoop.hbase.regionserver;

import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

@@ -37,7 +45,6 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestCase;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;

@@ -60,7 +67,6 @@ import org.apache.hadoop.hbase.io.hfile.CacheStats;
import org.apache.hadoop.hbase.io.hfile.HFileContext;
import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
import org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoder;
import org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoderImpl;
import org.apache.hadoop.hbase.io.hfile.HFileInfo;
import org.apache.hadoop.hbase.io.hfile.HFileScanner;
import org.apache.hadoop.hbase.io.hfile.ReaderContext;

@@ -72,11 +78,13 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.ChecksumType;
import org.apache.hadoop.hbase.util.CommonFSUtils;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
import org.mockito.Mockito;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@@ -88,8 +96,8 @@ import org.apache.hbase.thirdparty.com.google.common.collect.Lists;
/**
 * Test HStoreFile
 */
@Category({RegionServerTests.class, MediumTests.class})
public class TestHStoreFile extends HBaseTestCase {
@Category({ RegionServerTests.class, MediumTests.class })
public class TestHStoreFile {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =

@@ -102,22 +110,35 @@ public class TestHStoreFile extends HBaseTestCase {
  private static final ChecksumType CKTYPE = ChecksumType.CRC32C;
  private static final int CKBYTES = 512;
  private static String TEST_FAMILY = "cf";
  private static final char FIRST_CHAR = 'a';
  private static final char LAST_CHAR = 'z';

  @Rule
  public TestName name = new TestName();

  private Configuration conf;
  private Path testDir;
  private FileSystem fs;

  @Override
  @Before
  public void setUp() throws Exception {
    super.setUp();
  public void setUp() throws IOException {
    conf = TEST_UTIL.getConfiguration();
    testDir = TEST_UTIL.getDataTestDir(name.getMethodName());
    fs = testDir.getFileSystem(conf);
  }

  @Override
  @After
  public void tearDown() throws Exception {
    super.tearDown();
  @AfterClass
  public static void tearDownAfterClass() {
    TEST_UTIL.cleanupTestDir();
  }

  /**
   * Write a file and then assert that we can read from top and bottom halves using two
<<<<<<< HEAD
   * HalfMapFiles, as well as one HalfMapFile and one HFileLink file.
=======
   * HalfMapFiles.
>>>>>>> 16116fa35e... HBASE-24510 Remove HBaseTestCase and GenericTestUtils (#1859)
   */
  @Test
  public void testBasicHalfAndHFileLinkMapFile() throws Exception {
@ -129,11 +150,9 @@ public class TestHStoreFile extends HBaseTestCase {
|
|||
HRegionFileSystem regionFs = HRegionFileSystem.createRegionOnFileSystem(conf, fs,
|
||||
CommonFSUtils.getTableDir(CommonFSUtils.getRootDir(conf), hri.getTable()), hri);
|
||||
|
||||
HFileContext meta = new HFileContextBuilder().withBlockSize(2*1024).build();
|
||||
HFileContext meta = new HFileContextBuilder().withBlockSize(2 * 1024).build();
|
||||
StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs)
|
||||
.withFilePath(regionFs.createTempName())
|
||||
.withFileContext(meta)
|
||||
.build();
|
||||
.withFilePath(regionFs.createTempName()).withFileContext(meta).build();
|
||||
writeStoreFile(writer);
|
||||
|
||||
Path sfPath = regionFs.commitStoreFile(TEST_FAMILY, writer.getPath());
|
||||
|
@ -142,15 +161,15 @@ public class TestHStoreFile extends HBaseTestCase {
|
|||
}
|
||||
|
||||
private void writeStoreFile(final StoreFileWriter writer) throws IOException {
|
||||
writeStoreFile(writer, Bytes.toBytes(getName()), Bytes.toBytes(getName()));
|
||||
writeStoreFile(writer, Bytes.toBytes(name.getMethodName()),
|
||||
Bytes.toBytes(name.getMethodName()));
|
||||
}
|
||||
|
||||
// pick an split point (roughly halfway)
|
||||
byte[] SPLITKEY = new byte[] { (LAST_CHAR + FIRST_CHAR) / 2, FIRST_CHAR };
|
||||
|
||||
/*
|
||||
* Writes HStoreKey and ImmutableBytes data to passed writer and
|
||||
* then closes it.
|
||||
* Writes HStoreKey and ImmutableBytes data to passed writer and then closes it.
|
||||
* @param writer
|
||||
* @throws IOException
|
||||
*/
|
||||
|
@ -170,8 +189,8 @@ public class TestHStoreFile extends HBaseTestCase {
|
|||
}
|
||||
|
||||
/**
|
||||
* Test that our mechanism of writing store files in one region to reference
|
||||
* store files in other regions works.
|
||||
* Test that our mechanism of writing store files in one region to reference store files in other
|
||||
* regions works.
|
||||
*/
|
||||
@Test
|
||||
public void testReference() throws IOException {
|
||||
|
@ -182,9 +201,7 @@ public class TestHStoreFile extends HBaseTestCase {
|
|||
HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build();
|
||||
// Make a store file and write data to it.
|
||||
StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs)
|
||||
.withFilePath(regionFs.createTempName())
|
||||
.withFileContext(meta)
|
||||
.build();
|
||||
.withFilePath(regionFs.createTempName()).withFileContext(meta).build();
|
||||
writeStoreFile(writer);
|
||||
|
||||
Path hsfPath = regionFs.commitStoreFile(TEST_FAMILY, writer.getPath());
|
||||
|
@ -194,8 +211,8 @@ public class TestHStoreFile extends HBaseTestCase {
|
|||
// Split on a row, not in middle of row. Midkey returned by reader
|
||||
// may be in middle of row. Create new one with empty column and
|
||||
// timestamp.
|
||||
byte [] midRow = CellUtil.cloneRow(reader.midKey().get());
|
||||
byte [] finalRow = CellUtil.cloneRow(reader.getLastKey().get());
|
||||
byte[] midRow = CellUtil.cloneRow(reader.midKey().get());
|
||||
byte[] finalRow = CellUtil.cloneRow(reader.getLastKey().get());
|
||||
hsf.closeStoreFile(true);
|
||||
|
||||
// Make a reference
|
||||
|
@ -256,12 +273,13 @@ public class TestHStoreFile extends HBaseTestCase {
|
|||
byte[] cf = Bytes.toBytes("ty");
|
||||
ColumnFamilyDescriptor cfd = ColumnFamilyDescriptorBuilder.of(cf);
|
||||
when(store.getColumnFamilyDescriptor()).thenReturn(cfd);
|
||||
StoreFileScanner scanner =
|
||||
new StoreFileScanner(reader, mock(HFileScanner.class), false, false, 0, 0, true);
|
||||
try (StoreFileScanner scanner =
|
||||
new StoreFileScanner(reader, mock(HFileScanner.class), false, false, 0, 0, true)) {
|
||||
Scan scan = new Scan();
|
||||
scan.setColumnFamilyTimeRange(cf, 0, 1);
|
||||
assertFalse(scanner.shouldUseScanner(scan, store, 0));
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testHFileLink() throws IOException {
|
||||
|
@ -269,22 +287,20 @@ public class TestHStoreFile extends HBaseTestCase {
|
|||
// force temp data in hbase/target/test-data instead of /tmp/hbase-xxxx/
|
||||
Configuration testConf = new Configuration(this.conf);
|
||||
CommonFSUtils.setRootDir(testConf, testDir);
|
||||
HRegionFileSystem regionFs = HRegionFileSystem.createRegionOnFileSystem(
|
||||
testConf, fs, CommonFSUtils.getTableDir(testDir, hri.getTable()), hri);
|
||||
HRegionFileSystem regionFs = HRegionFileSystem.createRegionOnFileSystem(testConf, fs,
|
||||
CommonFSUtils.getTableDir(testDir, hri.getTable()), hri);
|
||||
HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build();
|
||||
|
||||
// Make a store file and write data to it.
|
||||
StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs)
|
||||
.withFilePath(regionFs.createTempName())
|
||||
.withFileContext(meta)
|
||||
.build();
|
||||
.withFilePath(regionFs.createTempName()).withFileContext(meta).build();
|
||||
writeStoreFile(writer);
|
||||
|
||||
Path storeFilePath = regionFs.commitStoreFile(TEST_FAMILY, writer.getPath());
|
||||
Path dstPath = new Path(regionFs.getTableDir(), new Path("test-region", TEST_FAMILY));
|
||||
HFileLink.create(testConf, this.fs, dstPath, hri, storeFilePath.getName());
|
||||
Path linkFilePath = new Path(dstPath,
|
||||
HFileLink.createHFileLinkName(hri, storeFilePath.getName()));
|
||||
Path linkFilePath =
|
||||
new Path(dstPath, HFileLink.createHFileLinkName(hri, storeFilePath.getName()));
|
||||
|
||||
// Try to open store file from link
|
||||
StoreFileInfo storeFileInfo = new StoreFileInfo(testConf, this.fs, linkFilePath, true);
|
||||
|
@ -303,8 +319,8 @@ public class TestHStoreFile extends HBaseTestCase {
|
|||
}
|
||||
|
||||
/**
|
||||
* This test creates an hfile and then the dir structures and files to verify that references
|
||||
* to hfilelinks (created by snapshot clones) can be properly interpreted.
|
||||
* This test creates an hfile and then the dir structures and files to verify that references to
|
||||
* hfilelinks (created by snapshot clones) can be properly interpreted.
|
||||
*/
|
||||
@Test
|
||||
public void testReferenceToHFileLink() throws IOException {
|
||||
|
@ -320,21 +336,18 @@ public class TestHStoreFile extends HBaseTestCase {
|
|||
HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build();
|
||||
// Make a store file and write data to it. <root>/<tablename>/<rgn>/<cf>/<file>
|
||||
StoreFileWriter writer = new StoreFileWriter.Builder(testConf, cacheConf, this.fs)
|
||||
.withFilePath(regionFs.createTempName())
|
||||
.withFileContext(meta)
|
||||
.build();
|
||||
.withFilePath(regionFs.createTempName()).withFileContext(meta).build();
|
||||
writeStoreFile(writer);
|
||||
Path storeFilePath = regionFs.commitStoreFile(TEST_FAMILY, writer.getPath());
|
||||
|
||||
// create link to store file. <root>/clone/region/<cf>/<hfile>-<region>-<table>
|
||||
HRegionInfo hriClone = new HRegionInfo(TableName.valueOf("clone"));
|
||||
HRegionFileSystem cloneRegionFs = HRegionFileSystem.createRegionOnFileSystem(
|
||||
testConf, fs, CommonFSUtils.getTableDir(testDir, hri.getTable()),
|
||||
hriClone);
|
||||
RegionInfo hriClone = RegionInfoBuilder.newBuilder(TableName.valueOf("clone")).build();
|
||||
HRegionFileSystem cloneRegionFs = HRegionFileSystem.createRegionOnFileSystem(testConf, fs,
|
||||
CommonFSUtils.getTableDir(testDir, hri.getTable()), hriClone);
|
||||
Path dstPath = cloneRegionFs.getStoreDir(TEST_FAMILY);
|
||||
HFileLink.create(testConf, this.fs, dstPath, hri, storeFilePath.getName());
|
||||
Path linkFilePath = new Path(dstPath,
|
||||
HFileLink.createHFileLinkName(hri, storeFilePath.getName()));
|
||||
Path linkFilePath =
|
||||
new Path(dstPath, HFileLink.createHFileLinkName(hri, storeFilePath.getName()));
|
||||
|
||||
// create splits of the link.
|
||||
// <root>/clone/splitA/<cf>/<reftohfilelink>,
|
||||
|
@ -373,7 +386,7 @@ public class TestHStoreFile extends HBaseTestCase {
|
|||
HFileScanner sB = hsfB.getReader().getScanner(false, false);
|
||||
sB.seekTo();
|
||||
|
||||
//count++ as seekTo() will advance the scanner
|
||||
// count++ as seekTo() will advance the scanner
|
||||
count++;
|
||||
while (sB.next()) {
|
||||
count++;
|
||||
|
@ -422,8 +435,7 @@ public class TestHStoreFile extends HBaseTestCase {
|
|||
|
||||
if ((PrivateCellUtil.compare(topScanner.getReader().getComparator(), midKV, key.array(),
|
||||
key.arrayOffset(), key.limit())) > 0) {
|
||||
fail("key=" + Bytes.toStringBinary(key) + " < midkey=" +
|
||||
midkey);
|
||||
fail("key=" + Bytes.toStringBinary(key) + " < midkey=" + midkey);
|
||||
}
|
||||
if (first) {
|
||||
first = false;
|
||||
|
@ -434,14 +446,12 @@ public class TestHStoreFile extends HBaseTestCase {
|
|||
|
||||
first = true;
|
||||
HFileScanner bottomScanner = bottom.getScanner(false, false);
|
||||
while ((!bottomScanner.isSeeked() && bottomScanner.seekTo()) ||
|
||||
bottomScanner.next()) {
|
||||
while ((!bottomScanner.isSeeked() && bottomScanner.seekTo()) || bottomScanner.next()) {
|
||||
previous = ByteBuffer.wrap(((KeyValue) bottomScanner.getKey()).getKey());
|
||||
key = ByteBuffer.wrap(((KeyValue) bottomScanner.getKey()).getKey());
|
||||
if (first) {
|
||||
first = false;
|
||||
LOG.info("First in bottom: " +
|
||||
Bytes.toString(Bytes.toBytes(previous)));
|
||||
LOG.info("First in bottom: " + Bytes.toString(Bytes.toBytes(previous)));
|
||||
}
|
||||
assertTrue(key.compareTo(bbMidkeyBytes) < 0);
|
||||
}
|
||||
|
@ -455,7 +465,7 @@ public class TestHStoreFile extends HBaseTestCase {
|
|||
// 2. test using a midkey which will generate one Reference file and one HFileLink file.
|
||||
// First, do a key that is < than first key. Ensure splits behave
|
||||
// properly.
|
||||
byte [] badmidkey = Bytes.toBytes(" .");
|
||||
byte[] badmidkey = Bytes.toBytes(" .");
|
||||
assertTrue(fs.exists(f.getPath()));
|
||||
topPath = splitStoreFile(regionFs, topHri, TEST_FAMILY, f, badmidkey, true);
|
||||
bottomPath = splitStoreFile(regionFs, bottomHri, TEST_FAMILY, f, badmidkey, false);
|
||||
|
@ -469,8 +479,7 @@ public class TestHStoreFile extends HBaseTestCase {
|
|||
first = true;
|
||||
topScanner = top.getScanner(false, false);
|
||||
KeyValue.KeyOnlyKeyValue keyOnlyKV = new KeyValue.KeyOnlyKeyValue();
|
||||
while ((!topScanner.isSeeked() && topScanner.seekTo()) ||
|
||||
topScanner.next()) {
|
||||
while ((!topScanner.isSeeked() && topScanner.seekTo()) || topScanner.next()) {
|
||||
key = ByteBuffer.wrap(((KeyValue) topScanner.getKey()).getKey());
|
||||
keyOnlyKV.setKey(key.array(), 0 + key.arrayOffset(), key.limit());
|
||||
assertTrue(PrivateCellUtil.compare(topScanner.getReader().getComparator(), keyOnlyKV,
|
||||
|
@ -498,7 +507,7 @@ public class TestHStoreFile extends HBaseTestCase {
|
|||
|
||||
// Test when badkey is > than last key in file ('||' > 'zz').
|
||||
badmidkey = Bytes.toBytes("|||");
|
||||
topPath = splitStoreFile(regionFs,topHri, TEST_FAMILY, f, badmidkey, true);
|
||||
topPath = splitStoreFile(regionFs, topHri, TEST_FAMILY, f, badmidkey, true);
|
||||
bottomPath = splitStoreFile(regionFs, bottomHri, TEST_FAMILY, f, badmidkey, false);
|
||||
assertNull(topPath);
|
||||
|
||||
|
@ -507,8 +516,7 @@ public class TestHStoreFile extends HBaseTestCase {
|
|||
bottom = bottomF.getReader();
|
||||
first = true;
|
||||
bottomScanner = bottom.getScanner(false, false);
|
||||
while ((!bottomScanner.isSeeked() && bottomScanner.seekTo()) ||
|
||||
bottomScanner.next()) {
|
||||
while ((!bottomScanner.isSeeked() && bottomScanner.seekTo()) || bottomScanner.next()) {
|
||||
key = ByteBuffer.wrap(((KeyValue) bottomScanner.getKey()).getKey());
|
||||
if (first) {
|
||||
first = false;
|
||||
|
@ -550,8 +558,8 @@ public class TestHStoreFile extends HBaseTestCase {
|
|||
long now = EnvironmentEdgeManager.currentTime();
|
||||
for (int i = 0; i < 2000; i += 2) {
|
||||
String row = String.format(localFormatter, i);
|
||||
KeyValue kv = new KeyValue(Bytes.toBytes(row), Bytes.toBytes("family"),
|
||||
Bytes.toBytes("col"), now, Bytes.toBytes("value"));
|
||||
KeyValue kv = new KeyValue(Bytes.toBytes(row), Bytes.toBytes("family"), Bytes.toBytes("col"),
|
||||
now, Bytes.toBytes("value"));
|
||||
writer.append(kv);
|
||||
}
|
||||
writer.close();
|
||||
|
@ -573,7 +581,7 @@ public class TestHStoreFile extends HBaseTestCase {
|
|||
TreeSet<byte[]> columns = new TreeSet<>(Bytes.BYTES_COMPARATOR);
|
||||
columns.add(Bytes.toBytes("family:col"));
|
||||
|
||||
Scan scan = new Scan(Bytes.toBytes(row),Bytes.toBytes(row));
|
||||
Scan scan = new Scan().withStartRow(Bytes.toBytes(row)).withStopRow(Bytes.toBytes(row), true);
|
||||
scan.addColumn(Bytes.toBytes("family"), Bytes.toBytes("family:col"));
|
||||
HStore store = mock(HStore.class);
|
||||
when(store.getColumnFamilyDescriptor())
|
||||
|
@ -593,60 +601,48 @@ public class TestHStoreFile extends HBaseTestCase {
|
|||
fs.delete(f, true);
|
||||
assertEquals("False negatives: " + falseNeg, 0, falseNeg);
|
||||
int maxFalsePos = (int) (2 * 2000 * err);
|
||||
assertTrue("Too many false positives: " + falsePos + " (err=" + err + ", expected no more than "
|
||||
+ maxFalsePos + ")", falsePos <= maxFalsePos);
|
||||
assertTrue("Too many false positives: " + falsePos + " (err=" + err +
|
||||
", expected no more than " + maxFalsePos + ")", falsePos <= maxFalsePos);
|
||||
}
|
||||
|
||||
private static final int BLOCKSIZE_SMALL = 8192;
|
||||
|
||||
@Test
|
||||
public void testBloomFilter() throws Exception {
|
||||
FileSystem fs = FileSystem.getLocal(conf);
|
||||
conf.setFloat(BloomFilterFactory.IO_STOREFILE_BLOOM_ERROR_RATE, (float) 0.01);
|
||||
conf.setBoolean(BloomFilterFactory.IO_STOREFILE_BLOOM_ENABLED, true);
|
||||
|
||||
// write the file
|
||||
Path f = new Path(ROOT_DIR, getName());
|
||||
Path f = new Path(ROOT_DIR, name.getMethodName());
|
||||
HFileContext meta = new HFileContextBuilder().withBlockSize(BLOCKSIZE_SMALL)
|
||||
.withChecksumType(CKTYPE)
|
||||
.withBytesPerCheckSum(CKBYTES).build();
|
||||
.withChecksumType(CKTYPE).withBytesPerCheckSum(CKBYTES).build();
|
||||
// Make a store file and write data to it.
|
||||
StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs)
|
||||
.withFilePath(f)
|
||||
.withBloomType(BloomType.ROW)
|
||||
.withMaxKeyCount(2000)
|
||||
.withFileContext(meta)
|
||||
.build();
|
||||
StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs).withFilePath(f)
|
||||
.withBloomType(BloomType.ROW).withMaxKeyCount(2000).withFileContext(meta).build();
|
||||
bloomWriteRead(writer, fs);
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testDeleteFamilyBloomFilter() throws Exception {
|
||||
FileSystem fs = FileSystem.getLocal(conf);
|
||||
conf.setFloat(BloomFilterFactory.IO_STOREFILE_BLOOM_ERROR_RATE, (float) 0.01);
|
||||
conf.setBoolean(BloomFilterFactory.IO_STOREFILE_BLOOM_ENABLED, true);
|
||||
float err = conf.getFloat(BloomFilterFactory.IO_STOREFILE_BLOOM_ERROR_RATE, 0);
|
||||
|
||||
// write the file
|
||||
Path f = new Path(ROOT_DIR, getName());
|
||||
Path f = new Path(ROOT_DIR, name.getMethodName());
|
||||
|
||||
HFileContext meta = new HFileContextBuilder()
|
||||
.withBlockSize(BLOCKSIZE_SMALL)
|
||||
.withChecksumType(CKTYPE)
|
||||
.withBytesPerCheckSum(CKBYTES).build();
|
||||
HFileContext meta = new HFileContextBuilder().withBlockSize(BLOCKSIZE_SMALL)
|
||||
.withChecksumType(CKTYPE).withBytesPerCheckSum(CKBYTES).build();
|
||||
// Make a store file and write data to it.
|
||||
StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs)
|
||||
.withFilePath(f)
|
||||
.withMaxKeyCount(2000)
|
||||
.withFileContext(meta)
|
||||
.build();
|
||||
StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs).withFilePath(f)
|
||||
.withMaxKeyCount(2000).withFileContext(meta).build();
|
||||
|
||||
// add delete family
|
||||
long now = EnvironmentEdgeManager.currentTime();
|
||||
for (int i = 0; i < 2000; i += 2) {
|
||||
String row = String.format(localFormatter, i);
|
||||
KeyValue kv = new KeyValue(Bytes.toBytes(row), Bytes.toBytes("family"),
|
||||
Bytes.toBytes("col"), now, KeyValue.Type.DeleteFamily, Bytes.toBytes("value"));
|
||||
KeyValue kv = new KeyValue(Bytes.toBytes(row), Bytes.toBytes("family"), Bytes.toBytes("col"),
|
||||
now, KeyValue.Type.DeleteFamily, Bytes.toBytes("value"));
|
||||
writer.append(kv);
|
||||
}
|
||||
writer.close();
|
||||
|
@ -681,8 +677,8 @@ public class TestHStoreFile extends HBaseTestCase {
|
|||
fs.delete(f, true);
|
||||
assertEquals("False negatives: " + falseNeg, 0, falseNeg);
|
||||
int maxFalsePos = (int) (2 * 2000 * err);
|
||||
assertTrue("Too many false positives: " + falsePos + " (err=" + err
|
||||
+ ", expected no more than " + maxFalsePos, falsePos <= maxFalsePos);
|
||||
assertTrue("Too many false positives: " + falsePos + " (err=" + err +
|
||||
", expected no more than " + maxFalsePos, falsePos <= maxFalsePos);
|
||||
}
|
||||
|
||||
/**
|
||||
|
@ -691,13 +687,11 @@ public class TestHStoreFile extends HBaseTestCase {
|
|||
@Test
|
||||
public void testReseek() throws Exception {
|
||||
// write the file
|
||||
Path f = new Path(ROOT_DIR, getName());
|
||||
Path f = new Path(ROOT_DIR, name.getMethodName());
|
||||
HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build();
|
||||
// Make a store file and write data to it.
|
||||
StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs)
|
||||
.withFilePath(f)
|
||||
.withFileContext(meta)
|
||||
.build();
|
||||
StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs).withFilePath(f)
|
||||
.withFileContext(meta).build();
|
||||
|
||||
writeStoreFile(writer);
|
||||
writer.close();
|
||||
|
@ -729,48 +723,40 @@ public class TestHStoreFile extends HBaseTestCase {
|
|||
int versions = 2;
|
||||
|
||||
// run once using columns and once using rows
|
||||
BloomType[] bt = {BloomType.ROWCOL, BloomType.ROW};
|
||||
int[] expKeys = {rowCount*colCount, rowCount};
|
||||
BloomType[] bt = { BloomType.ROWCOL, BloomType.ROW };
|
||||
int[] expKeys = { rowCount * colCount, rowCount };
|
||||
// below line deserves commentary. it is expected bloom false positives
|
||||
// column = rowCount*2*colCount inserts
|
||||
// row-level = only rowCount*2 inserts, but failures will be magnified by
|
||||
// 2nd for loop for every column (2*colCount)
|
||||
float[] expErr = {2*rowCount*colCount*err, 2*rowCount*2*colCount*err};
|
||||
float[] expErr = { 2 * rowCount * colCount * err, 2 * rowCount * 2 * colCount * err };
|
||||
|
||||
for (int x : new int[]{0,1}) {
|
||||
for (int x : new int[] { 0, 1 }) {
|
||||
// write the file
|
||||
Path f = new Path(ROOT_DIR, getName() + x);
|
||||
Path f = new Path(ROOT_DIR, name.getMethodName() + x);
|
||||
HFileContext meta = new HFileContextBuilder().withBlockSize(BLOCKSIZE_SMALL)
|
||||
.withChecksumType(CKTYPE)
|
||||
.withBytesPerCheckSum(CKBYTES).build();
|
||||
.withChecksumType(CKTYPE).withBytesPerCheckSum(CKBYTES).build();
|
||||
// Make a store file and write data to it.
|
||||
StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs)
|
||||
.withFilePath(f)
|
||||
.withBloomType(bt[x])
|
||||
.withMaxKeyCount(expKeys[x])
|
||||
.withFileContext(meta)
|
||||
.build();
|
||||
StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs).withFilePath(f)
|
||||
.withBloomType(bt[x]).withMaxKeyCount(expKeys[x]).withFileContext(meta).build();
|
||||
|
||||
long now = EnvironmentEdgeManager.currentTime();
|
||||
for (int i = 0; i < rowCount * 2; i += 2) { // rows
|
||||
for (int j = 0; j < colCount * 2; j += 2) { // column qualifiers
|
||||
String row = String.format(localFormatter, i);
|
||||
String col = String.format(localFormatter, j);
|
||||
for (int k= 0; k < versions; ++k) { // versions
|
||||
for (int k = 0; k < versions; ++k) { // versions
|
||||
KeyValue kv = new KeyValue(Bytes.toBytes(row), Bytes.toBytes("family"),
|
||||
Bytes.toBytes("col" + col), now-k, Bytes.toBytes(-1L));
|
||||
Bytes.toBytes("col" + col), now - k, Bytes.toBytes(-1L));
|
||||
writer.append(kv);
|
||||
}
|
||||
}
|
||||
}
|
||||
writer.close();
|
||||
|
||||
ReaderContext context = new ReaderContextBuilder()
|
||||
.withFilePath(f)
|
||||
.withFileSize(fs.getFileStatus(f).getLen())
|
||||
.withFileSystem(fs)
|
||||
.withInputStreamWrapper(new FSDataInputStreamWrapper(fs, f))
|
||||
.build();
|
||||
ReaderContext context =
|
||||
new ReaderContextBuilder().withFilePath(f).withFileSize(fs.getFileStatus(f).getLen())
|
||||
.withFileSystem(fs).withInputStreamWrapper(new FSDataInputStreamWrapper(fs, f)).build();
|
||||
HFileInfo fileInfo = new HFileInfo(context, conf);
|
||||
StoreFileReader reader =
|
||||
new StoreFileReader(context, fileInfo, cacheConf, new AtomicInteger(0), conf);
|
||||
|
@ -786,18 +772,18 @@ public class TestHStoreFile extends HBaseTestCase {
|
|||
// check false positives rate
|
||||
int falsePos = 0;
|
||||
int falseNeg = 0;
|
||||
for (int i = 0; i < rowCount*2; ++i) { // rows
|
||||
for (int j = 0; j < colCount*2; ++j) { // column qualifiers
|
||||
for (int i = 0; i < rowCount * 2; ++i) { // rows
|
||||
for (int j = 0; j < colCount * 2; ++j) { // column qualifiers
|
||||
String row = String.format(localFormatter, i);
|
||||
String col = String.format(localFormatter, j);
|
||||
TreeSet<byte[]> columns = new TreeSet<>(Bytes.BYTES_COMPARATOR);
|
||||
columns.add(Bytes.toBytes("col" + col));
|
||||
|
||||
Scan scan = new Scan(Bytes.toBytes(row),Bytes.toBytes(row));
|
||||
scan.addColumn(Bytes.toBytes("family"), Bytes.toBytes(("col"+col)));
|
||||
Scan scan =
|
||||
new Scan().withStartRow(Bytes.toBytes(row)).withStopRow(Bytes.toBytes(row), true);
|
||||
scan.addColumn(Bytes.toBytes("family"), Bytes.toBytes(("col" + col)));
|
||||
|
||||
boolean exists =
|
||||
scanner.shouldUseScanner(scan, store, Long.MIN_VALUE);
|
||||
boolean exists = scanner.shouldUseScanner(scan, store, Long.MIN_VALUE);
|
||||
boolean shouldRowExist = i % 2 == 0;
|
||||
boolean shouldColExist = j % 2 == 0;
|
||||
shouldColExist = shouldColExist || bt[x] == BloomType.ROW;
|
||||
|
@ -818,25 +804,24 @@ public class TestHStoreFile extends HBaseTestCase {
|
|||
System.out.println(" False negatives: " + falseNeg);
|
||||
System.out.println(" False positives: " + falsePos);
|
||||
assertEquals(0, falseNeg);
|
||||
assertTrue(falsePos < 2*expErr[x]);
|
||||
assertTrue(falsePos < 2 * expErr[x]);
|
||||
}
|
||||
}
|
||||
|
||||
@Test
|
||||
public void testSeqIdComparator() {
|
||||
assertOrdering(StoreFileComparators.SEQ_ID, mockStoreFile(true, 100, 1000, -1, "/foo/123"),
|
||||
mockStoreFile(true, 100, 1000, -1, "/foo/124"),
|
||||
mockStoreFile(true, 99, 1000, -1, "/foo/126"),
|
||||
mockStoreFile(true, 100, 1000, -1, "/foo/124"), mockStoreFile(true, 99, 1000, -1, "/foo/126"),
|
||||
mockStoreFile(true, 98, 2000, -1, "/foo/126"), mockStoreFile(false, 3453, -1, 1, "/foo/1"),
|
||||
mockStoreFile(false, 2, -1, 3, "/foo/2"), mockStoreFile(false, 1000, -1, 5, "/foo/2"),
|
||||
mockStoreFile(false, 76, -1, 5, "/foo/3"));
|
||||
}
|
||||
|
||||
/**
|
||||
* Assert that the given comparator orders the given storefiles in the
|
||||
* same way that they're passed.
|
||||
* Assert that the given comparator orders the given storefiles in the same way that they're
|
||||
* passed.
|
||||
*/
|
||||
private void assertOrdering(Comparator<? super HStoreFile> comparator, HStoreFile ... sfs) {
|
||||
private void assertOrdering(Comparator<? super HStoreFile> comparator, HStoreFile... sfs) {
|
||||
ArrayList<HStoreFile> sorted = Lists.newArrayList(sfs);
|
||||
Collections.shuffle(sorted);
|
||||
Collections.sort(sorted, comparator);
|
||||
|
@ -848,10 +833,7 @@ public class TestHStoreFile extends HBaseTestCase {
|
|||
/**
|
||||
* Create a mock StoreFile with the given attributes.
|
||||
*/
|
||||
private HStoreFile mockStoreFile(boolean bulkLoad,
|
||||
long size,
|
||||
long bulkTimestamp,
|
||||
long seqId,
|
||||
private HStoreFile mockStoreFile(boolean bulkLoad, long size, long bulkTimestamp, long seqId,
|
||||
String path) {
|
||||
HStoreFile mock = Mockito.mock(HStoreFile.class);
|
||||
StoreFileReader reader = Mockito.mock(StoreFileReader.class);
|
||||
|
@ -863,10 +845,8 @@ public class TestHStoreFile extends HBaseTestCase {
|
|||
Mockito.doReturn(OptionalLong.of(bulkTimestamp)).when(mock).getBulkLoadTimestamp();
|
||||
Mockito.doReturn(seqId).when(mock).getMaxSequenceId();
|
||||
Mockito.doReturn(new Path(path)).when(mock).getPath();
|
||||
String name = "mock storefile, bulkLoad=" + bulkLoad +
|
||||
" bulkTimestamp=" + bulkTimestamp +
|
||||
" seqId=" + seqId +
|
||||
" path=" + path;
|
||||
String name = "mock storefile, bulkLoad=" + bulkLoad + " bulkTimestamp=" + bulkTimestamp +
|
||||
" seqId=" + seqId + " path=" + path;
|
||||
Mockito.doReturn(name).when(mock).toString();
|
||||
return mock;
|
||||
}
|
||||
|
@ -875,14 +855,13 @@ public class TestHStoreFile extends HBaseTestCase {
|
|||
* Generate a list of KeyValues for testing based on given parameters
|
||||
* @return the rows key-value list
|
||||
*/
|
||||
List<KeyValue> getKeyValueSet(long[] timestamps, int numRows,
|
||||
byte[] qualifier, byte[] family) {
|
||||
List<KeyValue> getKeyValueSet(long[] timestamps, int numRows, byte[] qualifier, byte[] family) {
|
||||
List<KeyValue> kvList = new ArrayList<>();
|
||||
for (int i=1;i<=numRows;i++) {
|
||||
byte[] b = Bytes.toBytes(i) ;
|
||||
for (int i = 1; i <= numRows; i++) {
|
||||
byte[] b = Bytes.toBytes(i);
|
||||
LOG.info(Bytes.toString(b));
|
||||
LOG.info(Bytes.toString(b));
|
||||
for (long timestamp: timestamps) {
|
||||
for (long timestamp : timestamps) {
|
||||
kvList.add(new KeyValue(b, family, qualifier, timestamp, b));
|
||||
}
|
||||
}
|
||||
|
@ -897,7 +876,7 @@ public class TestHStoreFile extends HBaseTestCase {
|
|||
byte[] family = Bytes.toBytes("familyname");
|
||||
byte[] qualifier = Bytes.toBytes("qualifier");
|
||||
int numRows = 10;
|
||||
long[] timestamps = new long[] {20,10,5,1};
|
||||
long[] timestamps = new long[] { 20, 10, 5, 1 };
|
||||
Scan scan = new Scan();
|
||||
|
||||
// Make up a directory hierarchy that has a regiondir ("7e0102") and familyname.
|
||||
|
@ -906,12 +885,9 @@ public class TestHStoreFile extends HBaseTestCase {
|
|||
HFileContext meta = new HFileContextBuilder().withBlockSize(8 * 1024).build();
|
||||
// Make a store file and write data to it.
|
||||
StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs)
|
||||
.withOutputDir(dir)
|
||||
.withFileContext(meta)
|
||||
.build();
|
||||
.withOutputDir(dir).withFileContext(meta).build();
|
||||
|
||||
List<KeyValue> kvList = getKeyValueSet(timestamps,numRows,
|
||||
qualifier, family);
|
||||
List<KeyValue> kvList = getKeyValueSet(timestamps, numRows, qualifier, family);
|
||||
|
||||
for (KeyValue kv : kvList) {
|
||||
writer.append(kv);
|
||||
|
@ -919,8 +895,8 @@ public class TestHStoreFile extends HBaseTestCase {
|
|||
writer.appendMetadata(0, false);
|
||||
writer.close();
|
||||
|
||||
HStoreFile hsf = new HStoreFile(this.fs, writer.getPath(), conf, cacheConf,
|
||||
BloomType.NONE, true);
|
||||
HStoreFile hsf =
|
||||
new HStoreFile(this.fs, writer.getPath(), conf, cacheConf, BloomType.NONE, true);
|
||||
HStore store = mock(HStore.class);
|
||||
when(store.getColumnFamilyDescriptor()).thenReturn(ColumnFamilyDescriptorBuilder.of(family));
|
||||
hsf.initReader();
|
||||
|
@ -959,7 +935,7 @@ public class TestHStoreFile extends HBaseTestCase {
|
|||
Configuration conf = this.conf;
|
||||
|
||||
// Find a home for our files (regiondir ("7e0102") and familyname).
|
||||
Path baseDir = new Path(new Path(testDir, "7e0102"),"twoCOWEOC");
|
||||
Path baseDir = new Path(new Path(testDir, "7e0102"), "twoCOWEOC");
|
||||
|
||||
// Grab the block cache and get the initial hit/miss counts
|
||||
BlockCache bc = BlockCacheFactory.createBlockCache(conf);
|
||||
|
@ -974,8 +950,8 @@ public class TestHStoreFile extends HBaseTestCase {
|
|||
CacheConfig cacheConf = new CacheConfig(conf, bc);
|
||||
Path pathCowOff = new Path(baseDir, "123456789");
|
||||
StoreFileWriter writer = writeStoreFile(conf, cacheConf, pathCowOff, 3);
|
||||
HStoreFile hsf = new HStoreFile(this.fs, writer.getPath(), conf, cacheConf,
|
||||
BloomType.NONE, true);
|
||||
HStoreFile hsf =
|
||||
new HStoreFile(this.fs, writer.getPath(), conf, cacheConf, BloomType.NONE, true);
|
||||
LOG.debug(hsf.getPath().toString());
|
||||
|
||||
// Read this file, we should see 3 misses
|
||||
|
@ -999,8 +975,7 @@ public class TestHStoreFile extends HBaseTestCase {
|
|||
cacheConf = new CacheConfig(conf, bc);
|
||||
Path pathCowOn = new Path(baseDir, "123456788");
|
||||
writer = writeStoreFile(conf, cacheConf, pathCowOn, 3);
|
||||
hsf = new HStoreFile(this.fs, writer.getPath(), conf, cacheConf,
|
||||
BloomType.NONE, true);
|
||||
hsf = new HStoreFile(this.fs, writer.getPath(), conf, cacheConf, BloomType.NONE, true);
|
||||
|
||||
// Read this file, we should see 3 hits
|
||||
hsf.initReader();
|
||||
|
@ -1037,11 +1012,9 @@ public class TestHStoreFile extends HBaseTestCase {
|
|||
assertTrue(kv1.equals(kv2));
|
||||
KeyValue keyv1 = KeyValueUtil.ensureKeyValue(kv1);
|
||||
KeyValue keyv2 = KeyValueUtil.ensureKeyValue(kv2);
|
||||
assertTrue(Bytes.compareTo(
|
||||
keyv1.getBuffer(), keyv1.getKeyOffset(), keyv1.getKeyLength(),
|
||||
assertTrue(Bytes.compareTo(keyv1.getBuffer(), keyv1.getKeyOffset(), keyv1.getKeyLength(),
|
||||
keyv2.getBuffer(), keyv2.getKeyOffset(), keyv2.getKeyLength()) == 0);
|
||||
assertTrue(Bytes.compareTo(
|
||||
kv1.getValueArray(), kv1.getValueOffset(), kv1.getValueLength(),
|
||||
assertTrue(Bytes.compareTo(kv1.getValueArray(), kv1.getValueOffset(), kv1.getValueLength(),
|
||||
kv2.getValueArray(), kv2.getValueOffset(), kv2.getValueLength()) == 0);
|
||||
}
|
||||
assertNull(scannerTwo.next());
|
||||
|
@ -1082,10 +1055,9 @@ public class TestHStoreFile extends HBaseTestCase {
|
|||
assertEquals(startEvicted, cs.getEvictedCount());
|
||||
}
|
||||
|
||||
private Path splitStoreFile(final HRegionFileSystem regionFs, final HRegionInfo hri,
|
||||
private Path splitStoreFile(final HRegionFileSystem regionFs, final RegionInfo hri,
|
||||
final String family, final HStoreFile sf, final byte[] splitKey, boolean isTopRef)
|
||||
throws IOException {
|
||||
FileSystem fs = regionFs.getFileSystem();
|
||||
Path path = regionFs.splitStoreFile(hri, family, sf, splitKey, isTopRef, null);
|
||||
if (null == path) {
|
||||
return null;
|
||||
|
@ -1099,27 +1071,22 @@ public class TestHStoreFile extends HBaseTestCase {
|
|||
// Let's put ~5 small KVs in each block, so let's make 5*numBlocks KVs
|
||||
int numKVs = 5 * numBlocks;
|
||||
List<KeyValue> kvs = new ArrayList<>(numKVs);
|
||||
byte [] b = Bytes.toBytes("x");
|
||||
byte[] b = Bytes.toBytes("x");
|
||||
int totalSize = 0;
|
||||
for (int i=numKVs;i>0;i--) {
|
||||
for (int i = numKVs; i > 0; i--) {
|
||||
KeyValue kv = new KeyValue(b, b, b, i, b);
|
||||
kvs.add(kv);
|
||||
// kv has memstoreTS 0, which takes 1 byte to store.
|
||||
totalSize += kv.getLength() + 1;
|
||||
}
|
||||
int blockSize = totalSize / numBlocks;
|
||||
HFileContext meta = new HFileContextBuilder().withBlockSize(blockSize)
|
||||
.withChecksumType(CKTYPE)
|
||||
.withBytesPerCheckSum(CKBYTES)
|
||||
.build();
|
||||
HFileContext meta = new HFileContextBuilder().withBlockSize(blockSize).withChecksumType(CKTYPE)
|
||||
.withBytesPerCheckSum(CKBYTES).build();
|
||||
// Make a store file and write data to it.
|
||||
StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs)
|
||||
.withFilePath(path)
|
||||
.withMaxKeyCount(2000)
|
||||
.withFileContext(meta)
|
||||
.build();
|
||||
.withFilePath(path).withMaxKeyCount(2000).withFileContext(meta).build();
|
||||
// We'll write N-1 KVs to ensure we don't write an extra block
|
||||
kvs.remove(kvs.size()-1);
|
||||
kvs.remove(kvs.size() - 1);
|
||||
for (KeyValue kv : kvs) {
|
||||
writer.append(kv);
|
||||
}
|
||||
|
@ -1129,8 +1096,7 @@ public class TestHStoreFile extends HBaseTestCase {
|
|||
}
|
||||
|
||||
/**
|
||||
* Check if data block encoding information is saved correctly in HFile's
|
||||
* file info.
|
||||
* Check if data block encoding information is saved correctly in HFile's file info.
|
||||
*/
|
||||
@Test
|
||||
public void testDataBlockEncodingMetaData() throws IOException {
|
||||
|
@ -1138,23 +1104,14 @@ public class TestHStoreFile extends HBaseTestCase {
|
|||
Path dir = new Path(new Path(testDir, "7e0102"), "familyname");
|
||||
Path path = new Path(dir, "1234567890");
|
||||
|
||||
DataBlockEncoding dataBlockEncoderAlgo =
|
||||
DataBlockEncoding.FAST_DIFF;
|
||||
HFileDataBlockEncoder dataBlockEncoder =
|
||||
new HFileDataBlockEncoderImpl(
|
||||
dataBlockEncoderAlgo);
|
||||
DataBlockEncoding dataBlockEncoderAlgo = DataBlockEncoding.FAST_DIFF;
|
||||
cacheConf = new CacheConfig(conf);
|
||||
HFileContext meta = new HFileContextBuilder().withBlockSize(BLOCKSIZE_SMALL)
|
||||
.withChecksumType(CKTYPE)
|
||||
.withBytesPerCheckSum(CKBYTES)
|
||||
.withDataBlockEncoding(dataBlockEncoderAlgo)
|
||||
.build();
|
||||
HFileContext meta =
|
||||
new HFileContextBuilder().withBlockSize(BLOCKSIZE_SMALL).withChecksumType(CKTYPE)
|
||||
.withBytesPerCheckSum(CKBYTES).withDataBlockEncoding(dataBlockEncoderAlgo).build();
|
||||
// Make a store file and write data to it.
|
||||
StoreFileWriter writer = new StoreFileWriter.Builder(conf, cacheConf, this.fs)
|
||||
.withFilePath(path)
|
||||
.withMaxKeyCount(2000)
|
||||
.withFileContext(meta)
|
||||
.build();
|
||||
.withFilePath(path).withMaxKeyCount(2000).withFileContext(meta).build();
|
||||
writer.close();
|
||||
|
||||
HStoreFile storeFile =
|
||||
|
@ -1164,6 +1121,6 @@ public class TestHStoreFile extends HBaseTestCase {
|
|||
|
||||
Map<byte[], byte[]> fileInfo = reader.loadFileInfo();
|
||||
byte[] value = fileInfo.get(HFileDataBlockEncoder.DATA_BLOCK_ENCODING);
|
||||
assertEquals(dataBlockEncoderAlgo.getNameInBytes(), value);
|
||||
assertArrayEquals(dataBlockEncoderAlgo.getNameInBytes(), value);
|
||||
}
|
||||
}
|
||||
|
|
|
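Both TestHStoreFile (above) and TestKeyValueHeap (below) stop extending HBaseTestCase in this patch, and the pieces of the replacement JUnit 4 lifecycle are spread across several hunks. A consolidated sketch of that lifecycle, under the assumption that the class already declares a shared HBaseTestingUtility named TEST_UTIL as in TestHStoreFile (the class name here is illustrative; the fields and calls are the ones visible in the hunks above):

```java
import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.Rule;
import org.junit.rules.TestName;

public class JUnit4LifecycleSketch {
  private static final HBaseTestingUtility TEST_UTIL = new HBaseTestingUtility();

  @Rule
  public TestName name = new TestName();

  private Configuration conf;
  private Path testDir;
  private FileSystem fs;

  // Replaces HBaseTestCase.setUp(): each test method gets its own data dir.
  @Before
  public void setUp() throws IOException {
    conf = TEST_UTIL.getConfiguration();
    testDir = TEST_UTIL.getDataTestDir(name.getMethodName());
    fs = testDir.getFileSystem(conf);
  }

  // Replaces HBaseTestCase.tearDown(): clean the shared test dir once per class.
  @AfterClass
  public static void tearDownAfterClass() {
    TEST_UTIL.cleanupTestDir();
  }
}
```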
@@ -17,6 +17,10 @@
 */
package org.apache.hadoop.hbase.regionserver;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThrows;
import static org.junit.Assert.assertTrue;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;

@@ -24,19 +28,17 @@ import java.util.List;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellComparatorImpl;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestCase;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.CollectionBackedScanner;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;

@Category({RegionServerTests.class, SmallTests.class})
public class TestKeyValueHeap extends HBaseTestCase {
@Category({ RegionServerTests.class, SmallTests.class })
public class TestKeyValueHeap {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
@ -74,67 +76,59 @@ public class TestKeyValueHeap extends HBaseTestCase {
List<KeyValueScanner> scanners = new ArrayList<>(Arrays.asList(s1, s2, s3));

/*
* Uses {@code scanners} to build a KeyValueHeap, iterates over it and asserts that returned
* Cells are same as {@code expected}.
* Uses {@code scanners} to build a KeyValueHeap, iterates over it and asserts that returned Cells
* are same as {@code expected}.
* @return List of Cells returned from scanners.
*/
public List<Cell> assertCells(List<Cell> expected, List<KeyValueScanner> scanners)
throws IOException {
//Creating KeyValueHeap
KeyValueHeap kvh = new KeyValueHeap(scanners, CellComparatorImpl.COMPARATOR);

// Creating KeyValueHeap
try (KeyValueHeap kvh = new KeyValueHeap(scanners, CellComparatorImpl.COMPARATOR)) {
List<Cell> actual = new ArrayList<>();
while(kvh.peek() != null){
while (kvh.peek() != null) {
actual.add(kvh.next());
}

assertEquals(expected, actual);
return actual;
}

@Override
@Before
public void setUp() throws Exception {
super.setUp();
}

@Test
public void testSorted() throws IOException{
//Cases that need to be checked are:
//1. The "smallest" Cell is in the same scanners as current
//2. Current scanner gets empty
public void testSorted() throws IOException {
// Cases that need to be checked are:
// 1. The "smallest" Cell is in the same scanners as current
// 2. Current scanner gets empty

List<Cell> expected = Arrays.asList(
kv111, kv112, kv113, kv114, kv115, kv121, kv122, kv211, kv212, kv213);
List<Cell> expected =
Arrays.asList(kv111, kv112, kv113, kv114, kv115, kv121, kv122, kv211, kv212, kv213);

List<Cell> actual = assertCells(expected, scanners);

//Check if result is sorted according to Comparator
for(int i=0; i<actual.size()-1; i++){
int ret = CellComparatorImpl.COMPARATOR.compare(actual.get(i), actual.get(i+1));
// Check if result is sorted according to Comparator
for (int i = 0; i < actual.size() - 1; i++) {
int ret = CellComparatorImpl.COMPARATOR.compare(actual.get(i), actual.get(i + 1));
assertTrue(ret < 0);
}
}

@Test
public void testSeek() throws IOException {
//Cases:
//1. Seek Cell that is not in scanner
//2. Check that smallest that is returned from a seek is correct

// Cases:
// 1. Seek Cell that is not in scanner
// 2. Check that smallest that is returned from a seek is correct
List<Cell> expected = Arrays.asList(kv211);

//Creating KeyValueHeap
KeyValueHeap kvh =
new KeyValueHeap(scanners, CellComparatorImpl.COMPARATOR);

// Creating KeyValueHeap
try (KeyValueHeap kvh = new KeyValueHeap(scanners, CellComparatorImpl.COMPARATOR)) {
Cell seekKv = new KeyValue(row2, fam1, null, null);
kvh.seek(seekKv);

List<Cell> actual = Arrays.asList(kvh.peek());

assertEquals("Expected = " + Arrays.toString(expected.toArray())
+ "\n Actual = " + Arrays.toString(actual.toArray()), expected, actual);
assertEquals("Expected = " + Arrays.toString(expected.toArray()) + "\n Actual = " +
Arrays.toString(actual.toArray()), expected, actual);
}
}

@Test
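A note on the recurring pattern in the hunk above: the KeyValueHeap is now created in a try-with-resources block so it is closed even when an assertion fails mid-test. The following stand-alone sketch shows the same shape with an invented Closeable stand-in; it is illustrative only, not HBase code.

import java.io.Closeable;

// Illustrative only: the shape of the try-with-resources refactor applied above,
// with an invented Closeable "scanner" standing in for KeyValueHeap.
public class TryWithResourcesSketch {

  static class DemoScanner implements Closeable {
    private int next = 0;

    // Returns 0, 1, 2 and then -1 once exhausted.
    int next() {
      return next < 3 ? next++ : -1;
    }

    @Override
    public void close() {
      System.out.println("scanner closed");
    }
  }

  public static void main(String[] args) {
    // The scanner is closed automatically even if an assertion or exception
    // fires inside the block -- the guarantee the rewritten tests rely on.
    try (DemoScanner scanner = new DemoScanner()) {
      int v;
      while ((v = scanner.next()) != -1) {
        System.out.println("got " + v);
      }
    }
  }
}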
@ -144,20 +138,25 @@ public class TestKeyValueHeap extends HBaseTestCase {
TestScanner s4 = new TestScanner(new ArrayList<>());
scanners.add(s4);

//Creating KeyValueHeap
KeyValueHeap kvh = new KeyValueHeap(scanners, CellComparatorImpl.COMPARATOR);

while(kvh.next() != null);
// Creating KeyValueHeap
try (KeyValueHeap kvh = new KeyValueHeap(scanners, CellComparatorImpl.COMPARATOR)) {
for (;;) {
if (kvh.next() == null) {
break;
}
}
// Once the internal scanners go out of Cells, those will be removed from KVHeap's priority
// queue and added to a Set for lazy close. The actual close will happen only on KVHeap#close()
// queue and added to a Set for lazy close. The actual close will happen only on
// KVHeap#close()
assertEquals(4, kvh.scannersForDelayedClose.size());
assertTrue(kvh.scannersForDelayedClose.contains(s1));
assertTrue(kvh.scannersForDelayedClose.contains(s2));
assertTrue(kvh.scannersForDelayedClose.contains(s3));
assertTrue(kvh.scannersForDelayedClose.contains(s4));
kvh.close();
for(KeyValueScanner scanner : scanners) {
assertTrue(((TestScanner)scanner).isClosed());
}

for (KeyValueScanner scanner : scanners) {
assertTrue(((TestScanner) scanner).isClosed());
}
}
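The scannersForDelayedClose assertions above exercise the lazy-close behaviour described in the in-line comment: an exhausted scanner leaves the priority queue but is only really closed when the heap itself is closed. A minimal self-contained sketch of that bookkeeping, with invented names, is below; it mirrors the idea, not the actual KeyValueHeap implementation.

import java.io.Closeable;
import java.util.ArrayDeque;
import java.util.HashSet;
import java.util.Queue;
import java.util.Set;

// Illustrative only: a heap-like wrapper that parks exhausted sources in a set
// ("delayed close") and only really closes them when the wrapper is closed.
public class DelayedCloseSketch implements Closeable {

  // A trivial closeable source of ints.
  static class Source implements Closeable {
    private final Queue<Integer> values = new ArrayDeque<>();
    boolean closed = false;

    Source(int... vs) {
      for (int v : vs) {
        values.add(v);
      }
    }

    Integer poll() {
      return values.poll(); // null once exhausted
    }

    @Override
    public void close() {
      closed = true;
    }
  }

  private final Queue<Source> active = new ArrayDeque<>();
  final Set<Source> delayedClose = new HashSet<>();

  void add(Source s) {
    active.add(s);
  }

  Integer next() {
    while (!active.isEmpty()) {
      Integer v = active.peek().poll();
      if (v != null) {
        return v;
      }
      // Exhausted source: drop it from the active queue but defer the close.
      delayedClose.add(active.remove());
    }
    return null;
  }

  @Override
  public void close() {
    for (Source s : delayedClose) {
      s.close(); // the actual close happens only here
    }
    delayedClose.clear();
  }

  public static void main(String[] args) {
    try (DelayedCloseSketch heap = new DelayedCloseSketch()) {
      heap.add(new Source(1, 2));
      heap.add(new Source(3));
      while (heap.next() != null) {
        // drain, mirroring the "for (;;) { if (kvh.next() == null) break; }" loop above
      }
      System.out.println("parked for delayed close: " + heap.delayedClose.size());
    }
  }
}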
@ -173,19 +172,19 @@ public class TestKeyValueHeap extends HBaseTestCase {
List<KeyValueScanner> scanners = new ArrayList<>(Arrays.asList(s1, s2, s3, s4));

// Creating KeyValueHeap
KeyValueHeap kvh = new KeyValueHeap(scanners, CellComparatorImpl.COMPARATOR);

try {
try (KeyValueHeap kvh = new KeyValueHeap(scanners, CellComparatorImpl.COMPARATOR)) {
for (KeyValueScanner scanner : scanners) {
((SeekTestScanner) scanner).setRealSeekDone(false);
}
while (kvh.next() != null);
// The pollRealKV should throw IOE.
assertTrue(false);
} catch (IOException ioe) {
kvh.close();
assertThrows(IOException.class, () -> {
for (;;) {
if (kvh.next() == null) {
break;
}
}
});
}

// It implies there is no NPE thrown from kvh.close() if getting here
for (KeyValueScanner scanner : scanners) {
// Verify that close is called and only called once for each scanner
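The hunk above swaps the try/catch-plus-assertTrue(false) idiom for assertThrows. A small stand-alone usage sketch follows, assuming JUnit 4.13+ on the classpath; the helper method in it is invented for illustration.

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertThrows;

import java.io.IOException;
import org.junit.Test;

public class AssertThrowsSketch {

  // A stand-in for the failing scanner in the test above.
  static int nextOrFail() throws IOException {
    throw new IOException("simulated seek failure");
  }

  @Test
  public void failureIsReportedAsIOException() {
    // assertThrows runs the lambda and fails the test unless the expected
    // exception type is thrown; it returns the exception for further checks.
    IOException ioe = assertThrows(IOException.class, () -> {
      for (;;) {
        if (nextOrFail() < 0) {
          break;
        }
      }
    });
    assertEquals("simulated seek failure", ioe.getMessage());
  }
}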
@ -198,18 +197,15 @@ public class TestKeyValueHeap extends HBaseTestCase {
public void testPriorityId() throws IOException {
Cell kv113A = new KeyValue(row1, fam1, col3, Bytes.toBytes("aaa"));
Cell kv113B = new KeyValue(row1, fam1, col3, Bytes.toBytes("bbb"));
{
TestScanner scan1 = new TestScanner(Arrays.asList(kv111, kv112, kv113A), 1);
TestScanner scan2 = new TestScanner(Arrays.asList(kv113B), 2);
List<Cell> expected = Arrays.asList(kv111, kv112, kv113B, kv113A);
assertCells(expected, new ArrayList<>(Arrays.asList(scan1, scan2)));
}
{
TestScanner scan1 = new TestScanner(Arrays.asList(kv111, kv112, kv113A), 2);
TestScanner scan2 = new TestScanner(Arrays.asList(kv113B), 1);
List<Cell> expected = Arrays.asList(kv111, kv112, kv113A, kv113B);
assertCells(expected, new ArrayList<>(Arrays.asList(scan1, scan2)));
}
assertCells(expected, Arrays.asList(scan1, scan2));

scan1 = new TestScanner(Arrays.asList(kv111, kv112, kv113A), 2);
scan2 = new TestScanner(Arrays.asList(kv113B), 1);
expected = Arrays.asList(kv111, kv112, kv113A, kv113B);
assertCells(expected, Arrays.asList(scan1, scan2));
}

private static class TestScanner extends CollectionBackedScanner {
@ -231,7 +227,7 @@ public class TestKeyValueHeap extends HBaseTestCase {
}

@Override
public void close(){
public void close() {
closed = true;
}
@ -37,11 +37,13 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestCase;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.HTestConst;
import org.apache.hadoop.hbase.KeepDeletedCells;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Result;
@ -75,7 +77,7 @@ import org.slf4j.LoggerFactory;
/**
* Test major compactions
*/
@Category({RegionServerTests.class, LargeTests.class})
@Category({ RegionServerTests.class, LargeTests.class })
@RunWith(Parameterized.class)
public class TestMajorCompaction {
@ -87,16 +89,18 @@ public class TestMajorCompaction {
public static Object[] data() {
return new Object[] { "NONE", "BASIC", "EAGER" };
}
@Rule public TestName name;

@Rule
public TestName name;
private static final Logger LOG = LoggerFactory.getLogger(TestMajorCompaction.class.getName());
private static final HBaseTestingUtility UTIL = HBaseTestingUtility.createLocalHTU();
protected Configuration conf = UTIL.getConfiguration();

private HRegion r = null;
private HTableDescriptor htd = null;
private static final byte [] COLUMN_FAMILY = fam1;
private final byte [] STARTROW = Bytes.toBytes(START_KEY);
private static final byte [] COLUMN_FAMILY_TEXT = COLUMN_FAMILY;
private static final byte[] COLUMN_FAMILY = fam1;
private final byte[] STARTROW = Bytes.toBytes(START_KEY);
private static final byte[] COLUMN_FAMILY_TEXT = COLUMN_FAMILY;
private int compactionThreshold;
private byte[] secondRowBytes, thirdRowBytes;
private static final long MAX_FILES_TO_COMPACT = 10;
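TestMajorCompaction runs under the Parameterized runner with the compacting-memstore type as the parameter, and uses a TestName rule to derive per-method table names. A self-contained sketch of that wiring is below; the class and assertions are illustrative, assuming plain JUnit 4, and are not the HBase test itself.

import static org.junit.Assert.assertNotNull;

import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TestName;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;

@RunWith(Parameterized.class)
public class ParameterizedSketch {

  // One test instance is created per element returned here.
  @Parameters(name = "compaction={0}")
  public static Object[] data() {
    return new Object[] { "NONE", "BASIC", "EAGER" };
  }

  // Gives access to the current method name, e.g. for per-test table names.
  @Rule
  public TestName name = new TestName();

  private final String compType;

  public ParameterizedSketch(String compType) {
    this.compType = compType;
  }

  @Test
  public void runsOncePerParameter() {
    assertNotNull(compType);
    // Something like name.getMethodName().replace('[', 'i').replace(']', 'i')
    // is how the test above turns "runsOncePerParameter[compaction=BASIC]"
    // into a legal table name.
    assertNotNull(name.getMethodName());
  }
}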
@ -106,7 +110,7 @@ public class TestMajorCompaction {
super();
name = new TestName();
// Set cache flush size to 1MB
conf.setInt(HConstants.HREGION_MEMSTORE_FLUSH_SIZE, 1024*1024);
conf.setInt(HConstants.HREGION_MEMSTORE_FLUSH_SIZE, 1024 * 1024);
conf.setInt(HConstants.HREGION_MEMSTORE_BLOCK_MULTIPLIER, 100);
compactionThreshold = conf.getInt("hbase.hstore.compactionThreshold", 3);
conf.set(CompactingMemStore.COMPACTING_MEMSTORE_TYPE_KEY, String.valueOf(compType));
@ -121,21 +125,24 @@ public class TestMajorCompaction {
@Before
public void setUp() throws Exception {
this.htd = UTIL.createTableDescriptor(name.getMethodName().replace('[','i').replace(']','i'));
this.htd = UTIL.createTableDescriptor(
TableName.valueOf(name.getMethodName().replace('[', 'i').replace(']', 'i')),
ColumnFamilyDescriptorBuilder.DEFAULT_MIN_VERSIONS, 3, HConstants.FOREVER,
ColumnFamilyDescriptorBuilder.DEFAULT_KEEP_DELETED);
this.r = UTIL.createLocalHRegion(htd, null, null);
}

@After
public void tearDown() throws Exception {
WAL wal = ((HRegion)r).getWAL();
((HRegion)r).close();
WAL wal = ((HRegion) r).getWAL();
((HRegion) r).close();
wal.close();
}

/**
* Test that on a major compaction, if all cells are expired or deleted, then
* we'll end up with no product. Make sure scanner over region returns
* right answer in this case - and that it just basically works.
* Test that on a major compaction, if all cells are expired or deleted, then we'll end up with no
* product. Make sure scanner over region returns right answer in this case - and that it just
* basically works.
* @throws IOException exception encountered
*/
@Test
@ -153,8 +160,7 @@ public class TestMajorCompaction {
}

/**
* Run compaction and flushing memstore
* Assert deletes get cleaned up.
* Run compaction and flushing memstore Assert deletes get cleaned up.
* @throws Exception
*/
@Test
@ -172,23 +178,21 @@ public class TestMajorCompaction {
majorCompactionWithDataBlockEncoding(false);
}

public void majorCompactionWithDataBlockEncoding(boolean inCacheOnly)
throws Exception {
public void majorCompactionWithDataBlockEncoding(boolean inCacheOnly) throws Exception {
Map<HStore, HFileDataBlockEncoder> replaceBlockCache = new HashMap<>();
for (HStore store : r.getStores()) {
HFileDataBlockEncoder blockEncoder = store.getDataBlockEncoder();
replaceBlockCache.put(store, blockEncoder);
final DataBlockEncoding inCache = DataBlockEncoding.PREFIX;
final DataBlockEncoding onDisk = inCacheOnly ? DataBlockEncoding.NONE :
inCache;
((HStore)store).setDataBlockEncoderInTest(new HFileDataBlockEncoderImpl(onDisk));
final DataBlockEncoding onDisk = inCacheOnly ? DataBlockEncoding.NONE : inCache;
((HStore) store).setDataBlockEncoderInTest(new HFileDataBlockEncoderImpl(onDisk));
}

majorCompaction();

// restore settings
for (Entry<HStore, HFileDataBlockEncoder> entry : replaceBlockCache.entrySet()) {
((HStore)entry.getKey()).setDataBlockEncoderInTest(entry.getValue());
((HStore) entry.getKey()).setDataBlockEncoderInTest(entry.getValue());
}
}
@ -198,7 +202,7 @@ public class TestMajorCompaction {
createStoreFile(r);
}
// Add more content.
HBaseTestCase.addContent(new RegionAsTable(r), Bytes.toString(COLUMN_FAMILY));
HTestConst.addContent(new RegionAsTable(r), Bytes.toString(COLUMN_FAMILY));

// Now there are about 5 versions of each column.
// Default is that there only 3 (MAXVERSIONS) versions allowed per column.
@ -219,7 +223,7 @@ public class TestMajorCompaction {
int storeCount = 0;
for (HStore store : r.getStores()) {
CompactionProgress progress = store.getCompactionProgress();
if( progress != null ) {
if (progress != null) {
++storeCount;
assertTrue(progress.currentCompactedKVs > 0);
assertTrue(progress.getTotalCompactingKVs() > 0);
@ -229,16 +233,15 @@ public class TestMajorCompaction {
// look at the second row
// Increment the least significant character so we get to next row.
byte [] secondRowBytes = START_KEY_BYTES.clone();
byte[] secondRowBytes = START_KEY_BYTES.clone();
secondRowBytes[START_KEY_BYTES.length - 1]++;

// Always 3 versions if that is what max versions is.
result = r.get(new Get(secondRowBytes).addFamily(COLUMN_FAMILY_TEXT).readVersions(100));
LOG.debug("Row " + Bytes.toStringBinary(secondRowBytes) + " after " +
"initial compaction: " + result);
assertEquals("Invalid number of versions of row "
+ Bytes.toStringBinary(secondRowBytes) + ".", compactionThreshold,
result.size());
LOG.debug(
"Row " + Bytes.toStringBinary(secondRowBytes) + " after " + "initial compaction: " + result);
assertEquals("Invalid number of versions of row " + Bytes.toStringBinary(secondRowBytes) + ".",
compactionThreshold, result.size());

// Now add deletes to memstore and then flush it.
// That will put us over
@ -277,7 +280,7 @@ public class TestMajorCompaction {
// Make sure the store files do have some 'aaa' keys in them -- exactly 3.
// Also, that compacted store files do not have any secondRowBytes because
// they were deleted.
verifyCounts(3,0);
verifyCounts(3, 0);

// Multiple versions allowed for an entry, so the delete isn't enough
// Lower TTL and expire to ensure that all our entries have been wiped
@ -315,8 +318,8 @@ public class TestMajorCompaction {
assertEquals(2, s.getStorefilesCount());

// ensure that major compaction time is deterministic
RatioBasedCompactionPolicy
c = (RatioBasedCompactionPolicy)s.storeEngine.getCompactionPolicy();
RatioBasedCompactionPolicy c =
(RatioBasedCompactionPolicy) s.storeEngine.getCompactionPolicy();
Collection<HStoreFile> storeFiles = s.getStorefiles();
long mcTime = c.getNextMajorCompactTime(storeFiles);
for (int i = 0; i < 10; ++i) {
@ -335,7 +338,7 @@ public class TestMajorCompaction {
assertEquals(1, s.getStorefilesCount());
} finally {
// reset the timed compaction settings
conf.setLong(HConstants.MAJOR_COMPACTION_PERIOD, 1000*60*60*24);
conf.setLong(HConstants.MAJOR_COMPACTION_PERIOD, 1000 * 60 * 60 * 24);
conf.setFloat("hbase.hregion.majorcompaction.jitter", 0.20F);
// run a major to reset the cache
createStoreFile(r);
@ -347,33 +350,32 @@ public class TestMajorCompaction {
private void verifyCounts(int countRow1, int countRow2) throws Exception {
int count1 = 0;
int count2 = 0;
for (HStoreFile f: r.getStore(COLUMN_FAMILY_TEXT).getStorefiles()) {
for (HStoreFile f : r.getStore(COLUMN_FAMILY_TEXT).getStorefiles()) {
HFileScanner scanner = f.getReader().getScanner(false, false);
scanner.seekTo();
do {
byte [] row = CellUtil.cloneRow(scanner.getCell());
byte[] row = CellUtil.cloneRow(scanner.getCell());
if (Bytes.equals(row, STARTROW)) {
count1++;
} else if(Bytes.equals(row, secondRowBytes)) {
} else if (Bytes.equals(row, secondRowBytes)) {
count2++;
}
} while(scanner.next());
} while (scanner.next());
}
assertEquals(countRow1,count1);
assertEquals(countRow2,count2);
assertEquals(countRow1, count1);
assertEquals(countRow2, count2);
}

private int count() throws IOException {
int count = 0;
for (HStoreFile f: r.getStore(COLUMN_FAMILY_TEXT).getStorefiles()) {
for (HStoreFile f : r.getStore(COLUMN_FAMILY_TEXT).getStorefiles()) {
HFileScanner scanner = f.getReader().getScanner(false, false);
if (!scanner.seekTo()) {
continue;
}
do {
count++;
} while(scanner.next());
} while (scanner.next());
}
return count;
}
@ -384,14 +386,13 @@ public class TestMajorCompaction {
private void createStoreFile(final HRegion region, String family) throws IOException {
Table loader = new RegionAsTable(region);
HBaseTestCase.addContent(loader, family);
HTestConst.addContent(loader, family);
region.flush(true);
}

private void createSmallerStoreFile(final HRegion region) throws IOException {
Table loader = new RegionAsTable(region);
HBaseTestCase.addContent(loader, Bytes.toString(COLUMN_FAMILY), Bytes.toBytes("" +
"bbb"), null);
HTestConst.addContent(loader, Bytes.toString(COLUMN_FAMILY), Bytes.toBytes("" + "bbb"), null);
region.flush(true);
}
@ -410,8 +411,7 @@ public class TestMajorCompaction {
CompactionRequestImpl request = store.requestCompaction().get().getRequest();
assertNotNull("Expected to receive a compaction request", request);
assertEquals(
"System-requested major compaction should not occur if there are too many store files",
false,
"System-requested major compaction should not occur if there are too many store files", false,
request.isMajor());
}
@ -419,21 +419,19 @@ public class TestMajorCompaction {
* Test for HBASE-5920
*/
@Test
public void testUserMajorCompactionRequest() throws IOException{
public void testUserMajorCompactionRequest() throws IOException {
HStore store = r.getStore(COLUMN_FAMILY);
createStoreFile(r);
for (int i = 0; i < MAX_FILES_TO_COMPACT + 1; i++) {
createStoreFile(r);
}
store.triggerMajorCompaction();
CompactionRequestImpl request =
store.requestCompaction(PRIORITY_USER, CompactionLifeCycleTracker.DUMMY, null).get()
.getRequest();
CompactionRequestImpl request = store
.requestCompaction(PRIORITY_USER, CompactionLifeCycleTracker.DUMMY, null).get().getRequest();
assertNotNull("Expected to receive a compaction request", request);
assertEquals(
"User-requested major compaction should always occur, even if there are too many store files",
true,
request.isMajor());
true, request.isMajor());
}

/**
@ -30,9 +30,9 @@ import java.util.Optional;
import java.util.stream.Collectors;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestCase;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTestConst;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.Result;
@ -195,13 +195,13 @@ public class TestMinorCompaction {
throws Exception {
Table loader = new RegionAsTable(r);
for (int i = 0; i < COMPACTION_THRESHOLD + 1; i++) {
HBaseTestCase.addContent(loader, Bytes.toString(fam1), Bytes.toString(COL1), FIRST_ROW_BYTES,
HTestConst.addContent(loader, Bytes.toString(fam1), Bytes.toString(COL1), FIRST_ROW_BYTES,
THIRD_ROW_BYTES, i);
HBaseTestCase.addContent(loader, Bytes.toString(fam1), Bytes.toString(COL2), FIRST_ROW_BYTES,
HTestConst.addContent(loader, Bytes.toString(fam1), Bytes.toString(COL2), FIRST_ROW_BYTES,
THIRD_ROW_BYTES, i);
HBaseTestCase.addContent(loader, Bytes.toString(fam2), Bytes.toString(COL1), FIRST_ROW_BYTES,
HTestConst.addContent(loader, Bytes.toString(fam2), Bytes.toString(COL1), FIRST_ROW_BYTES,
THIRD_ROW_BYTES, i);
HBaseTestCase.addContent(loader, Bytes.toString(fam2), Bytes.toString(COL2), FIRST_ROW_BYTES,
HTestConst.addContent(loader, Bytes.toString(fam2), Bytes.toString(COL2), FIRST_ROW_BYTES,
THIRD_ROW_BYTES, i);
r.flush(true);
}
@ -31,12 +31,12 @@ import java.util.List;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestCase;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.HTestConst;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.UnknownScannerException;
import org.apache.hadoop.hbase.client.Delete;
@ -132,7 +132,7 @@ public class TestScanner {
byte [] stoprow = Bytes.toBytes("ccc");
try {
this.region = TEST_UTIL.createLocalHRegion(TESTTABLEDESC, null, null);
HBaseTestCase.addContent(this.region, HConstants.CATALOG_FAMILY);
HTestConst.addContent(this.region, HConstants.CATALOG_FAMILY);
List<Cell> results = new ArrayList<>();
// Do simple test of getting one row only first.
Scan scan = new Scan(Bytes.toBytes("abc"), Bytes.toBytes("abd"));
@ -205,7 +205,7 @@ public class TestScanner {
public void testFilters() throws IOException {
try {
this.region = TEST_UTIL.createLocalHRegion(TESTTABLEDESC, null, null);
HBaseTestCase.addContent(this.region, HConstants.CATALOG_FAMILY);
HTestConst.addContent(this.region, HConstants.CATALOG_FAMILY);
byte [] prefix = Bytes.toBytes("ab");
Filter newFilter = new PrefixFilter(prefix);
Scan scan = new Scan();
@ -231,7 +231,7 @@ public class TestScanner {
public void testRaceBetweenClientAndTimeout() throws Exception {
try {
this.region = TEST_UTIL.createLocalHRegion(TESTTABLEDESC, null, null);
HBaseTestCase.addContent(this.region, HConstants.CATALOG_FAMILY);
HTestConst.addContent(this.region, HConstants.CATALOG_FAMILY);
Scan scan = new Scan();
InternalScanner s = region.getScanner(scan);
List<Cell> results = new ArrayList<>();
@ -463,7 +463,7 @@ public class TestScanner {
Table hri = new RegionAsTable(region);
try {
LOG.info("Added: " +
HBaseTestCase.addContent(hri, Bytes.toString(HConstants.CATALOG_FAMILY),
HTestConst.addContent(hri, Bytes.toString(HConstants.CATALOG_FAMILY),
Bytes.toString(HConstants.REGIONINFO_QUALIFIER)));
int count = count(hri, -1, false);
assertEquals(count, count(hri, 100, false)); // do a sync flush.
@ -485,7 +485,7 @@ public class TestScanner {
Table hri = new RegionAsTable(region);
try {
LOG.info("Added: " +
HBaseTestCase.addContent(hri, Bytes.toString(HConstants.CATALOG_FAMILY),
HTestConst.addContent(hri, Bytes.toString(HConstants.CATALOG_FAMILY),
Bytes.toString(HConstants.REGIONINFO_QUALIFIER)));
int count = count(hri, -1, false);
assertEquals(count, count(hri, 100, true)); // do a true concurrent background thread flush
@ -509,9 +509,9 @@ public class TestScanner {
Table hri = new RegionAsTable(region);

try {
HBaseTestCase.addContent(hri, Bytes.toString(fam1), Bytes.toString(col1),
HTestConst.addContent(hri, Bytes.toString(fam1), Bytes.toString(col1),
firstRowBytes, secondRowBytes);
HBaseTestCase.addContent(hri, Bytes.toString(fam2), Bytes.toString(col1),
HTestConst.addContent(hri, Bytes.toString(fam2), Bytes.toString(col1),
firstRowBytes, secondRowBytes);

Delete dc = new Delete(firstRowBytes);
@ -520,9 +520,9 @@ public class TestScanner {
region.delete(dc);
region.flush(true);

HBaseTestCase.addContent(hri, Bytes.toString(fam1), Bytes.toString(col1),
HTestConst.addContent(hri, Bytes.toString(fam1), Bytes.toString(col1),
secondRowBytes, thirdRowBytes);
HBaseTestCase.addContent(hri, Bytes.toString(fam2), Bytes.toString(col1),
HTestConst.addContent(hri, Bytes.toString(fam2), Bytes.toString(col1),
secondRowBytes, thirdRowBytes);
region.flush(true);
@ -17,61 +17,88 @@
*/
package org.apache.hadoop.hbase.regionserver;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;
import java.util.Random;
import java.util.concurrent.ThreadLocalRandom;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseTestCase;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Durability;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.RegionInfo;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

@Category({RegionServerTests.class, SmallTests.class})
public class TestWideScanner extends HBaseTestCase {
@Category({ RegionServerTests.class, SmallTests.class })
public class TestWideScanner {

@ClassRule
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestWideScanner.class);

private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();

private static final Logger LOG = LoggerFactory.getLogger(TestWideScanner.class);

static final byte[] A = Bytes.toBytes("A");
static final byte[] B = Bytes.toBytes("B");
static final byte[] C = Bytes.toBytes("C");
static byte[][] COLUMNS = { A, B, C };
static final Random rng = new Random();
static final HTableDescriptor TESTTABLEDESC =
new HTableDescriptor(TableName.valueOf("testwidescan"));
private static final byte[] A = Bytes.toBytes("A");
private static final byte[] B = Bytes.toBytes("B");
private static final byte[] C = Bytes.toBytes("C");
private static byte[][] COLUMNS = { A, B, C };

private static final TableDescriptor TESTTABLEDESC;
static {
TableDescriptorBuilder builder =
TableDescriptorBuilder.newBuilder(TableName.valueOf("testwidescan"));
for (byte[] cfName : new byte[][] { A, B, C }) {
TESTTABLEDESC.addFamily(new HColumnDescriptor(cfName)
// Keep versions to help debugging.
.setMaxVersions(100)
.setBlocksize(8 * 1024)
);
builder.setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(cfName).setMaxVersions(100)
.setBlocksize(8 * 1024).build());
}
TESTTABLEDESC = builder.build();
}

/** HRegionInfo for root region */
HRegion r;
private static HRegion REGION;

@BeforeClass
public static void setUp() throws IOException {
Path testDir = UTIL.getDataTestDir();
RegionInfo hri = RegionInfoBuilder.newBuilder(TESTTABLEDESC.getTableName()).build();
REGION =
HBaseTestingUtility.createRegionAndWAL(hri, testDir, UTIL.getConfiguration(), TESTTABLEDESC);
}

@AfterClass
public static void tearDown() throws IOException {
if (REGION != null) {
HBaseTestingUtility.closeRegionAndWAL(REGION);
REGION = null;
}
UTIL.cleanupTestDir();
}

private int addWideContent(HRegion region) throws IOException {
int count = 0;
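The rewritten TestWideScanner above builds its table descriptor with the builder API instead of the deprecated HTableDescriptor/HColumnDescriptor constructors. The sketch below condenses just that construction from the lines above; it assumes the HBase 2.x client API on the classpath.

import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.util.Bytes;

// Sketch of the descriptor construction used above: immutable descriptors are
// assembled via builders rather than mutated via the old setter-style classes.
public final class DescriptorBuilderSketch {

  static TableDescriptor build() {
    TableDescriptorBuilder builder =
      TableDescriptorBuilder.newBuilder(TableName.valueOf("testwidescan"));
    for (byte[] cfName : new byte[][] { Bytes.toBytes("A"), Bytes.toBytes("B"),
        Bytes.toBytes("C") }) {
      // Keep many versions and a small block size, as in the test above.
      builder.setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(cfName)
        .setMaxVersions(100).setBlocksize(8 * 1024).build());
    }
    return builder.build();
  }

  private DescriptorBuilderSketch() {
  }
}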
@ -85,7 +112,7 @@ public class TestWideScanner extends HBaseTestCase {
Put put = new Put(row);
put.setDurability(Durability.SKIP_WAL);
long ts1 = ++ts;
put.addColumn(COLUMNS[rng.nextInt(COLUMNS.length)], b, ts1, b);
put.addColumn(COLUMNS[ThreadLocalRandom.current().nextInt(COLUMNS.length)], b, ts1, b);
region.put(put);
count++;
}
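The same hunk also replaces the shared static Random with ThreadLocalRandom.current(), a common way to avoid synchronizing on one generator across threads. A trivial stand-alone sketch, nothing HBase-specific:

import java.util.concurrent.ThreadLocalRandom;

public class ThreadLocalRandomSketch {
  public static void main(String[] args) {
    String[] columns = { "A", "B", "C" };
    // current() returns the generator bound to the calling thread; there is no
    // shared state to contend on, unlike a static java.util.Random instance.
    String pick = columns[ThreadLocalRandom.current().nextInt(columns.length)];
    System.out.println("picked column " + pick);
  }
}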
@ -97,17 +124,15 @@ public class TestWideScanner extends HBaseTestCase {
@Test
public void testWideScanBatching() throws IOException {
final int batch = 256;
try {
this.r = createNewHRegion(TESTTABLEDESC, null, null);
int inserted = addWideContent(this.r);
int inserted = addWideContent(REGION);
List<Cell> results = new ArrayList<>();
Scan scan = new Scan();
scan.addFamily(A);
scan.addFamily(B);
scan.addFamily(C);
scan.setMaxVersions(100);
scan.readVersions(100);
scan.setBatch(batch);
InternalScanner s = r.getScanner(scan);
try (InternalScanner s = REGION.getScanner(scan)) {
int total = 0;
int i = 0;
boolean more;
@ -124,7 +149,7 @@ public class TestWideScanner extends HBaseTestCase {
if (results.size() > 0) {
// assert that all results are from the same row
byte[] row = CellUtil.cloneRow(results.get(0));
for (Cell kv: results) {
for (Cell kv : results) {
assertTrue(Bytes.equals(row, CellUtil.cloneRow(kv)));
}
}
@ -132,23 +157,16 @@ public class TestWideScanner extends HBaseTestCase {
results.clear();

// trigger ChangedReadersObservers
Iterator<KeyValueScanner> scanners =
((RegionScannerImpl) s).storeHeap.getHeap().iterator();
Iterator<KeyValueScanner> scanners = ((RegionScannerImpl) s).storeHeap.getHeap().iterator();
while (scanners.hasNext()) {
StoreScanner ss = (StoreScanner)scanners.next();
ss.updateReaders(Collections.EMPTY_LIST, Collections.EMPTY_LIST);
StoreScanner ss = (StoreScanner) scanners.next();
ss.updateReaders(Collections.emptyList(), Collections.emptyList());
}
} while (more);

// assert that the scanner returned all values
LOG.info("inserted " + inserted + ", scanned " + total);
assertEquals(total, inserted);

s.close();
} finally {
HBaseTestingUtility.closeRegionAndWAL(this.r);
}
}

}