HBASE-21922 BloomContext#sanityCheck may fail when using the ROWPREFIX_DELIMITED bloom filter

Guanghao Zhang 2019-02-23 16:33:25 +08:00
parent 714e5225e8
commit 607ac735c4
64 changed files with 227 additions and 441 deletions

@@ -38,9 +38,5 @@ public enum BloomType {
   /**
    * Bloom enabled with Table row prefix as Key, specify the length of the prefix
    */
-  ROWPREFIX_FIXED_LENGTH,
-  /**
-   * Bloom enabled with Table row prefix as Key, specify the delimiter of the prefix
-   */
-  ROWPREFIX_DELIMITED
+  ROWPREFIX_FIXED_LENGTH
 }
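
With ROWPREFIX_DELIMITED removed, ROWPREFIX_FIXED_LENGTH is the only row-prefix bloom type left. A minimal sketch of enabling it on a column family, using the same builder calls the next hunk touches (the family name is illustrative):

    import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
    import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
    import org.apache.hadoop.hbase.regionserver.BloomType;
    import org.apache.hadoop.hbase.util.BloomFilterUtil;
    import org.apache.hadoop.hbase.util.Bytes;

    // A fixed-length prefix bloom needs both the bloom type and the prefix length;
    // the length travels as a family-level configuration value.
    ColumnFamilyDescriptor cfd = ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("cf"))
        .setBloomFilterType(BloomType.ROWPREFIX_FIXED_LENGTH)
        .setConfiguration(BloomFilterUtil.PREFIX_LENGTH_KEY, "10")
        .build();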

@@ -56,8 +56,6 @@ public class ChangeBloomFilterAction extends Action {
       columnBuilder.setBloomFilterType(bloomType);
       if (bloomType == BloomType.ROWPREFIX_FIXED_LENGTH) {
         columnBuilder.setConfiguration(BloomFilterUtil.PREFIX_LENGTH_KEY, "10");
-      } else if (bloomType == BloomType.ROWPREFIX_DELIMITED) {
-        columnBuilder.setConfiguration(BloomFilterUtil.DELIMITER_KEY, "#");
       }
     });

@@ -390,8 +390,6 @@ public class HFileOutputFormat2
         String bloomParam = bloomParamMap.get(tableAndFamily);
         if (bloomType == BloomType.ROWPREFIX_FIXED_LENGTH) {
           conf.set(BloomFilterUtil.PREFIX_LENGTH_KEY, bloomParam);
-        } else if (bloomType == BloomType.ROWPREFIX_DELIMITED) {
-          conf.set(BloomFilterUtil.DELIMITER_KEY, bloomParam);
         }
         Integer blockSize = blockSizeMap.get(tableAndFamily);
         blockSize = blockSize == null ? HConstants.DEFAULT_BLOCKSIZE : blockSize;
@@ -932,8 +930,6 @@ public class HFileOutputFormat2
     String bloomParam = "";
     if (bloomType == BloomType.ROWPREFIX_FIXED_LENGTH) {
       bloomParam = familyDescriptor.getConfigurationValue(BloomFilterUtil.PREFIX_LENGTH_KEY);
-    } else if (bloomType == BloomType.ROWPREFIX_DELIMITED) {
-      bloomParam = familyDescriptor.getConfigurationValue(BloomFilterUtil.DELIMITER_KEY);
     }
     return bloomParam;
   };
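
For bulk loads, the two hunks above mean HFileOutputFormat2 now serializes only the fixed prefix length: it reads PREFIX_LENGTH_KEY back from the family descriptor and re-sets it on the writer's Configuration. A hedged sketch of a table descriptor that would feed this path (table and family names are illustrative; imports as in the previous example plus the TableDescriptor classes):

    import org.apache.hadoop.hbase.TableName;
    import org.apache.hadoop.hbase.client.TableDescriptor;
    import org.apache.hadoop.hbase.client.TableDescriptorBuilder;

    // HFileOutputFormat2 fetches the bloom parameter per family via
    // getConfigurationValue(BloomFilterUtil.PREFIX_LENGTH_KEY), as the hunks above show.
    TableDescriptor td = TableDescriptorBuilder.newBuilder(TableName.valueOf("t1"))
        .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("cf"))
            .setBloomFilterType(BloomType.ROWPREFIX_FIXED_LENGTH)
            .setConfiguration(BloomFilterUtil.PREFIX_LENGTH_KEY, "10")
            .build())
        .build();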

@@ -561,14 +561,6 @@ public class LoadTestTool extends AbstractHBaseTool {
       }
     }

-    if (bloomType == BloomType.ROWPREFIX_DELIMITED) {
-      if (!cmd.hasOption(OPT_BLOOM_PARAM)) {
-        LOG.error("the parameter of bloom filter {} is not specified", bloomType.name());
-      } else {
-        conf.set(BloomFilterUtil.DELIMITER_KEY, cmd.getOptionValue(OPT_BLOOM_PARAM));
-      }
-    }
-
     inMemoryCF = cmd.hasOption(OPT_INMEMORY);
     if (cmd.hasOption(OPT_ENCRYPTION)) {
       cipher = Encryption.getCipher(conf, cmd.getOptionValue(OPT_ENCRYPTION));

@@ -76,7 +76,6 @@ public class StoreFileReader {
   private KeyValue.KeyOnlyKeyValue lastBloomKeyOnlyKV = null;
   private boolean skipResetSeqId = true;
   private int prefixLength = -1;
-  private byte[] delimiter = null;

   // Counter that is incremented every time a scanner is created on the
   // store file. It is decremented when the scan on the store file is
@@ -123,7 +122,6 @@ public class StoreFileReader {
     this.lastBloomKeyOnlyKV = reader.lastBloomKeyOnlyKV;
     this.skipResetSeqId = reader.skipResetSeqId;
     this.prefixLength = reader.prefixLength;
-    this.delimiter = reader.delimiter;
   }

   public boolean isPrimaryReplicaReader() {
@@ -295,8 +293,6 @@ public class StoreFileReader {
         return true;
       case ROWPREFIX_FIXED_LENGTH:
         return passesGeneralRowPrefixBloomFilter(scan);
-      case ROWPREFIX_DELIMITED:
-        return passesGeneralDelimitedRowPrefixBloomFilter(scan);
       default:
         return true;
     }
@@ -408,45 +404,6 @@ public class StoreFileReader {
     return checkGeneralBloomFilter(rowPrefix, null, bloomFilter);
   }

-  /**
-   * A method for checking Bloom filters. Called directly from
-   * StoreFileScanner in case of a multi-column query.
-   *
-   * @return True if passes
-   */
-  private boolean passesGeneralDelimitedRowPrefixBloomFilter(Scan scan) {
-    BloomFilter bloomFilter = this.generalBloomFilter;
-    if (bloomFilter == null) {
-      return true;
-    }
-
-    byte[] row = scan.getStartRow();
-    byte[] rowPrefix;
-    if (scan.isGetScan()) {
-      int rowPrefixLength = Bytes.indexOf(row, delimiter);
-      if (rowPrefixLength <= 0) {
-        rowPrefix = row;
-      } else {
-        rowPrefix = Bytes.copy(row, 0, rowPrefixLength);
-      }
-    } else {
-      // For non-get scans
-      // If startRow does not contain delimiter, return true directly.
-      int startRowPrefixLength = Bytes.indexOf(row, delimiter);
-      if (startRowPrefixLength <= 0) {
-        return true;
-      }
-      // If stopRow does not have the same prefix as startRow, return true directly.
-      int commonLength = Bytes.findCommonPrefix(scan.getStartRow(), scan.getStopRow(),
-        startRowPrefixLength, scan.getStopRow().length, 0, 0);
-      if (commonLength < startRowPrefixLength) {
-        return true;
-      }
-      rowPrefix = Bytes.copy(row, 0, startRowPrefixLength);
-    }
-    return checkGeneralBloomFilter(rowPrefix, null, bloomFilter);
-  }
-
   private boolean checkGeneralBloomFilter(byte[] key, Cell kvKey, BloomFilter bloomFilter) {
     // Empty file
     if (reader.getTrailer().getEntryCount() == 0) {
@@ -557,8 +514,6 @@ public class StoreFileReader {
       byte[] p = fi.get(BLOOM_FILTER_PARAM_KEY);
       if (bloomFilterType == BloomType.ROWPREFIX_FIXED_LENGTH) {
         prefixLength = Bytes.toInt(p);
-      } else if (bloomFilterType == BloomType.ROWPREFIX_DELIMITED) {
-        delimiter = p;
       }

     lastBloomKey = fi.get(LAST_BLOOM_KEY);
@@ -762,12 +717,7 @@ public class StoreFileReader {
     void storeFileReaderClosed(StoreFileReader reader);
   }

   public int getPrefixLength() {
     return prefixLength;
   }
-
-  public byte[] getDelimiter() {
-    return delimiter;
-  }
 }
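
With the delimiter branch gone, the only bloom parameter the reader loads from BLOOM_FILTER_PARAM_KEY is the fixed prefix length. A short sketch of inspecting it, mirroring the test code later in this commit (fs, path, cacheConf, and conf are assumed to be set up as in TestRowPrefixBloomFilter):

    StoreFileReader reader =
        new StoreFileReader(fs, path, cacheConf, true, new AtomicInteger(0), true, conf);
    reader.loadFileInfo();
    reader.loadBloomfilter();
    // For a ROWPREFIX_FIXED_LENGTH file the stored parameter is the prefix length;
    // getPrefixLength() stays -1 when no prefix bloom parameter was written.
    assertEquals(BloomType.ROWPREFIX_FIXED_LENGTH, reader.getBloomFilterType());
    int prefixLength = reader.getPrefixLength();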

@@ -51,7 +51,6 @@ import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.util.RowBloomContext;
 import org.apache.hadoop.hbase.util.RowColBloomContext;
-import org.apache.hadoop.hbase.util.RowPrefixDelimiterBloomContext;
 import org.apache.hadoop.hbase.util.RowPrefixFixedLengthBloomContext;
 import org.apache.yetus.audience.InterfaceAudience;
 import org.slf4j.Logger;
@@ -134,13 +133,9 @@ public class StoreFileWriter implements CellSink, ShipperListener {
           bloomContext = new RowPrefixFixedLengthBloomContext(generalBloomFilterWriter, comparator,
             Bytes.toInt(bloomParam));
           break;
-        case ROWPREFIX_DELIMITED:
-          bloomContext = new RowPrefixDelimiterBloomContext(generalBloomFilterWriter, comparator,
-            bloomParam);
-          break;
         default:
-          throw new IOException("Invalid Bloom filter type: "
-            + bloomType + " (ROW or ROWCOL or ROWPREFIX or ROWPREFIX_DELIMITED expected)");
+          throw new IOException(
+            "Invalid Bloom filter type: " + bloomType + " (ROW or ROWCOL or ROWPREFIX expected)");
       }
     } else {
       // Not using Bloom filters.
@@ -222,11 +217,10 @@ public class StoreFileWriter implements CellSink, ShipperListener {
      * http://2.bp.blogspot.com/_Cib_A77V54U/StZMrzaKufI/AAAAAAAAADo/ZhK7bGoJdMQ/s400/KeyValue.png
      * Key = RowLen + Row + FamilyLen + Column [Family + Qualifier] + Timestamp
      *
-     * 4 Types of Filtering:
+     * 3 Types of Filtering:
      *  1. Row = Row
      *  2. RowCol = Row + Qualifier
      *  3. RowPrefixFixedLength = Fixed Length Row Prefix
-     *  4. RowPrefixDelimiter = Delimited Row Prefix
      */
     bloomContext.writeBloom(cell);
   }

@@ -17,6 +17,8 @@
  */
 package org.apache.hadoop.hbase.util;

+import static org.apache.hadoop.hbase.regionserver.BloomType.ROWPREFIX_FIXED_LENGTH;
+
 import java.text.NumberFormat;
 import java.util.Random;
@@ -50,7 +52,6 @@ public final class BloomFilterUtil {
   private static Random randomGeneratorForTest;

   public static final String PREFIX_LENGTH_KEY = "RowPrefixBloomFilter.prefix_length";
-  public static final String DELIMITER_KEY = "RowPrefixDelimitedBloomFilter.delimiter";

   /** Bit-value lookup array to prevent doing the same work over and over */
   public static final byte [] bitvals = {
@@ -294,8 +295,7 @@ public final class BloomFilterUtil {
       throws IllegalArgumentException {
     byte[] bloomParam = null;
     String message = "Bloom filter type is " + bloomFilterType + ", ";
-    switch (bloomFilterType) {
-      case ROWPREFIX_FIXED_LENGTH:
+    if (bloomFilterType.equals(ROWPREFIX_FIXED_LENGTH)) {
       String prefixLengthString = conf.get(PREFIX_LENGTH_KEY);
       if (prefixLengthString == null) {
         message += PREFIX_LENGTH_KEY + " not specified.";
@@ -305,28 +305,16 @@ public final class BloomFilterUtil {
       try {
         prefixLength = Integer.parseInt(prefixLengthString);
         if (prefixLength <= 0 || prefixLength > HConstants.MAX_ROW_LENGTH) {
-          message += "the value of " + PREFIX_LENGTH_KEY
-            + " must >=0 and < " + HConstants.MAX_ROW_LENGTH;
+          message +=
+            "the value of " + PREFIX_LENGTH_KEY + " must >=0 and < " + HConstants.MAX_ROW_LENGTH;
           throw new IllegalArgumentException(message);
         }
       } catch (NumberFormatException nfe) {
-        message = "Number format exception when parsing " + PREFIX_LENGTH_KEY + " for BloomType "
-          + bloomFilterType.toString() + ":"
-          + prefixLengthString;
+        message = "Number format exception when parsing " + PREFIX_LENGTH_KEY + " for BloomType " +
+          bloomFilterType.toString() + ":" + prefixLengthString;
         throw new IllegalArgumentException(message, nfe);
       }
       bloomParam = Bytes.toBytes(prefixLength);
-      break;
-    case ROWPREFIX_DELIMITED:
-      String delimiterString = conf.get(DELIMITER_KEY);
-      if (delimiterString == null || delimiterString.length() == 0) {
-        message += DELIMITER_KEY + " not specified.";
-        throw new IllegalArgumentException(message);
-      }
-      bloomParam = Bytes.toBytes(delimiterString);
-      break;
-    default:
-      break;
     }
     return bloomParam;
   }
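
This validation is now the only bloom-parameter check: PREFIX_LENGTH_KEY must parse as an int that is greater than 0 and at most HConstants.MAX_ROW_LENGTH, otherwise an IllegalArgumentException is thrown. A hedged sketch of the failure mode (the enclosing method is not named in this hunk; getBloomFilterParam is assumed from current BloomFilterUtil):

    Configuration conf = HBaseConfiguration.create();
    conf.set(BloomFilterUtil.PREFIX_LENGTH_KEY, "0"); // invalid: the length must be > 0
    try {
      BloomFilterUtil.getBloomFilterParam(BloomType.ROWPREFIX_FIXED_LENGTH, conf);
    } catch (IllegalArgumentException expected) {
      // message text matches the hunk above: "the value of RowPrefixBloomFilter.prefix_length ..."
    }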

@@ -1,62 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.hbase.util;
-
-import java.io.IOException;
-
-import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.CellBuilderType;
-import org.apache.hadoop.hbase.CellComparator;
-import org.apache.hadoop.hbase.CellUtil;
-import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;
-import org.apache.yetus.audience.InterfaceAudience;
-
-/**
- * Handles ROWPREFIX_DELIMITED bloom related context.
- * It works with both ByteBufferedCell and byte[] backed cells
- */
-@InterfaceAudience.Private
-public class RowPrefixDelimiterBloomContext extends RowBloomContext {
-  private final byte[] delimiter;
-
-  public RowPrefixDelimiterBloomContext(BloomFilterWriter bloomFilterWriter,
-      CellComparator comparator, byte[] delimiter) {
-    super(bloomFilterWriter, comparator);
-    this.delimiter = delimiter;
-  }
-
-  public void writeBloom(Cell cell) throws IOException {
-    super.writeBloom(getDelimitedRowPrefixCell(cell));
-  }
-
-  /**
-   * @param cell the new cell
-   * @return the new cell created by delimited row prefix
-   */
-  private Cell getDelimitedRowPrefixCell(Cell cell) {
-    byte[] row = CellUtil.copyRow(cell);
-    int prefixLength = Bytes.indexOf(row, delimiter);
-    if (prefixLength <= 0) {
-      return cell;
-    }
-    return ExtendedCellBuilderFactory.create(CellBuilderType.DEEP_COPY)
-      .setRow(row, 0, Math.min(prefixLength, row.length))
-      .setType(Cell.Type.Put)
-      .build();
-  }
-}

@@ -82,7 +82,6 @@ public class TestSeekBeforeWithInlineBlocks {
   public void testMultiIndexLevelRandomHFileWithBlooms() throws IOException {
     conf = TEST_UTIL.getConfiguration();
     TEST_UTIL.getConfiguration().setInt(BloomFilterUtil.PREFIX_LENGTH_KEY, 10);
-    TEST_UTIL.getConfiguration().set(BloomFilterUtil.DELIMITER_KEY, "#");

     // Try out different HFile versions to ensure reverse scan works on each version
     for (int hfileVersion = HFile.MIN_FORMAT_VERSION_WITH_TAGS;
@@ -105,7 +104,6 @@ public class TestSeekBeforeWithInlineBlocks {
         conf.setInt(HFileBlockIndex.MAX_CHUNK_SIZE_KEY, indexBlockSize);
         conf.setInt(BloomFilterFactory.IO_STOREFILE_BLOOM_BLOCK_SIZE, BLOOM_BLOCK_SIZE);
         conf.setInt(BloomFilterUtil.PREFIX_LENGTH_KEY, 10);
-        conf.set(BloomFilterUtil.DELIMITER_KEY, "#");

         Cell[] cells = new Cell[NUM_KV];

@@ -183,15 +183,6 @@ public class CreateRandomStoreFile {
       }
     }

-    if (bloomType == BloomType.ROWPREFIX_DELIMITED) {
-      if (!cmdLine.hasOption(BLOOM_FILTER_PARAM_OPTION)) {
-        LOG.error("the parameter of bloom filter is not specified");
-        return false;
-      } else {
-        conf.set(BloomFilterUtil.DELIMITER_KEY, cmdLine.getOptionValue(BLOOM_FILTER_PARAM_OPTION));
-      }
-    }
-
     int blockSize = HConstants.DEFAULT_BLOCKSIZE;
     if (cmdLine.hasOption(BLOCK_SIZE_OPTION))
       blockSize = Integer.valueOf(cmdLine.getOptionValue(BLOCK_SIZE_OPTION));

@@ -138,7 +138,6 @@ public abstract class TestMultiColumnScanner {
   @Test
   public void testMultiColumnScanner() throws IOException {
     TEST_UTIL.getConfiguration().setInt(BloomFilterUtil.PREFIX_LENGTH_KEY, 10);
-    TEST_UTIL.getConfiguration().set(BloomFilterUtil.DELIMITER_KEY, "#");
     HRegion region = TEST_UTIL.createTestRegion(TABLE_NAME,
       ColumnFamilyDescriptorBuilder.newBuilder(FAMILY_BYTES).setCompressionType(comprAlgo)
         .setBloomFilterType(bloomType).setMaxVersions(MAX_VERSIONS)

@@ -78,10 +78,13 @@ public class TestRowPrefixBloomFilter {
   private static final int BLOCKSIZE_SMALL = 8192;
   private static final float err = (float) 0.01;
   private static final int prefixLength = 10;
-  private static final String delimiter = "#";
   private static final String invalidFormatter = "%08d";
   private static final String prefixFormatter = "%010d";
   private static final String suffixFormatter = "%010d";
+  private static final int prefixRowCount = 50;
+  private static final int suffixRowCount = 10;
+  private static final int fixedLengthExpKeys = prefixRowCount;
+  private static final BloomType bt = BloomType.ROWPREFIX_FIXED_LENGTH;

   @Rule
   public TestName name = new TestName();
@@ -92,7 +95,6 @@ public class TestRowPrefixBloomFilter {
     conf.setFloat(BloomFilterFactory.IO_STOREFILE_BLOOM_ERROR_RATE, err);
     conf.setBoolean(BloomFilterFactory.IO_STOREFILE_BLOOM_ENABLED, true);
     conf.setInt(BloomFilterUtil.PREFIX_LENGTH_KEY, prefixLength);
-    conf.set(BloomFilterUtil.DELIMITER_KEY, delimiter);
     localfs =
       (conf.get("fs.defaultFS", "file:///").compareTo("file:///") == 0);
@@ -132,8 +134,7 @@ public class TestRowPrefixBloomFilter {
     return reader.getStoreFileScanner(false, false, false, 0, 0, false);
   }

-  private void writeStoreFile(final Path f, BloomType bt, int expKeys, int prefixRowCount,
-      int suffixRowCount) throws IOException {
+  private void writeStoreFile(final Path f, BloomType bt, int expKeys) throws IOException {
     HFileContext meta = new HFileContextBuilder()
         .withBlockSize(BLOCKSIZE_SMALL)
         .withChecksumType(CKTYPE)
@@ -152,9 +153,10 @@ public class TestRowPrefixBloomFilter {
       for (int i = 0; i < prefixRowCount; i += 2) { // prefix rows
         String prefixRow = String.format(prefixFormatter, i);
         for (int j = 0; j < suffixRowCount; j++) { // suffix rows
-          String row = prefixRow + "#" + String.format(suffixFormatter, j);
-          KeyValue kv = new KeyValue(Bytes.toBytes(row), Bytes.toBytes("family"),
-            Bytes.toBytes("col"), now, Bytes.toBytes("value"));
+          String row = generateRowWithSuffix(prefixRow, j);
+          KeyValue kv =
+            new KeyValue(Bytes.toBytes(row), Bytes.toBytes("family"), Bytes.toBytes("col"), now,
+              Bytes.toBytes("value"));
           writer.append(kv);
         }
       }
@@ -162,8 +164,9 @@ public class TestRowPrefixBloomFilter {
       //Put with invalid row style
       for (int i = prefixRowCount; i < prefixRowCount * 2; i += 2) { // prefix rows
         String row = String.format(invalidFormatter, i);
-        KeyValue kv = new KeyValue(Bytes.toBytes(row), Bytes.toBytes("family"),
-          Bytes.toBytes("col"), now, Bytes.toBytes("value"));
+        KeyValue kv =
+          new KeyValue(Bytes.toBytes(row), Bytes.toBytes("family"), Bytes.toBytes("col"), now,
+            Bytes.toBytes("value"));
         writer.append(kv);
       }
     } finally {
@@ -171,49 +174,45 @@ public class TestRowPrefixBloomFilter {
     }
   }

+  private String generateRowWithSuffix(String prefixRow, int suffix) {
+    StringBuilder row = new StringBuilder(prefixRow);
+    row.append("#");
+    row.append(String.format(suffixFormatter, suffix));
+    return row.toString();
+  }
+
   @Test
   public void testRowPrefixBloomFilter() throws Exception {
     FileSystem fs = FileSystem.getLocal(conf);
-    BloomType[] bt = {BloomType.ROWPREFIX_FIXED_LENGTH, BloomType.ROWPREFIX_DELIMITED};
-    int prefixRowCount = 50;
-    int suffixRowCount = 10;
-    int expKeys = 50;
     float expErr = 2 * prefixRowCount * suffixRowCount * err;
-    for (int x : new int[]{0,1}) {
+    int expKeys = fixedLengthExpKeys;
     // write the file
     Path f = new Path(testDir, name.getMethodName());
-    writeStoreFile(f, bt[x], expKeys, prefixRowCount, suffixRowCount);
+    writeStoreFile(f, bt, expKeys);

     // read the file
-    StoreFileReader reader = new StoreFileReader(fs, f, cacheConf, true,
-      new AtomicInteger(0), true, conf);
+    StoreFileReader reader =
+      new StoreFileReader(fs, f, cacheConf, true, new AtomicInteger(0), true, conf);
     reader.loadFileInfo();
     reader.loadBloomfilter();

     //check basic param
-    assertEquals(bt[x], reader.getBloomFilterType());
-    if (bt[x] == BloomType.ROWPREFIX_FIXED_LENGTH) {
+    assertEquals(bt, reader.getBloomFilterType());
     assertEquals(prefixLength, reader.getPrefixLength());
-      assertEquals("null", Bytes.toStringBinary(reader.getDelimiter()));
-    } else if (bt[x] == BloomType.ROWPREFIX_DELIMITED){
-      assertEquals(-1, reader.getPrefixLength());
-      assertEquals(delimiter, Bytes.toStringBinary(reader.getDelimiter()));
-    }
     assertEquals(expKeys, reader.getGeneralBloomFilter().getKeyCount());
     StoreFileScanner scanner = getStoreFileScanner(reader);
     HStore store = mock(HStore.class);
-    when(store.getColumnFamilyDescriptor())
-      .thenReturn(ColumnFamilyDescriptorBuilder.of("family"));
+    when(store.getColumnFamilyDescriptor()).thenReturn(ColumnFamilyDescriptorBuilder.of("family"));

     // check false positives rate
     int falsePos = 0;
     int falseNeg = 0;
     for (int i = 0; i < prefixRowCount; i++) { // prefix rows
       String prefixRow = String.format(prefixFormatter, i);
       for (int j = 0; j < suffixRowCount; j++) { // suffix rows
-        String startRow = prefixRow + "#" + String.format(suffixFormatter, j);
-        String stopRow = prefixRow + "#" + String.format(suffixFormatter, j+1);
-        Scan scan = new Scan().withStartRow(Bytes.toBytes(startRow))
-          .withStopRow(Bytes.toBytes(stopRow));
+        String startRow = generateRowWithSuffix(prefixRow, j);
+        String stopRow = generateRowWithSuffix(prefixRow, j + 1);
+        Scan scan =
+          new Scan().withStartRow(Bytes.toBytes(startRow)).withStopRow(Bytes.toBytes(stopRow));
         boolean exists = scanner.shouldUseScanner(scan, store, Long.MIN_VALUE);
         boolean shouldPrefixRowExist = i % 2 == 0;
         if (shouldPrefixRowExist) {
@@ -247,52 +246,45 @@ public class TestRowPrefixBloomFilter {
     fs.delete(f, true);
     assertEquals("False negatives: " + falseNeg, 0, falseNeg);
     int maxFalsePos = (int) (2 * expErr);
-    assertTrue("Too many false positives: " + falsePos
-      + " (err=" + err + ", expected no more than " + maxFalsePos + ")",
-      falsePos <= maxFalsePos);
-    }
+    assertTrue(
+      "Too many false positives: " + falsePos + " (err=" + err + ", expected no more than " +
+        maxFalsePos + ")", falsePos <= maxFalsePos);
   }

   @Test
   public void testRowPrefixBloomFilterWithGet() throws Exception {
     FileSystem fs = FileSystem.getLocal(conf);
-    BloomType[] bt = {BloomType.ROWPREFIX_FIXED_LENGTH, BloomType.ROWPREFIX_DELIMITED};
-    int prefixRowCount = 50;
-    int suffixRowCount = 10;
-    int expKeys = 50;
-    for (int x : new int[]{0,1}) {
+    int expKeys = fixedLengthExpKeys;
     // write the file
     Path f = new Path(testDir, name.getMethodName());
-    writeStoreFile(f, bt[x], expKeys, prefixRowCount, suffixRowCount);
+    writeStoreFile(f, bt, expKeys);

-    StoreFileReader reader = new StoreFileReader(fs, f, cacheConf, true,
-      new AtomicInteger(0), true, conf);
+    StoreFileReader reader =
+      new StoreFileReader(fs, f, cacheConf, true, new AtomicInteger(0), true, conf);
     reader.loadFileInfo();
     reader.loadBloomfilter();

     StoreFileScanner scanner = getStoreFileScanner(reader);
     HStore store = mock(HStore.class);
-    when(store.getColumnFamilyDescriptor())
-      .thenReturn(ColumnFamilyDescriptorBuilder.of("family"));
+    when(store.getColumnFamilyDescriptor()).thenReturn(ColumnFamilyDescriptorBuilder.of("family"));

     //Get with valid row style
     //prefix row in bloom
     String prefixRow = String.format(prefixFormatter, prefixRowCount - 2);
-    String row = prefixRow + "#" + String.format(suffixFormatter, 0);
+    String row = generateRowWithSuffix(prefixRow, 0);
     Scan scan = new Scan(new Get(Bytes.toBytes(row)));
     boolean exists = scanner.shouldUseScanner(scan, store, Long.MIN_VALUE);
     assertTrue(exists);

     // prefix row not in bloom
     prefixRow = String.format(prefixFormatter, prefixRowCount - 1);
-    row = prefixRow + "#" + String.format(suffixFormatter, 0);
+    row = generateRowWithSuffix(prefixRow, 0);
     scan = new Scan(new Get(Bytes.toBytes(row)));
     exists = scanner.shouldUseScanner(scan, store, Long.MIN_VALUE);
     assertFalse(exists);

     // Get with invalid row style
     // ROWPREFIX: the length of row is less than prefixLength
-    // ROWPREFIX_DELIMITED: Row does not contain delimiter
     // row in bloom
     row = String.format(invalidFormatter, prefixRowCount + 2);
     scan = new Scan(new Get(Bytes.toBytes(row)));
@@ -308,92 +300,61 @@ public class TestRowPrefixBloomFilter {
     reader.close(true); // evict because we are about to delete the file
     fs.delete(f, true);
   }
-  }

   @Test
   public void testRowPrefixBloomFilterWithScan() throws Exception {
     FileSystem fs = FileSystem.getLocal(conf);
-    BloomType[] bt = {BloomType.ROWPREFIX_FIXED_LENGTH, BloomType.ROWPREFIX_DELIMITED};
-    int prefixRowCount = 50;
-    int suffixRowCount = 10;
-    int expKeys = 50;
-    for (int x : new int[]{0,1}) {
+    int expKeys = fixedLengthExpKeys;
     // write the file
     Path f = new Path(testDir, name.getMethodName());
-    writeStoreFile(f, bt[x], expKeys, prefixRowCount, suffixRowCount);
+    writeStoreFile(f, bt, expKeys);

-    StoreFileReader reader = new StoreFileReader(fs, f, cacheConf, true,
-      new AtomicInteger(0), true, conf);
+    StoreFileReader reader =
+      new StoreFileReader(fs, f, cacheConf, true, new AtomicInteger(0), true, conf);
     reader.loadFileInfo();
     reader.loadBloomfilter();

     StoreFileScanner scanner = getStoreFileScanner(reader);
     HStore store = mock(HStore.class);
-    when(store.getColumnFamilyDescriptor())
-      .thenReturn(ColumnFamilyDescriptorBuilder.of("family"));
+    when(store.getColumnFamilyDescriptor()).thenReturn(ColumnFamilyDescriptorBuilder.of("family"));

     //Scan with valid row style. startRow and stopRow have a common prefix.
     //And the length of the common prefix is no less than prefixLength.
     //prefix row in bloom
     String prefixRow = String.format(prefixFormatter, prefixRowCount - 2);
-    String startRow = prefixRow + "#" + String.format(suffixFormatter, 0);
-    String stopRow = prefixRow + "#" + String.format(suffixFormatter, 1);
-    Scan scan = new Scan().withStartRow(Bytes.toBytes(startRow))
-      .withStopRow(Bytes.toBytes(stopRow));
+    String startRow = generateRowWithSuffix(prefixRow, 0);
+    String stopRow = generateRowWithSuffix(prefixRow, 1);
+    Scan scan =
+      new Scan().withStartRow(Bytes.toBytes(startRow)).withStopRow(Bytes.toBytes(stopRow));
     boolean exists = scanner.shouldUseScanner(scan, store, Long.MIN_VALUE);
     assertTrue(exists);

     // prefix row not in bloom
     prefixRow = String.format(prefixFormatter, prefixRowCount - 1);
-    startRow = prefixRow + "#" + String.format(suffixFormatter, 0);
-    stopRow = prefixRow + "#" + String.format(suffixFormatter, 1);
-    scan = new Scan().withStartRow(Bytes.toBytes(startRow))
-      .withStopRow(Bytes.toBytes(stopRow));
+    startRow = generateRowWithSuffix(prefixRow, 0);
+    stopRow = generateRowWithSuffix(prefixRow, 1);
+    scan = new Scan().withStartRow(Bytes.toBytes(startRow)).withStopRow(Bytes.toBytes(stopRow));
     exists = scanner.shouldUseScanner(scan, store, Long.MIN_VALUE);
     assertFalse(exists);

     // There is no common prefix between startRow and stopRow.
     prefixRow = String.format(prefixFormatter, prefixRowCount - 2);
-    startRow = prefixRow + "#" + String.format(suffixFormatter, 0);
+    startRow = generateRowWithSuffix(prefixRow, 0);
     scan = new Scan().withStartRow(Bytes.toBytes(startRow));
     exists = scanner.shouldUseScanner(scan, store, Long.MIN_VALUE);
     assertTrue(exists);

-    if (bt[x] == BloomType.ROWPREFIX_FIXED_LENGTH) {
     // startRow and stopRow have a common prefix.
     // But the length of the common prefix is less than prefixLength.
     String prefixStartRow = String.format(prefixFormatter, prefixRowCount - 2);
     String prefixStopRow = String.format(prefixFormatter, prefixRowCount - 1);
-      startRow = prefixStartRow + "#" + String.format(suffixFormatter, 0);
-      stopRow = prefixStopRow + "#" + String.format(suffixFormatter, 0);
-      scan = new Scan().withStartRow(Bytes.toBytes(startRow))
-        .withStopRow(Bytes.toBytes(stopRow));
+    startRow = generateRowWithSuffix(prefixStartRow, 0);
+    stopRow = generateRowWithSuffix(prefixStopRow, 0);
+    scan = new Scan().withStartRow(Bytes.toBytes(startRow)).withStopRow(Bytes.toBytes(stopRow));
     exists = scanner.shouldUseScanner(scan, store, Long.MIN_VALUE);
     assertTrue(exists);
-    } else if (bt[x] == BloomType.ROWPREFIX_DELIMITED) {
-      // startRow does not contain delimiter
-      String prefixStartRow = String.format(prefixFormatter, prefixRowCount-2);
-      String prefixStopRow = String.format(prefixFormatter, prefixRowCount-2);
-      startRow = prefixStartRow + String.format(suffixFormatter, 0);
-      stopRow = prefixStopRow + "#" + String.format(suffixFormatter, 0);
-      scan = new Scan().withStartRow(Bytes.toBytes(startRow))
-        .withStopRow(Bytes.toBytes(stopRow));
-      exists = scanner.shouldUseScanner(scan, store, Long.MIN_VALUE);
-      assertTrue(exists);
-
-      // startRow contains delimiter, but stopRow does not have the same prefix as startRow.
-      prefixStartRow = String.format(prefixFormatter, prefixRowCount-2);
-      prefixStopRow = String.format(prefixFormatter, prefixRowCount-1);
-      startRow = prefixStartRow + "#" + String.format(suffixFormatter, 0);
-      stopRow = prefixStopRow + "#" + String.format(suffixFormatter, 0);
-      scan = new Scan().withStartRow(Bytes.toBytes(startRow))
-        .withStopRow(Bytes.toBytes(stopRow));
-      exists = scanner.shouldUseScanner(scan, store, Long.MIN_VALUE);
-      assertTrue(exists);
-    }

     reader.close(true); // evict because we are about to delete the file
     fs.delete(f, true);
   }
 }
-}

@@ -105,7 +105,6 @@ public class TestScanWithBloomError {
     conf = TEST_UTIL.getConfiguration();
     fs = FileSystem.get(conf);
     conf.setInt(BloomFilterUtil.PREFIX_LENGTH_KEY, 10);
-    conf.set(BloomFilterUtil.DELIMITER_KEY, "#");
   }

   @Test

@@ -144,7 +144,6 @@ public class TestSeekOptimizations {
     rand = new Random(91238123L);
     expectedKVs.clear();
     TEST_UTIL.getConfiguration().setInt(BloomFilterUtil.PREFIX_LENGTH_KEY, 10);
-    TEST_UTIL.getConfiguration().set(BloomFilterUtil.DELIMITER_KEY, "#");
   }

   @Test
@@ -485,6 +484,5 @@ public class TestSeekOptimizations {
       HBaseTestingUtility.safeGetAsStr(actual, i) + " (length " + aLen + ")" + additionalMsg);
     }
   }
-
 }

@@ -11,7 +11,7 @@ package org.apache.hadoop.hbase.thrift.generated;
  * An AlreadyExists exceptions signals that a table with the specified
  * name already exists
  */
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-01-27")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-02-22")
 public class AlreadyExists extends org.apache.thrift.TException implements org.apache.thrift.TBase<AlreadyExists, AlreadyExists._Fields>, java.io.Serializable, Cloneable, Comparable<AlreadyExists> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("AlreadyExists");

@@ -10,7 +10,7 @@ package org.apache.hadoop.hbase.thrift.generated;
 /**
  * A BatchMutation object is used to apply a number of Mutations to a single row.
  */
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-01-27")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-02-22")
 public class BatchMutation implements org.apache.thrift.TBase<BatchMutation, BatchMutation._Fields>, java.io.Serializable, Cloneable, Comparable<BatchMutation> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("BatchMutation");

@@ -12,7 +12,7 @@ package org.apache.hadoop.hbase.thrift.generated;
 * such as the number of versions, compression settings, etc. It is
 * used as input when creating a table or adding a column.
 */
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-01-27")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-02-22")
 public class ColumnDescriptor implements org.apache.thrift.TBase<ColumnDescriptor, ColumnDescriptor._Fields>, java.io.Serializable, Cloneable, Comparable<ColumnDescriptor> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("ColumnDescriptor");

@@ -7,7 +7,7 @@
 package org.apache.hadoop.hbase.thrift.generated;

 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"})
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-01-27")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-02-22")
 public class Hbase {

   public interface Iface {

@@ -12,7 +12,7 @@ package org.apache.hadoop.hbase.thrift.generated;
 * to the Hbase master or an Hbase region server. Also used to return
 * more general Hbase error conditions.
 */
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-01-27")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-02-22")
 public class IOError extends org.apache.thrift.TException implements org.apache.thrift.TBase<IOError, IOError._Fields>, java.io.Serializable, Cloneable, Comparable<IOError> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("IOError");

@@ -11,7 +11,7 @@ package org.apache.hadoop.hbase.thrift.generated;
 * An IllegalArgument exception indicates an illegal or invalid
 * argument was passed into a procedure.
 */
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-01-27")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-02-22")
 public class IllegalArgument extends org.apache.thrift.TException implements org.apache.thrift.TBase<IllegalArgument, IllegalArgument._Fields>, java.io.Serializable, Cloneable, Comparable<IllegalArgument> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("IllegalArgument");

@@ -10,7 +10,7 @@ package org.apache.hadoop.hbase.thrift.generated;
 /**
  * A Mutation object is used to either update or delete a column-value.
  */
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-01-27")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-02-22")
 public class Mutation implements org.apache.thrift.TBase<Mutation, Mutation._Fields>, java.io.Serializable, Cloneable, Comparable<Mutation> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("Mutation");

@@ -10,7 +10,7 @@ package org.apache.hadoop.hbase.thrift.generated;
 /**
  * An Append object is used to specify the parameters for performing the append operation.
  */
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-01-27")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-02-22")
 public class TAppend implements org.apache.thrift.TBase<TAppend, TAppend._Fields>, java.io.Serializable, Cloneable, Comparable<TAppend> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TAppend");

@@ -13,7 +13,7 @@ package org.apache.hadoop.hbase.thrift.generated;
 * the timestamp of a cell to a first-class value, making it easy to take
 * note of temporal data. Cell is used all the way from HStore up to HTable.
 */
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-01-27")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-02-22")
 public class TCell implements org.apache.thrift.TBase<TCell, TCell._Fields>, java.io.Serializable, Cloneable, Comparable<TCell> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TCell");

@@ -10,7 +10,7 @@ package org.apache.hadoop.hbase.thrift.generated;
 /**
  * Holds column name and the cell.
  */
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-01-27")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-02-22")
 public class TColumn implements org.apache.thrift.TBase<TColumn, TColumn._Fields>, java.io.Serializable, Cloneable, Comparable<TColumn> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TColumn");

@@ -11,7 +11,7 @@ package org.apache.hadoop.hbase.thrift.generated;
 * For increments that are not incrementColumnValue
 * equivalents.
 */
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-01-27")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-02-22")
 public class TIncrement implements org.apache.thrift.TBase<TIncrement, TIncrement._Fields>, java.io.Serializable, Cloneable, Comparable<TIncrement> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TIncrement");

@@ -10,7 +10,7 @@ package org.apache.hadoop.hbase.thrift.generated;
 /**
  * A TRegionInfo contains information about an HTable region.
  */
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-01-27")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-02-22")
 public class TRegionInfo implements org.apache.thrift.TBase<TRegionInfo, TRegionInfo._Fields>, java.io.Serializable, Cloneable, Comparable<TRegionInfo> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TRegionInfo");

@@ -10,7 +10,7 @@ package org.apache.hadoop.hbase.thrift.generated;
 /**
  * Holds row name and then a map of columns to cells.
  */
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-01-27")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-02-22")
 public class TRowResult implements org.apache.thrift.TBase<TRowResult, TRowResult._Fields>, java.io.Serializable, Cloneable, Comparable<TRowResult> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TRowResult");

@@ -10,7 +10,7 @@ package org.apache.hadoop.hbase.thrift.generated;
 /**
  * A Scan object is used to specify scanner parameters when opening a scanner.
  */
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-01-27")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-02-22")
 public class TScan implements org.apache.thrift.TBase<TScan, TScan._Fields>, java.io.Serializable, Cloneable, Comparable<TScan> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TScan");

@@ -880,7 +880,6 @@ public class ThriftUtilities {
       case 1: return BloomType.ROW;
       case 2: return BloomType.ROWCOL;
       case 3: return BloomType.ROWPREFIX_FIXED_LENGTH;
-      case 4: return BloomType.ROWPREFIX_DELIMITED;
       default: return BloomType.ROW;
     }
   }
@@ -1105,7 +1104,6 @@ public class ThriftUtilities {
       case ROW: return TBloomFilterType.ROW;
       case ROWCOL: return TBloomFilterType.ROWCOL;
       case ROWPREFIX_FIXED_LENGTH: return TBloomFilterType.ROWPREFIX_FIXED_LENGTH;
-      case ROWPREFIX_DELIMITED: return TBloomFilterType.ROWPREFIX_DELIMITED;
       default: return TBloomFilterType.ROW;
     }
   }

@@ -7,7 +7,7 @@
 package org.apache.hadoop.hbase.thrift2.generated;

 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"})
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-01-27")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-02-22")
 public class TAppend implements org.apache.thrift.TBase<TAppend, TAppend._Fields>, java.io.Serializable, Cloneable, Comparable<TAppend> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TAppend");

@@ -7,7 +7,7 @@
 package org.apache.hadoop.hbase.thrift2.generated;

 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"})
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-01-27")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-02-22")
 public class TAuthorization implements org.apache.thrift.TBase<TAuthorization, TAuthorization._Fields>, java.io.Serializable, Cloneable, Comparable<TAuthorization> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TAuthorization");

@@ -11,7 +11,7 @@ package org.apache.hadoop.hbase.thrift2.generated;
 * Thrift wrapper around
 * org.apache.hadoop.hbase.regionserver.BloomType
 */
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-01-27")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-02-22")
 public enum TBloomFilterType implements org.apache.thrift.TEnum {
   /**
    * Bloomfilters disabled
@@ -28,11 +28,7 @@ public enum TBloomFilterType implements org.apache.thrift.TEnum {
   /**
    * Bloom enabled with Table row prefix as Key, specify the length of the prefix
    */
-  ROWPREFIX_FIXED_LENGTH(3),
-  /**
-   * Bloom enabled with Table row prefix as Key, specify the delimiter of the prefix
-   */
-  ROWPREFIX_DELIMITED(4);
+  ROWPREFIX_FIXED_LENGTH(3);

   private final int value;
@@ -62,8 +58,6 @@ public enum TBloomFilterType implements org.apache.thrift.TEnum {
       return ROWCOL;
     case 3:
       return ROWPREFIX_FIXED_LENGTH;
-    case 4:
-      return ROWPREFIX_DELIMITED;
     default:
       return null;
     }

View File

@ -7,7 +7,7 @@
package org.apache.hadoop.hbase.thrift2.generated; package org.apache.hadoop.hbase.thrift2.generated;
@SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"}) @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"})
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-01-27") @javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-02-22")
public class TCellVisibility implements org.apache.thrift.TBase<TCellVisibility, TCellVisibility._Fields>, java.io.Serializable, Cloneable, Comparable<TCellVisibility> { public class TCellVisibility implements org.apache.thrift.TBase<TCellVisibility, TCellVisibility._Fields>, java.io.Serializable, Cloneable, Comparable<TCellVisibility> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TCellVisibility"); private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TCellVisibility");

View File

@ -12,7 +12,7 @@ package org.apache.hadoop.hbase.thrift2.generated;
* in a HBase table by column family and optionally * in a HBase table by column family and optionally
* a column qualifier and timestamp * a column qualifier and timestamp
*/ */
@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-01-27") @javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-02-22")
public class TColumn implements org.apache.thrift.TBase<TColumn, TColumn._Fields>, java.io.Serializable, Cloneable, Comparable<TColumn> { public class TColumn implements org.apache.thrift.TBase<TColumn, TColumn._Fields>, java.io.Serializable, Cloneable, Comparable<TColumn> {
private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TColumn"); private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TColumn");

View File

@@ -11,7 +11,7 @@ package org.apache.hadoop.hbase.thrift2.generated;
  * Thrift wrapper around
  * org.apache.hadoop.hbase.client.ColumnFamilyDescriptor
  */
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-01-27")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-02-22")
 public class TColumnFamilyDescriptor implements org.apache.thrift.TBase<TColumnFamilyDescriptor, TColumnFamilyDescriptor._Fields>, java.io.Serializable, Cloneable, Comparable<TColumnFamilyDescriptor> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TColumnFamilyDescriptor");

View File

@@ -10,7 +10,7 @@ package org.apache.hadoop.hbase.thrift2.generated;
 /**
  * Represents a single cell and the amount to increment it by
  */
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-01-27")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-02-22")
 public class TColumnIncrement implements org.apache.thrift.TBase<TColumnIncrement, TColumnIncrement._Fields>, java.io.Serializable, Cloneable, Comparable<TColumnIncrement> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TColumnIncrement");

View File

@@ -10,7 +10,7 @@ package org.apache.hadoop.hbase.thrift2.generated;
 /**
  * Represents a single cell and its value.
  */
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-01-27")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-02-22")
 public class TColumnValue implements org.apache.thrift.TBase<TColumnValue, TColumnValue._Fields>, java.io.Serializable, Cloneable, Comparable<TColumnValue> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TColumnValue");

View File

@@ -11,7 +11,7 @@ package org.apache.hadoop.hbase.thrift2.generated;
  * Thrift wrapper around
  * org.apache.hadoop.hbase.filter.CompareFilter$CompareOp.
  */
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-01-27")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-02-22")
 public enum TCompareOp implements org.apache.thrift.TEnum {
   LESS(0),
   LESS_OR_EQUAL(1),

View File

@@ -11,7 +11,7 @@ package org.apache.hadoop.hbase.thrift2.generated;
  * Thrift wrapper around
  * org.apache.hadoop.hbase.io.compress.Algorithm
  */
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-01-27")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-02-22")
 public enum TCompressionAlgorithm implements org.apache.thrift.TEnum {
   LZO(0),
   GZ(1),

View File

@@ -12,7 +12,7 @@ package org.apache.hadoop.hbase.thrift2.generated;
  * - STRONG means reads only from primary region
  * - TIMELINE means reads might return values from secondary region replicas
  */
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-01-27")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-02-22")
 public enum TConsistency implements org.apache.thrift.TEnum {
   STRONG(1),
   TIMELINE(2);

View File

@@ -11,7 +11,7 @@ package org.apache.hadoop.hbase.thrift2.generated;
  * Thrift wrapper around
  * org.apache.hadoop.hbase.io.encoding.DataBlockEncoding
  */
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-01-27")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-02-22")
 public enum TDataBlockEncoding implements org.apache.thrift.TEnum {
   /**
    * Disable data block encoding.

View File

@@ -33,7 +33,7 @@ package org.apache.hadoop.hbase.thrift2.generated;
  * by changing the durability. If you don't provide durability, it defaults to
  * column family's default setting for durability.
  */
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-01-27")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-02-22")
 public class TDelete implements org.apache.thrift.TBase<TDelete, TDelete._Fields>, java.io.Serializable, Cloneable, Comparable<TDelete> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TDelete");

View File

@@ -12,7 +12,7 @@ package org.apache.hadoop.hbase.thrift2.generated;
  * - DELETE_COLUMN means exactly one version will be removed,
  * - DELETE_COLUMNS means previous versions will also be removed.
  */
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-01-27")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-02-22")
 public enum TDeleteType implements org.apache.thrift.TEnum {
   DELETE_COLUMN(0),
   DELETE_COLUMNS(1),

View File

@@ -14,7 +14,7 @@ package org.apache.hadoop.hbase.thrift2.generated;
  * - SYNC_WAL means write the Mutation to the WAL synchronously,
  * - FSYNC_WAL means Write the Mutation to the WAL synchronously and force the entries to disk.
  */
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-01-27")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-02-22")
 public enum TDurability implements org.apache.thrift.TEnum {
   USE_DEFAULT(0),
   SKIP_WAL(1),

View File

@@ -20,7 +20,7 @@ package org.apache.hadoop.hbase.thrift2.generated;
  * If you specify a time range and a timestamp the range is ignored.
  * Timestamps on TColumns are ignored.
  */
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-01-27")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-02-22")
 public class TGet implements org.apache.thrift.TBase<TGet, TGet._Fields>, java.io.Serializable, Cloneable, Comparable<TGet> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TGet");

View File

@@ -7,7 +7,7 @@
 package org.apache.hadoop.hbase.thrift2.generated;
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"})
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-01-27")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-02-22")
 public class THBaseService {
   public interface Iface {

View File

@@ -7,7 +7,7 @@
 package org.apache.hadoop.hbase.thrift2.generated;
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"})
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-01-27")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-02-22")
 public class THRegionInfo implements org.apache.thrift.TBase<THRegionInfo, THRegionInfo._Fields>, java.io.Serializable, Cloneable, Comparable<THRegionInfo> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("THRegionInfo");

View File

@@ -7,7 +7,7 @@
 package org.apache.hadoop.hbase.thrift2.generated;
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"})
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-01-27")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-02-22")
 public class THRegionLocation implements org.apache.thrift.TBase<THRegionLocation, THRegionLocation._Fields>, java.io.Serializable, Cloneable, Comparable<THRegionLocation> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("THRegionLocation");

View File

@@ -12,7 +12,7 @@ package org.apache.hadoop.hbase.thrift2.generated;
  * to the HBase master or a HBase region server. Also used to return
  * more general HBase error conditions.
  */
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-01-27")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-02-22")
 public class TIOError extends org.apache.thrift.TException implements org.apache.thrift.TBase<TIOError, TIOError._Fields>, java.io.Serializable, Cloneable, Comparable<TIOError> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TIOError");

View File

@@ -11,7 +11,7 @@ package org.apache.hadoop.hbase.thrift2.generated;
  * A TIllegalArgument exception indicates an illegal or invalid
  * argument was passed into a procedure.
  */
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-01-27")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-02-22")
 public class TIllegalArgument extends org.apache.thrift.TException implements org.apache.thrift.TBase<TIllegalArgument, TIllegalArgument._Fields>, java.io.Serializable, Cloneable, Comparable<TIllegalArgument> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TIllegalArgument");

View File

@@ -14,7 +14,7 @@ package org.apache.hadoop.hbase.thrift2.generated;
  * by changing the durability. If you don't provide durability, it defaults to
  * column family's default setting for durability.
  */
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-01-27")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-02-22")
 public class TIncrement implements org.apache.thrift.TBase<TIncrement, TIncrement._Fields>, java.io.Serializable, Cloneable, Comparable<TIncrement> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TIncrement");

View File

@@ -11,7 +11,7 @@ package org.apache.hadoop.hbase.thrift2.generated;
  * Thrift wrapper around
  * org.apache.hadoop.hbase.KeepDeletedCells
  */
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-01-27")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-02-22")
 public enum TKeepDeletedCells implements org.apache.thrift.TEnum {
   /**
    * Deleted Cells are not retained.

View File

@@ -10,7 +10,7 @@ package org.apache.hadoop.hbase.thrift2.generated;
 /**
  * Atomic mutation for the specified row. It can be either Put or Delete.
  */
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-01-27")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-02-22")
 public class TMutation extends org.apache.thrift.TUnion<TMutation, TMutation._Fields> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TMutation");
   private static final org.apache.thrift.protocol.TField PUT_FIELD_DESC = new org.apache.thrift.protocol.TField("put", org.apache.thrift.protocol.TType.STRUCT, (short)1);

View File

@@ -11,7 +11,7 @@ package org.apache.hadoop.hbase.thrift2.generated;
  * Thrift wrapper around
  * org.apache.hadoop.hbase.NamespaceDescriptor
  */
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-01-27")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-02-22")
 public class TNamespaceDescriptor implements org.apache.thrift.TBase<TNamespaceDescriptor, TNamespaceDescriptor._Fields>, java.io.Serializable, Cloneable, Comparable<TNamespaceDescriptor> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TNamespaceDescriptor");

View File

@@ -19,7 +19,7 @@ package org.apache.hadoop.hbase.thrift2.generated;
  * by changing the durability. If you don't provide durability, it defaults to
  * column family's default setting for durability.
  */
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-01-27")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-02-22")
 public class TPut implements org.apache.thrift.TBase<TPut, TPut._Fields>, java.io.Serializable, Cloneable, Comparable<TPut> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TPut");

View File

@@ -7,7 +7,7 @@
 package org.apache.hadoop.hbase.thrift2.generated;
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-01-27")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-02-22")
 public enum TReadType implements org.apache.thrift.TEnum {
   DEFAULT(1),
   STREAM(2),

View File

@@ -10,7 +10,7 @@ package org.apache.hadoop.hbase.thrift2.generated;
 /**
  * if no Result is found, row and columnValues will not be set.
  */
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-01-27")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-02-22")
 public class TResult implements org.apache.thrift.TBase<TResult, TResult._Fields>, java.io.Serializable, Cloneable, Comparable<TResult> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TResult");

View File

@@ -10,7 +10,7 @@ package org.apache.hadoop.hbase.thrift2.generated;
 /**
  * A TRowMutations object is used to apply a number of Mutations to a single row.
  */
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-01-27")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-02-22")
 public class TRowMutations implements org.apache.thrift.TBase<TRowMutations, TRowMutations._Fields>, java.io.Serializable, Cloneable, Comparable<TRowMutations> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TRowMutations");

View File

@@ -11,7 +11,7 @@ package org.apache.hadoop.hbase.thrift2.generated;
  * Any timestamps in the columns are ignored but the colFamTimeRangeMap included, use timeRange to select by timestamp.
  * Max versions defaults to 1.
  */
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-01-27")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-02-22")
 public class TScan implements org.apache.thrift.TBase<TScan, TScan._Fields>, java.io.Serializable, Cloneable, Comparable<TScan> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TScan");

View File

@@ -7,7 +7,7 @@
 package org.apache.hadoop.hbase.thrift2.generated;
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"})
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-01-27")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-02-22")
 public class TServerName implements org.apache.thrift.TBase<TServerName, TServerName._Fields>, java.io.Serializable, Cloneable, Comparable<TServerName> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TServerName");

View File

@@ -11,7 +11,7 @@ package org.apache.hadoop.hbase.thrift2.generated;
  * Thrift wrapper around
  * org.apache.hadoop.hbase.client.TableDescriptor
  */
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-01-27")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-02-22")
 public class TTableDescriptor implements org.apache.thrift.TBase<TTableDescriptor, TTableDescriptor._Fields>, java.io.Serializable, Cloneable, Comparable<TTableDescriptor> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TTableDescriptor");

View File

@@ -11,7 +11,7 @@ package org.apache.hadoop.hbase.thrift2.generated;
  * Thrift wrapper around
  * org.apache.hadoop.hbase.TableName
  */
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-01-27")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-02-22")
 public class TTableName implements org.apache.thrift.TBase<TTableName, TTableName._Fields>, java.io.Serializable, Cloneable, Comparable<TTableName> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TTableName");

View File

@@ -7,7 +7,7 @@
 package org.apache.hadoop.hbase.thrift2.generated;
 @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"})
-@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-01-27")
+@javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.12.0)", date = "2019-02-22")
 public class TTimeRange implements org.apache.thrift.TBase<TTimeRange, TTimeRange._Fields>, java.io.Serializable, Cloneable, Comparable<TTimeRange> {
   private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("TTimeRange");

View File

@@ -339,10 +339,6 @@ enum TBloomFilterType {
    * Bloom enabled with Table row prefix as Key, specify the length of the prefix
    */
   ROWPREFIX_FIXED_LENGTH = 3,
-  /**
-   * Bloom enabled with Table row prefix as Key, specify the delimiter of the prefix
-   */
-  ROWPREFIX_DELIMITED = 4
 }

 /**
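With the delimited variant also dropped from the IDL, ROWPREFIX_FIXED_LENGTH is the only remaining prefix-based bloom type, and it takes its prefix length from the column family configuration rather than from an enum parameter. A minimal sketch of enabling it via the Java client API; the helper class, family name, and prefix length are illustrative, not part of this patch:

import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.regionserver.BloomType;
import org.apache.hadoop.hbase.util.BloomFilterUtil;
import org.apache.hadoop.hbase.util.Bytes;

public final class RowPrefixBloomExample {
  private RowPrefixBloomExample() {
  }

  // Builds a column family descriptor whose bloom keys are the first
  // prefixLength bytes of each row key.
  public static ColumnFamilyDescriptor withRowPrefixBloom(String family, int prefixLength) {
    return ColumnFamilyDescriptorBuilder
        .newBuilder(Bytes.toBytes(family))
        .setBloomFilterType(BloomType.ROWPREFIX_FIXED_LENGTH)
        .setConfiguration(BloomFilterUtil.PREFIX_LENGTH_KEY, String.valueOf(prefixLength))
        .build();
  }
}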