HBASE-15085 IllegalStateException was thrown when scanning on bulkloaded HFiles (Victor Xu)
parent 417e3c4a73
commit 89eba459f2
@@ -84,6 +84,7 @@ import org.apache.hadoop.hbase.io.hfile.CacheConfig;
 import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.io.hfile.HFileContext;
 import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
+import org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoder;
 import org.apache.hadoop.hbase.io.hfile.HFileScanner;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.regionserver.BloomType;
@@ -970,6 +971,11 @@ public class LoadIncrementalHFiles extends Configured implements Tool {
   }
 
   private static boolean shouldCopyHFileMetaKey(byte[] key) {
+    // skip encoding to keep hfile meta consistent with data block info, see HBASE-15085
+    if (Bytes.equals(key, HFileDataBlockEncoder.DATA_BLOCK_ENCODING)) {
+      return false;
+    }
+
     return !HFile.isReservedFileInfoKey(key);
   }
 
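The hunk above is the heart of the fix: when a bulk-loaded HFile straddles a region boundary, LoadIncrementalHFiles splits it and copies the source file's meta (file info) into each half file, and the DATA_BLOCK_ENCODING key must now be skipped because the halves are rewritten with the target column family's encoding. Below is a minimal, self-contained sketch of how such a copy filter behaves. It is not the HBase implementation: the string key "DATA_BLOCK_ENCODING", the "hfile." reserved-key prefix, and the Map standing in for file info are illustrative stand-ins for HFileDataBlockEncoder.DATA_BLOCK_ENCODING, HFile.isReservedFileInfoKey(), and the real FileInfo structure.

```java
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.LinkedHashMap;
import java.util.Map;

public class MetaKeyFilterSketch {
  // Stand-in for HFileDataBlockEncoder.DATA_BLOCK_ENCODING.
  static final byte[] DATA_BLOCK_ENCODING =
      "DATA_BLOCK_ENCODING".getBytes(StandardCharsets.UTF_8);
  // Stand-in for the reserved file-info key prefix that
  // HFile.isReservedFileInfoKey() checks for.
  static final byte[] RESERVED_PREFIX = "hfile.".getBytes(StandardCharsets.UTF_8);

  // Mirrors the patched shouldCopyHFileMetaKey(): never copy the encoding
  // key, so the half file's meta stays consistent with the blocks it writes.
  static boolean shouldCopyHFileMetaKey(byte[] key) {
    if (Arrays.equals(key, DATA_BLOCK_ENCODING)) {
      return false;
    }
    return !isReservedFileInfoKey(key);
  }

  static boolean isReservedFileInfoKey(byte[] key) {
    return key.length >= RESERVED_PREFIX.length
        && Arrays.equals(Arrays.copyOf(key, RESERVED_PREFIX.length), RESERVED_PREFIX);
  }

  public static void main(String[] args) {
    Map<String, String> sourceFileInfo = new LinkedHashMap<>();
    sourceFileInfo.put("DATA_BLOCK_ENCODING", "DIFF"); // dropped by the fix
    sourceFileInfo.put("BLOOM_FILTER_TYPE", "ROW");    // still copied
    sourceFileInfo.put("hfile.LASTKEY", "...");        // reserved, never copied

    for (String key : sourceFileInfo.keySet()) {
      boolean copied = shouldCopyHFileMetaKey(key.getBytes(StandardCharsets.UTF_8));
      System.out.println(key + " -> copied=" + copied);
    }
  }
}
```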
@@ -44,6 +44,7 @@ import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.codec.KeyValueCodecWithTags;
 import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
+import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
 import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.io.hfile.HFileScanner;
@@ -493,6 +494,51 @@ public class TestLoadIncrementalHFiles {
     assertEquals(1000, rowCount);
   }
 
+  @Test
+  public void testSplitStoreFileWithNoneToNone() throws IOException {
+    testSplitStoreFileWithDifferentEncoding(DataBlockEncoding.NONE, DataBlockEncoding.NONE);
+  }
+
+  @Test
+  public void testSplitStoreFileWithEncodedToEncoded() throws IOException {
+    testSplitStoreFileWithDifferentEncoding(DataBlockEncoding.DIFF, DataBlockEncoding.DIFF);
+  }
+
+  @Test
+  public void testSplitStoreFileWithEncodedToNone() throws IOException {
+    testSplitStoreFileWithDifferentEncoding(DataBlockEncoding.DIFF, DataBlockEncoding.NONE);
+  }
+
+  @Test
+  public void testSplitStoreFileWithNoneToEncoded() throws IOException {
+    testSplitStoreFileWithDifferentEncoding(DataBlockEncoding.NONE, DataBlockEncoding.DIFF);
+  }
+
+  private void testSplitStoreFileWithDifferentEncoding(DataBlockEncoding bulkloadEncoding,
+      DataBlockEncoding cfEncoding) throws IOException {
+    Path dir = util.getDataTestDirOnTestFS("testSplitHFileWithDifferentEncoding");
+    FileSystem fs = util.getTestFileSystem();
+    Path testIn = new Path(dir, "testhfile");
+    HColumnDescriptor familyDesc = new HColumnDescriptor(FAMILY);
+    familyDesc.setDataBlockEncoding(cfEncoding);
+    HFileTestUtil.createHFileWithDataBlockEncoding(
+        util.getConfiguration(), fs, testIn, bulkloadEncoding,
+        FAMILY, QUALIFIER, Bytes.toBytes("aaa"), Bytes.toBytes("zzz"), 1000);
+
+    Path bottomOut = new Path(dir, "bottom.out");
+    Path topOut = new Path(dir, "top.out");
+
+    LoadIncrementalHFiles.splitStoreFile(
+        util.getConfiguration(), testIn,
+        familyDesc, Bytes.toBytes("ggg"),
+        bottomOut,
+        topOut);
+
+    int rowCount = verifyHFile(bottomOut);
+    rowCount += verifyHFile(topOut);
+    assertEquals(1000, rowCount);
+  }
+
   private int verifyHFile(Path p) throws IOException {
     Configuration conf = util.getConfiguration();
     HFile.Reader reader = HFile.createReader(
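The four tests above exercise the full matrix of bulk-loaded file encoding versus column-family encoding (NONE and DIFF on each side), and each asserts that all 1000 rows survive the split. The mixed combinations (DIFF-to-NONE and NONE-to-DIFF) are the ones that, before this patch, left a half file whose copied DATA_BLOCK_ENCODING meta disagreed with its actual data blocks, reproducing the IllegalStateException from the subject line when the file was scanned.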
@@ -29,6 +29,7 @@ import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
 import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.io.hfile.HFileContext;
@@ -57,7 +58,21 @@ public class HFileTestUtil {
       FileSystem fs, Path path,
       byte[] family, byte[] qualifier,
       byte[] startKey, byte[] endKey, int numRows) throws IOException {
-    createHFile(configuration, fs, path, family, qualifier, startKey, endKey,
+    createHFile(configuration, fs, path, DataBlockEncoding.NONE, family, qualifier,
+        startKey, endKey, numRows, false);
+  }
+
+  /**
+   * Create an HFile with the given number of rows between a given
+   * start key and end key @ family:qualifier. The value will be the key value.
+   * This file will use certain data block encoding algorithm.
+   */
+  public static void createHFileWithDataBlockEncoding(
+      Configuration configuration,
+      FileSystem fs, Path path, DataBlockEncoding encoding,
+      byte[] family, byte[] qualifier,
+      byte[] startKey, byte[] endKey, int numRows) throws IOException {
+    createHFile(configuration, fs, path, encoding, family, qualifier, startKey, endKey,
         numRows, false);
   }
 
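Note the compatibility choice in this hunk: the existing createHFile overload keeps its signature and simply delegates with DataBlockEncoding.NONE, so current callers are untouched, while the new createHFileWithDataBlockEncoding overload exposes the encoding parameter that the split tests need.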
@@ -71,7 +86,8 @@ public class HFileTestUtil {
       FileSystem fs, Path path,
       byte[] family, byte[] qualifier,
       byte[] startKey, byte[] endKey, int numRows) throws IOException {
-    createHFile(configuration, fs, path, family, qualifier, startKey, endKey, numRows, true);
+    createHFile(configuration, fs, path, DataBlockEncoding.NONE, family, qualifier,
+        startKey, endKey, numRows, true);
   }
 
   /**
@@ -82,11 +98,12 @@
    */
   public static void createHFile(
       Configuration configuration,
-      FileSystem fs, Path path,
+      FileSystem fs, Path path, DataBlockEncoding encoding,
       byte[] family, byte[] qualifier,
       byte[] startKey, byte[] endKey, int numRows, boolean withTag) throws IOException {
     HFileContext meta = new HFileContextBuilder()
         .withIncludesTags(withTag)
+        .withDataBlockEncoding(encoding)
         .build();
     HFile.Writer writer = HFile.getWriterFactory(configuration, new CacheConfig(configuration))
         .withPath(fs, path)