HBASE-15085 IllegalStateException was thrown when scanning on bulkloaded HFiles (Victor Xu)
ramkrishna 2016-01-12 14:36:48 +05:30
parent 83c506d9d4
commit 840f5ea686
3 changed files with 73 additions and 4 deletions
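Root cause, in brief (hedged from the patch comment and surrounding code): when a bulkloaded HFile straddles a region boundary, splitStoreFile writes the two half-files with the column family's data block encoding, but it also copied every file-info meta key verbatim from the source file, including DATA_BLOCK_ENCODING. If the source file's encoding differed from the family's, the copied meta contradicted the blocks actually written, and scanning the loaded data threw an IllegalStateException. The fix skips copying that one key, so the half-file's meta stays consistent with its data blocks.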

LoadIncrementalHFiles.java

@@ -60,6 +60,7 @@ import org.apache.hadoop.hbase.io.hfile.CacheConfig;
 import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.io.hfile.HFileContext;
 import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
+import org.apache.hadoop.hbase.io.hfile.HFileDataBlockEncoder;
 import org.apache.hadoop.hbase.io.hfile.HFileScanner;
 import org.apache.hadoop.hbase.protobuf.ProtobufUtil;
 import org.apache.hadoop.hbase.regionserver.BloomType;
@@ -929,6 +930,11 @@ public class LoadIncrementalHFiles extends Configured implements Tool {
   }

   private static boolean shouldCopyHFileMetaKey(byte[] key) {
+    // skip encoding to keep hfile meta consistent with data block info, see HBASE-15085
+    if (Bytes.equals(key, HFileDataBlockEncoder.DATA_BLOCK_ENCODING)) {
+      return false;
+    }
     return !HFile.isReservedFileInfoKey(key);
   }
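To see where the predicate bites, here is a minimal sketch of the copy loop that consumes it when file-info is carried over from the source HFile into a freshly written half-file; the method and variable names (copyMeta, sourceFileInfo, halfWriter) are illustrative, not the patch's actual code:

  // Illustrative sketch only: copy file-info entries, letting the predicate
  // filter out reserved keys and, with this fix, DATA_BLOCK_ENCODING, so the
  // half-file's meta matches the encoding its writer actually used.
  static void copyMeta(Map<byte[], byte[]> sourceFileInfo, HFile.Writer halfWriter)
      throws IOException {
    for (Map.Entry<byte[], byte[]> e : sourceFileInfo.entrySet()) {
      if (shouldCopyHFileMetaKey(e.getKey())) {
        halfWriter.appendFileInfo(e.getKey(), e.getValue());
      }
    }
  }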

TestLoadIncrementalHFiles.java

@@ -41,6 +41,7 @@ import org.apache.hadoop.hbase.TableNotFoundException;
 import org.apache.hadoop.hbase.client.Table;
 import org.apache.hadoop.hbase.codec.KeyValueCodecWithTags;
 import org.apache.hadoop.hbase.coprocessor.CoprocessorHost;
+import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
 import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.io.hfile.HFileScanner;
@@ -482,6 +483,51 @@ public class TestLoadIncrementalHFiles {
     assertEquals(1000, rowCount);
   }

+  @Test
+  public void testSplitStoreFileWithNoneToNone() throws IOException {
+    testSplitStoreFileWithDifferentEncoding(DataBlockEncoding.NONE, DataBlockEncoding.NONE);
+  }
+
+  @Test
+  public void testSplitStoreFileWithEncodedToEncoded() throws IOException {
+    testSplitStoreFileWithDifferentEncoding(DataBlockEncoding.DIFF, DataBlockEncoding.DIFF);
+  }
+
+  @Test
+  public void testSplitStoreFileWithEncodedToNone() throws IOException {
+    testSplitStoreFileWithDifferentEncoding(DataBlockEncoding.DIFF, DataBlockEncoding.NONE);
+  }
+
+  @Test
+  public void testSplitStoreFileWithNoneToEncoded() throws IOException {
+    testSplitStoreFileWithDifferentEncoding(DataBlockEncoding.NONE, DataBlockEncoding.DIFF);
+  }
+
+  private void testSplitStoreFileWithDifferentEncoding(DataBlockEncoding bulkloadEncoding,
+      DataBlockEncoding cfEncoding) throws IOException {
+    Path dir = util.getDataTestDirOnTestFS("testSplitHFileWithDifferentEncoding");
+    FileSystem fs = util.getTestFileSystem();
+    Path testIn = new Path(dir, "testhfile");
+    HColumnDescriptor familyDesc = new HColumnDescriptor(FAMILY);
+    familyDesc.setDataBlockEncoding(cfEncoding);
+    HFileTestUtil.createHFileWithDataBlockEncoding(
+        util.getConfiguration(), fs, testIn, bulkloadEncoding,
+        FAMILY, QUALIFIER, Bytes.toBytes("aaa"), Bytes.toBytes("zzz"), 1000);
+
+    Path bottomOut = new Path(dir, "bottom.out");
+    Path topOut = new Path(dir, "top.out");
+    LoadIncrementalHFiles.splitStoreFile(
+        util.getConfiguration(), testIn,
+        familyDesc, Bytes.toBytes("ggg"),
+        bottomOut,
+        topOut);
+
+    int rowCount = verifyHFile(bottomOut);
+    rowCount += verifyHFile(topOut);
+    assertEquals(1000, rowCount);
+  }
+
   private int verifyHFile(Path p) throws IOException {
     Configuration conf = util.getConfiguration();
     HFile.Reader reader = HFile.createReader(
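The verifyHFile helper is cut off by the diff context above. For readability, a hedged reconstruction of such a row-counting verifier, assuming the HFile.createReader(FileSystem, Path, CacheConfig, Configuration) signature of this branch; what matters to the tests is only that both halves together still scan to 1000 rows:

  // Hedged sketch, not the verbatim test code: scan the HFile end to end
  // and return the number of cells seen. A meta/block encoding mismatch,
  // as in HBASE-15085, would make this scan blow up.
  private int verifyHFile(Path p) throws IOException {
    Configuration conf = util.getConfiguration();
    HFile.Reader reader = HFile.createReader(
        p.getFileSystem(conf), p, new CacheConfig(conf), conf);
    reader.loadFileInfo();
    HFileScanner scanner = reader.getScanner(false, false);
    scanner.seekTo();
    int count = 0;
    do {
      count++;
    } while (scanner.next());
    reader.close();
    return count;
  }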

HFileTestUtil.java

@@ -32,6 +32,7 @@ import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.ResultScanner;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.Table;
+import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.io.hfile.CacheConfig;
 import org.apache.hadoop.hbase.io.hfile.HFile;
 import org.apache.hadoop.hbase.io.hfile.HFileContext;
@@ -59,7 +60,21 @@ public class HFileTestUtil {
       FileSystem fs, Path path,
       byte[] family, byte[] qualifier,
       byte[] startKey, byte[] endKey, int numRows) throws IOException {
-    createHFile(configuration, fs, path, family, qualifier, startKey, endKey,
-        numRows, false);
+    createHFile(configuration, fs, path, DataBlockEncoding.NONE, family, qualifier,
+        startKey, endKey, numRows, false);
+  }
+
+  /**
+   * Create an HFile with the given number of rows between a given
+   * start key and end key @ family:qualifier. The value will be the key value.
+   * This file will use certain data block encoding algorithm.
+   */
+  public static void createHFileWithDataBlockEncoding(
+      Configuration configuration,
+      FileSystem fs, Path path, DataBlockEncoding encoding,
+      byte[] family, byte[] qualifier,
+      byte[] startKey, byte[] endKey, int numRows) throws IOException {
+    createHFile(configuration, fs, path, encoding, family, qualifier, startKey, endKey,
+        numRows, false);
   }
@@ -73,7 +88,8 @@ public class HFileTestUtil {
       FileSystem fs, Path path,
       byte[] family, byte[] qualifier,
       byte[] startKey, byte[] endKey, int numRows) throws IOException {
-    createHFile(configuration, fs, path, family, qualifier, startKey, endKey, numRows, true);
+    createHFile(configuration, fs, path, DataBlockEncoding.NONE, family, qualifier,
+        startKey, endKey, numRows, true);
   }

   /**
@@ -84,11 +100,12 @@ public class HFileTestUtil {
    */
   public static void createHFile(
       Configuration configuration,
-      FileSystem fs, Path path,
+      FileSystem fs, Path path, DataBlockEncoding encoding,
       byte[] family, byte[] qualifier,
       byte[] startKey, byte[] endKey, int numRows, boolean withTag) throws IOException {
     HFileContext meta = new HFileContextBuilder()
         .withIncludesTags(withTag)
+        .withDataBlockEncoding(encoding)
         .build();
     HFile.Writer writer = HFile.getWriterFactory(configuration, new CacheConfig(configuration))
         .withPath(fs, path)
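The hunk is truncated mid-chain; the writer factory presumably finishes by attaching the context and creating the writer, along these lines (a sketch, assuming the usual withFileContext/create chain of this API):

  // Hedged continuation of the truncated builder chain: the HFileContext built
  // above, now carrying the requested data block encoding, is handed to the writer.
  HFile.Writer writer = HFile.getWriterFactory(configuration, new CacheConfig(configuration))
      .withPath(fs, path)
      .withFileContext(meta)
      .create();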
@@ -141,4 +158,4 @@
       }
     }
   }
 }