HBASE-27688 When HFile splitting occurs during bulkload, the CREATE_TIME_TS of HFileInfo is 0 (#5082)
Co-authored-by: alanzhao <alanzhao@126.com>
Signed-off-by: Duo Zhang <zhangduo@apache.org>
parent 0491524eb1
commit c013c7c72c
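In short: when BulkLoadHFilesTool splits an incoming HFile at a region boundary, the rewritten half-files were built from an HFileContext whose create time was never set, so their CREATE_TIME_TS file-info entry defaulted to 0. The patch stamps the context with EnvironmentEdgeManager.currentTime() and adds a test asserting that both split outputs carry a positive create time.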
BulkLoadHFilesTool.java:

@@ -88,6 +88,7 @@ import org.apache.hadoop.hbase.regionserver.StoreUtils;
 import org.apache.hadoop.hbase.security.UserProvider;
 import org.apache.hadoop.hbase.security.token.FsDelegationToken;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
 import org.apache.hadoop.hbase.util.FSVisitor;
 import org.apache.hadoop.hbase.util.FutureUtils;
 import org.apache.hadoop.hbase.util.Pair;
@@ -767,7 +768,7 @@ public class BulkLoadHFilesTool extends Configured implements BulkLoadHFiles, Tool
         .withChecksumType(StoreUtils.getChecksumType(conf))
         .withBytesPerCheckSum(StoreUtils.getBytesPerChecksum(conf)).withBlockSize(blocksize)
         .withDataBlockEncoding(familyDescriptor.getDataBlockEncoding()).withIncludesTags(true)
-        .build();
+        .withCreateTime(EnvironmentEdgeManager.currentTime()).build();
       halfWriter = new StoreFileWriter.Builder(conf, cacheConf, fs).withFilePath(outFile)
         .withBloomType(bloomFilterType).withFileContext(hFileContext).build();
       HFileScanner scanner = halfReader.getScanner(false, false, false);
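To see why the one-line change matters, here is a minimal sketch (a hypothetical standalone class, not part of the patch, assuming hbase-common on the classpath): HFileContextBuilder leaves the create time at 0 unless withCreateTime(...) is called, which is exactly what the split half-files used to record.

import org.apache.hadoop.hbase.io.hfile.HFileContext;
import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;

// Hypothetical demo class, not part of the patch.
public class CreateTimeDemo {
  public static void main(String[] args) {
    // Before the fix: no create time set, so getFileCreateTime() returns 0
    // and the written file's CREATE_TIME_TS file-info entry is 0.
    HFileContext unstamped = new HFileContextBuilder().build();
    System.out.println(unstamped.getFileCreateTime()); // 0

    // After the fix: the bulkload split path stamps the current time.
    HFileContext stamped =
      new HFileContextBuilder().withCreateTime(EnvironmentEdgeManager.currentTime()).build();
    System.out.println(stamped.getFileCreateTime()); // positive epoch millis
  }
}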
TestBulkLoadHFiles.java:

@@ -18,6 +18,7 @@
 package org.apache.hadoop.hbase.tool;
 
 import static org.apache.hadoop.hbase.HBaseTestingUtil.countRows;
+import static org.hamcrest.Matchers.greaterThan;
 import static org.junit.Assert.assertArrayEquals;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertThrows;
@@ -63,6 +64,7 @@ import org.apache.hadoop.hbase.testclassification.MiscTests;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.CommonFSUtils;
 import org.apache.hadoop.hbase.util.HFileTestUtil;
+import org.hamcrest.MatcherAssert;
 import org.junit.AfterClass;
 import org.junit.BeforeClass;
 import org.junit.ClassRule;
@@ -567,6 +569,25 @@ public class TestBulkLoadHFiles {
     assertEquals(1000, rowCount);
   }
 
+  @Test
+  public void testSplitStoreFileWithCreateTimeTS() throws IOException {
+    Path dir = util.getDataTestDirOnTestFS("testSplitStoreFileWithCreateTimeTS");
+    FileSystem fs = util.getTestFileSystem();
+    Path testIn = new Path(dir, "testhfile");
+    ColumnFamilyDescriptor familyDesc = ColumnFamilyDescriptorBuilder.of(FAMILY);
+    HFileTestUtil.createHFile(util.getConfiguration(), fs, testIn, FAMILY, QUALIFIER,
+      Bytes.toBytes("aaa"), Bytes.toBytes("zzz"), 1000);
+
+    Path bottomOut = new Path(dir, "bottom.out");
+    Path topOut = new Path(dir, "top.out");
+
+    BulkLoadHFilesTool.splitStoreFile(util.getConfiguration(), testIn, familyDesc,
+      Bytes.toBytes("ggg"), bottomOut, topOut);
+
+    verifyHFileCreateTimeTS(bottomOut);
+    verifyHFileCreateTimeTS(topOut);
+  }
+
   @Test
   public void testSplitStoreFileWithNoneToNone() throws IOException {
     testSplitStoreFileWithDifferentEncoding(DataBlockEncoding.NONE, DataBlockEncoding.NONE);
@@ -623,6 +644,16 @@ public class TestBulkLoadHFiles {
     return count;
   }
 
+  private void verifyHFileCreateTimeTS(Path p) throws IOException {
+    Configuration conf = util.getConfiguration();
+
+    try (HFile.Reader reader =
+      HFile.createReader(p.getFileSystem(conf), p, new CacheConfig(conf), true, conf)) {
+      long fileCreateTime = reader.getHFileInfo().getHFileContext().getFileCreateTime();
+      MatcherAssert.assertThat(fileCreateTime, greaterThan(0L));
+    }
+  }
+
   private void addStartEndKeysForTest(TreeMap<byte[], Integer> map, byte[] first, byte[] last) {
     Integer value = map.containsKey(first) ? map.get(first) : 0;
     map.put(first, value + 1);
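A note on the design choice: the patch uses EnvironmentEdgeManager.currentTime() rather than System.currentTimeMillis() because HBase routes clock reads through an injectable "edge", which lets tests pin the clock. A minimal sketch of that mechanism (a hypothetical demo, assuming ManualEnvironmentEdge from hbase-common is available):

import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
import org.apache.hadoop.hbase.util.ManualEnvironmentEdge;

// Hypothetical demo, not part of the patch.
public class ClockInjectionDemo {
  public static void main(String[] args) {
    ManualEnvironmentEdge edge = new ManualEnvironmentEdge();
    edge.setValue(1234567890L); // pin the clock to a fixed timestamp
    EnvironmentEdgeManager.injectEdge(edge);

    // Any code that stamps times via the manager now sees the pinned value.
    System.out.println(EnvironmentEdgeManager.currentTime()); // 1234567890

    EnvironmentEdgeManager.reset(); // restore the default wall-clock edge
  }
}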