HBASE-23288 - Backport HBASE-23251 (Add Column Family and Table Names to HFileContext) to branch-1 (#822)

Signed-off-by: Andrew Purtell <apurtell@apache.org>
Geoffrey Jacoby 2019-11-18 16:56:10 -08:00 committed by Andrew Purtell
parent 8e60b0c0c6
commit 5f36343949
9 changed files with 128 additions and 10 deletions

HFileContext.java

@@ -57,6 +57,8 @@ public class HFileContext implements HeapSize, Cloneable {
private Encryption.Context cryptoContext = Encryption.Context.NONE;
private long fileCreateTime;
private String hfileName;
private byte[] columnFamily;
private byte[] tableName;
//Empty constructor. Go with setters
public HFileContext() {
@@ -79,12 +81,15 @@ public class HFileContext implements HeapSize, Cloneable {
this.cryptoContext = context.cryptoContext;
this.fileCreateTime = context.fileCreateTime;
this.hfileName = context.hfileName;
this.columnFamily = context.columnFamily;
this.tableName = context.tableName;
}
public HFileContext(boolean useHBaseChecksum, boolean includesMvcc, boolean includesTags,
- Compression.Algorithm compressAlgo, boolean compressTags, ChecksumType checksumType,
- int bytesPerChecksum, int blockSize, DataBlockEncoding encoding,
- Encryption.Context cryptoContext, long fileCreateTime, String hfileName) {
+ Compression.Algorithm compressAlgo, boolean compressTags, ChecksumType checksumType,
+ int bytesPerChecksum, int blockSize, DataBlockEncoding encoding,
+ Encryption.Context cryptoContext, long fileCreateTime, String hfileName,
+ byte[] columnFamily, byte[] tableName) {
this.usesHBaseChecksum = useHBaseChecksum;
this.includesMvcc = includesMvcc;
this.includesTags = includesTags;
@@ -99,6 +104,8 @@ public class HFileContext implements HeapSize, Cloneable {
this.cryptoContext = cryptoContext;
this.fileCreateTime = fileCreateTime;
this.hfileName = hfileName;
this.columnFamily = columnFamily;
this.tableName = tableName;
}
/**
@@ -194,6 +201,14 @@ public class HFileContext implements HeapSize, Cloneable {
return this.hfileName;
}
public byte[] getColumnFamily() {
return this.columnFamily;
}
public byte[] getTableName() {
return this.tableName;
}
/**
* HeapSize implementation
* NOTE: The heap size should be altered as and when new state variables are added
@@ -207,7 +222,15 @@ public class HFileContext implements HeapSize, Cloneable {
2 * Bytes.SIZEOF_INT +
// usesHBaseChecksum, includesMvcc, includesTags and compressTags
4 * Bytes.SIZEOF_BOOLEAN +
//column family, table name byte arrays
2 * ClassSize.ARRAY + 2 * ClassSize.REFERENCE +
Bytes.SIZEOF_LONG);
if (this.columnFamily != null){
size += ClassSize.sizeOf(this.columnFamily, this.columnFamily.length);
}
if (this.tableName != null){
size += ClassSize.sizeOf(this.tableName, this.tableName.length);
}
return size;
}
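For orientation: the fixed 2 * ClassSize.ARRAY + 2 * ClassSize.REFERENCE term reserves per-field overhead unconditionally, while the two null checks above add each array's actual payload via ClassSize.sizeOf only when the field is set.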
@@ -233,9 +256,17 @@ public class HFileContext implements HeapSize, Cloneable {
sb.append(" includesTags="); sb.append(includesTags);
sb.append(" compressAlgo="); sb.append(compressAlgo);
sb.append(" compressTags="); sb.append(compressTags);
sb.append(" cryptoContext=[ "); sb.append(cryptoContext); sb.append(" ]");
sb.append(" cryptoContext=[ "); sb.append(cryptoContext);
sb.append(" ]");
if (tableName != null) {
sb.append(", tableName=");
sb.append(Bytes.toString(tableName));
}
if (columnFamily != null) {
sb.append(", columnFamily=");
sb.append(Bytes.toString(columnFamily));
}
sb.append(" ]");
return sb.toString();
}
}
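For reference, a minimal usage sketch of the new accessors (not part of the patch; names and values are illustrative), assuming the withColumnFamily/withTableName builder methods added in HFileContextBuilder below:

// Sketch only: exercises the new fields end to end.
HFileContext ctx = new HFileContextBuilder()
    .withHFileName("example-hfile")
    .withColumnFamily(Bytes.toBytes("cf"))
    .withTableName(Bytes.toBytes("my_table"))
    .build();
byte[] cf = ctx.getColumnFamily();   // raw byte[] as set above, or null if never set
byte[] table = ctx.getTableName();   // likewise nullable
// toString() now appends ", tableName=my_table, columnFamily=cf" before the closing " ]"
System.out.println(ctx);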

HFileContextBuilder.java

@@ -54,6 +54,8 @@ public class HFileContextBuilder {
private long fileCreateTime = 0;
private String hfileName = null;
private byte[] columnFamily = null;
private byte[] tableName = null;
public HFileContextBuilder() {}
@@ -73,6 +75,8 @@ public class HFileContextBuilder {
this.cryptoContext = hfc.getEncryptionContext();
this.fileCreateTime = hfc.getFileCreateTime();
this.hfileName = hfc.getHFileName();
this.columnFamily = hfc.getColumnFamily();
this.tableName = hfc.getTableName();
}
public HFileContextBuilder withHBaseCheckSum(boolean useHBaseCheckSum) {
@@ -135,9 +139,19 @@ public class HFileContextBuilder {
return this;
}
public HFileContextBuilder withColumnFamily(byte[] columnFamily){
this.columnFamily = columnFamily;
return this;
}
public HFileContextBuilder withTableName(byte[] tableName){
this.tableName = tableName;
return this;
}
public HFileContext build() {
return new HFileContext(usesHBaseChecksum, includesMvcc, includesTags, compression,
compressTags, checksumType, bytesPerChecksum, blocksize, encoding, cryptoContext,
fileCreateTime, hfileName);
fileCreateTime, hfileName, columnFamily, tableName);
}
}
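Because the copy constructor above now forwards both new fields, rebuilding a context from an existing one preserves them. A hedged sketch, assuming the HFileContextBuilder(HFileContext) constructor shown in the second hunk:

HFileContext original = new HFileContextBuilder()
    .withColumnFamily(Bytes.toBytes("cf"))
    .withTableName(Bytes.toBytes("t1"))
    .build();
HFileContext copy = new HFileContextBuilder(original).build();
// Both arrays survive the round trip.
assert Bytes.equals(original.getColumnFamily(), copy.getColumnFamily());
assert Bytes.equals(original.getTableName(), copy.getTableName());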

AbstractHFileWriter.java

@@ -201,8 +201,8 @@ public abstract class AbstractHFileWriter implements HFile.Writer {
int keyComp = comparator.compareOnlyKeyPortion(lastCell, cell);
if (keyComp > 0) {
throw new IOException("Added a key not lexically larger than"
+ " previous. Current cell = " + cell + ", lastCell = " + lastCell);
String message = getLexicalErrorMessage(cell);
throw new IOException(message);
} else if (keyComp == 0) {
isDuplicateKey = true;
}
@@ -210,6 +210,18 @@ public abstract class AbstractHFileWriter implements HFile.Writer {
return isDuplicateKey;
}
private String getLexicalErrorMessage(Cell cell) {
StringBuilder sb = new StringBuilder();
sb.append("Added a key not lexically larger than previous. Current cell = ");
sb.append(cell);
sb.append(", lastCell = ");
sb.append(lastCell);
// the file context includes the HFile path and, optionally, the table and CF of the file being written
sb.append(", fileContext=");
sb.append(hFileContext);
return sb.toString();
}
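With this helper, an out-of-order append now reports the file context, and therefore the table and column family, of the file being written. A hypothetical message (cell formatting illustrative) might read:

Added a key not lexically larger than previous. Current cell = foo/MyColumnFamily:qualifier/101/Put/vlen=6/seqid=0, lastCell = foo/MyColumnFamily:qualifier/100/Put/vlen=6/seqid=0, fileContext=HFileContext [ ... tableName=MyTableName, columnFamily=MyColumnFamily ]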
/** Checks the given value for validity. */
protected void checkValue(final byte[] value, final int offset,
final int length) throws IOException {

HFileBlock.java

@@ -1292,6 +1292,8 @@ public class HFileBlock implements Cacheable {
.withCompressTags(fileContext.isCompressTags())
.withIncludesMvcc(fileContext.isIncludesMvcc())
.withIncludesTags(fileContext.isIncludesTags())
.withColumnFamily(fileContext.getColumnFamily())
.withTableName(fileContext.getTableName())
.build();
return new HFileBlock(blockType, getOnDiskSizeWithoutHeader(),
getUncompressedSizeWithoutHeader(), prevOffset,

HFileOutputFormat2.java

@@ -316,8 +316,8 @@ public class HFileOutputFormat2
.withCompression(compression)
.withChecksumType(HStore.getChecksumType(conf))
.withBytesPerCheckSum(HStore.getBytesPerChecksum(conf))
- .withBlockSize(blockSize);
+ .withBlockSize(blockSize)
+ .withColumnFamily(family);
if (HFile.getFormatVersion(conf) >= HFile.MIN_FORMAT_VERSION_WITH_TAGS) {
contextBuilder.withIncludesTags(true);
}

HStore.java

@@ -1158,6 +1158,9 @@ public class HStore implements Store {
.withDataBlockEncoding(family.getDataBlockEncoding())
.withEncryptionContext(cryptoContext)
.withCreateTime(EnvironmentEdgeManager.currentTime())
.withColumnFamily(family.getName())
.withTableName(region.getTableDesc().getTableName().getName())
.build();
return hFileContext;
}

TestHFile.java

@@ -32,6 +32,8 @@ import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseTestCase;
import org.apache.hadoop.hbase.HBaseTestingUtility;
import org.apache.hadoop.hbase.HConstants;
@@ -44,7 +46,10 @@ import org.apache.hadoop.hbase.io.hfile.HFile.Reader;
import org.apache.hadoop.hbase.io.hfile.HFile.Writer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.io.Writable;
import org.junit.Assert;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.mockito.Mockito;
/**
* test hfile features.
@@ -95,6 +100,44 @@ public class TestHFile extends HBaseTestCase {
assertNull(r.getLastKey());
}
@Test
public void testCorruptOutOfOrderHFileWrite() throws IOException {
Path path = new Path(ROOT_DIR, "testCorruptOutOfOrderHFileWrite");
FSDataOutputStream mockedOutputStream = Mockito.mock(FSDataOutputStream.class);
String columnFamily = "MyColumnFamily";
String tableName = "MyTableName";
HFileContext fileContext = new HFileContextBuilder()
.withHFileName("testCorruptOutOfOrderHFileWriteHFile")
.withBlockSize(minBlockSize)
.withColumnFamily(Bytes.toBytes(columnFamily))
.withTableName(Bytes.toBytes(tableName))
.withHBaseCheckSum(false)
.withCompression(Compression.Algorithm.NONE)
.withCompressTags(false)
.build();
HFileWriterV3 writer = new HFileWriterV3(conf, cacheConf, fs, path, mockedOutputStream,
new KeyValue.KVComparator(), fileContext);
byte[] row = Bytes.toBytes("foo");
byte[] qualifier = Bytes.toBytes("qualifier");
byte[] cf = Bytes.toBytes(columnFamily);
byte[] val = Bytes.toBytes("fooVal");
long firstTS = 100L;
long secondTS = 101L;
Cell firstCell = CellUtil.createCell(row, cf, qualifier, firstTS, Type.Put.getCode(), val);
Cell secondCell = CellUtil.createCell(row, cf, qualifier, secondTS, Type.Put.getCode(), val);
// secondCell sorts before firstCell because HBase orders later timestamps first, so appending it second is out of order
writer.append(firstCell);
try {
writer.append(secondCell);
} catch (IOException ie) {
String message = ie.getMessage();
Assert.assertTrue(message.contains("not lexically larger"));
Assert.assertTrue(message.contains(tableName));
Assert.assertTrue(message.contains(columnFamily));
return;
}
Assert.fail("Exception wasn't thrown even though Cells were appended in the wrong order!");
}
/**
* Create 0-length hfile and show that it fails
*/

TestStore.java

@@ -19,6 +19,7 @@
package org.apache.hadoop.hbase.regionserver;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
@@ -144,6 +145,7 @@ public class TestStore {
*/
@Before
public void setUp() throws IOException {
qualifiers.clear();
qualifiers.add(qf1);
qualifiers.add(qf3);
qualifiers.add(qf5);
@@ -1449,6 +1451,16 @@ public class TestStore {
}
}
@Test
public void testHFileContextSetWithCFAndTable() throws Exception {
init(this.name.getMethodName());
StoreFile.Writer writer = store.createWriterInTmp(10000L,
Compression.Algorithm.NONE, false, true, false, true);
HFileContext hFileContext = writer.getHFileWriter().getFileContext();
assertArrayEquals(family, hFileContext.getColumnFamily());
assertArrayEquals(table, hFileContext.getTableName());
}
private MyStore initMyStore(String methodName, Configuration conf, MyStoreHook hook) throws IOException {
HTableDescriptor htd = new HTableDescriptor(TableName.valueOf(table));
HColumnDescriptor hcd = new HColumnDescriptor(family);

HFileTestUtil.java

@@ -104,6 +104,7 @@ public class HFileTestUtil {
HFileContext meta = new HFileContextBuilder()
.withIncludesTags(withTag)
.withDataBlockEncoding(encoding)
.withColumnFamily(family)
.build();
HFile.Writer writer = HFile.getWriterFactory(configuration, new CacheConfig(configuration))
.withPath(fs, path)