HBASE-23251 - Add Column Family and Table Names to HFileContext and use in HFileWriterImpl logging (#796)
Signed-off-by: Andrew Purtell <apurtell@apache.org>
Signed-off-by: Xu Cang <xucang@apache.org>
Signed-off-by: Zheng Hu <openinx@gmail.com>
commit 77490f815a
parent 08aae42156
HFileContext.java

@@ -35,10 +35,12 @@ import org.apache.yetus.audience.InterfaceAudience;
 @InterfaceAudience.Private
 public class HFileContext implements HeapSize, Cloneable {
   public static final int FIXED_OVERHEAD = ClassSize.align(ClassSize.OBJECT +
-      // Algorithm, checksumType, encoding, Encryption.Context, hfileName reference
+      // Algorithm, checksumType, encoding, Encryption.Context, hfileName reference,
       5 * ClassSize.REFERENCE + 2 * Bytes.SIZEOF_INT +
       // usesHBaseChecksum, includesMvcc, includesTags and compressTags
-      4 * Bytes.SIZEOF_BOOLEAN + Bytes.SIZEOF_LONG);
+      4 * Bytes.SIZEOF_BOOLEAN + Bytes.SIZEOF_LONG +
+      //byte[] headers for column family and table name
+      2 * ClassSize.ARRAY + 2 * ClassSize.REFERENCE);

   public static final int DEFAULT_BYTES_PER_CHECKSUM = 16 * 1024;

@@ -63,6 +65,8 @@ public class HFileContext implements HeapSize, Cloneable {
   private Encryption.Context cryptoContext = Encryption.Context.NONE;
   private long fileCreateTime;
   private String hfileName;
+  private byte[] columnFamily;
+  private byte[] tableName;

   //Empty constructor. Go with setters
   public HFileContext() {
@@ -85,12 +89,15 @@ public class HFileContext implements HeapSize, Cloneable {
     this.cryptoContext = context.cryptoContext;
     this.fileCreateTime = context.fileCreateTime;
     this.hfileName = context.hfileName;
+    this.columnFamily = context.columnFamily;
+    this.tableName = context.tableName;
   }

   HFileContext(boolean useHBaseChecksum, boolean includesMvcc, boolean includesTags,
       Compression.Algorithm compressAlgo, boolean compressTags, ChecksumType checksumType,
       int bytesPerChecksum, int blockSize, DataBlockEncoding encoding,
-      Encryption.Context cryptoContext, long fileCreateTime, String hfileName) {
+      Encryption.Context cryptoContext, long fileCreateTime, String hfileName,
+      byte[] columnFamily, byte[] tableName) {
     this.usesHBaseChecksum = useHBaseChecksum;
     this.includesMvcc = includesMvcc;
     this.includesTags = includesTags;
@@ -105,6 +112,8 @@ public class HFileContext implements HeapSize, Cloneable {
     this.cryptoContext = cryptoContext;
     this.fileCreateTime = fileCreateTime;
     this.hfileName = hfileName;
+    this.columnFamily = columnFamily;
+    this.tableName = tableName;
   }

   /**
@@ -192,6 +201,13 @@ public class HFileContext implements HeapSize, Cloneable {
     return this.hfileName;
   }

+  public byte[] getColumnFamily() {
+    return this.columnFamily;
+  }
+
+  public byte[] getTableName() {
+    return this.tableName;
+  }
   /**
    * HeapSize implementation. NOTE : The heap size should be altered when new state variable are
    * added.
@@ -203,6 +219,12 @@ public class HFileContext implements HeapSize, Cloneable {
     if (this.hfileName != null) {
       size += ClassSize.STRING + this.hfileName.length();
     }
+    if (this.columnFamily != null){
+      size += ClassSize.sizeOfByteArray(this.columnFamily.length);
+    }
+    if (this.tableName != null){
+      size += ClassSize.sizeOfByteArray(this.tableName.length);
+    }
     return size;
   }

@@ -233,6 +255,14 @@ public class HFileContext implements HeapSize, Cloneable {
       sb.append(", name=");
       sb.append(hfileName);
     }
+    if (tableName != null) {
+      sb.append(", tableName=");
+      sb.append(Bytes.toStringBinary(tableName));
+    }
+    if (columnFamily != null) {
+      sb.append(", columnFamily=");
+      sb.append(Bytes.toStringBinary(columnFamily));
+    }
     sb.append("]");
     return sb.toString();
   }
HFileContextBuilder.java

@@ -54,6 +54,8 @@ public class HFileContextBuilder {
   private long fileCreateTime = 0;

   private String hfileName = null;
+  private byte[] columnFamily = null;
+  private byte[] tableName = null;

   public HFileContextBuilder() {}

@@ -73,6 +75,8 @@ public class HFileContextBuilder {
     this.cryptoContext = hfc.getEncryptionContext();
     this.fileCreateTime = hfc.getFileCreateTime();
     this.hfileName = hfc.getHFileName();
+    this.columnFamily = hfc.getColumnFamily();
+    this.tableName = hfc.getTableName();
   }

   public HFileContextBuilder withHBaseCheckSum(boolean useHBaseCheckSum) {
@@ -135,9 +139,19 @@ public class HFileContextBuilder {
     return this;
   }

+  public HFileContextBuilder withColumnFamily(byte[] columnFamily){
+    this.columnFamily = columnFamily;
+    return this;
+  }
+
+  public HFileContextBuilder withTableName(byte[] tableName){
+    this.tableName = tableName;
+    return this;
+  }
+
   public HFileContext build() {
     return new HFileContext(usesHBaseChecksum, includesMvcc, includesTags, compression,
         compressTags, checksumType, bytesPerChecksum, blocksize, encoding, cryptoContext,
-        fileCreateTime, hfileName);
+        fileCreateTime, hfileName, columnFamily, tableName);
   }
 }
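The two new builder methods complete the plumbing on the write path. A minimal sketch of the resulting call pattern (the "exampleHFile", "myTable", and "cf" values below are illustrative, not taken from this commit):

    HFileContext context = new HFileContextBuilder()
        .withHFileName("exampleHFile")
        .withTableName(Bytes.toBytes("myTable"))    // new in this patch
        .withColumnFamily(Bytes.toBytes("cf"))      // new in this patch
        .build();

Note that both setters keep the caller's byte[] reference rather than making a defensive copy, which keeps the builder cheap.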
HFileOutputFormat2.java

@@ -412,7 +412,9 @@ public class HFileOutputFormat2
           .withCompression(compression)
           .withChecksumType(HStore.getChecksumType(conf))
           .withBytesPerCheckSum(HStore.getBytesPerChecksum(conf))
-          .withBlockSize(blockSize);
+          .withBlockSize(blockSize)
+          .withColumnFamily(family)
+          .withTableName(tableName);

       if (HFile.getFormatVersion(conf) >= HFile.MIN_FORMAT_VERSION_WITH_TAGS) {
         contextBuilder.withIncludesTags(true);
HFileBlock.java

@@ -1266,6 +1266,8 @@ public class HFileBlock implements Cacheable {
         .withCompressTags(fileContext.isCompressTags())
         .withIncludesMvcc(fileContext.isIncludesMvcc())
         .withIncludesTags(fileContext.isIncludesTags())
+        .withColumnFamily(fileContext.getColumnFamily())
+        .withTableName(fileContext.getTableName())
         .build();
     // Build the HFileBlock.
     HFileBlockBuilder builder = new HFileBlockBuilder();
HFileWriterImpl.java

@@ -240,10 +240,9 @@ public class HFileWriterImpl implements HFile.Writer {
     }
     if (lastCell != null) {
       int keyComp = PrivateCellUtil.compareKeyIgnoresMvcc(comparator, lastCell, cell);
-
       if (keyComp > 0) {
-        throw new IOException("Added a key not lexically larger than"
-            + " previous. Current cell = " + cell + ", lastCell = " + lastCell);
+        String message = getLexicalErrorMessage(cell);
+        throw new IOException(message);
       } else if (keyComp == 0) {
         isDuplicateKey = true;
       }
@@ -251,6 +250,18 @@ public class HFileWriterImpl implements HFile.Writer {
     return isDuplicateKey;
   }

+  private String getLexicalErrorMessage(Cell cell) {
+    StringBuilder sb = new StringBuilder();
+    sb.append("Added a key not lexically larger than previous. Current cell = ");
+    sb.append(cell);
+    sb.append(", lastCell = ");
+    sb.append(lastCell);
+    //file context includes HFile path and optionally table and CF of file being written
+    sb.append("fileContext=");
+    sb.append(hFileContext);
+    return sb.toString();
+  }
+
   /** Checks the given value for validity. */
   protected void checkValue(final byte[] value, final int offset,
       final int length) throws IOException {
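With this helper in place, an out-of-order append now carries the writer's file context in the exception text. Because the appends emit "fileContext=" directly after the lastCell value with no separator, the message reads roughly as follows (a sketch with illustrative cell and context values, not captured output from this patch):

    Added a key not lexically larger than previous. Current cell = foo/cf:qualifier/101/Put/vlen=6/seqid=0, lastCell = foo/cf:qualifier/100/Put/vlen=6/seqid=0fileContext=[... tableName=MyTableName, columnFamily=MyColumnFamily]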
HStore.java

@@ -1162,6 +1162,9 @@ public class HStore implements Store, HeapSize, StoreConfigInformation, Propagat
         .withDataBlockEncoding(family.getDataBlockEncoding())
         .withEncryptionContext(cryptoContext)
         .withCreateTime(EnvironmentEdgeManager.currentTime())
+        .withColumnFamily(family.getName())
+        .withTableName(region.getTableDescriptor()
+            .getTableName().getName())
         .build();
     return hFileContext;
   }
TestHFile.java

@@ -47,7 +47,10 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.ArrayBackedTag;
 import org.apache.hadoop.hbase.ByteBufferKeyValue;
 import org.apache.hadoop.hbase.Cell;
+import org.apache.hadoop.hbase.CellBuilder;
+import org.apache.hadoop.hbase.CellBuilderFactory;
 import org.apache.hadoop.hbase.CellBuilderType;
+import org.apache.hadoop.hbase.CellComparator;
 import org.apache.hadoop.hbase.CellComparatorImpl;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.ExtendedCellBuilderFactory;
@@ -82,6 +85,7 @@ import org.junit.Rule;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
 import org.junit.rules.TestName;
+import org.mockito.Mockito;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;

@@ -340,6 +344,48 @@ public class TestHFile {
       fail("Should have thrown exception");
     }

+  @Test
+  public void testCorruptOutOfOrderHFileWrite() throws IOException {
+    Path path = new Path(ROOT_DIR, testName.getMethodName());
+    FSDataOutputStream mockedOutputStream = Mockito.mock(FSDataOutputStream.class);
+    String columnFamily = "MyColumnFamily";
+    String tableName = "MyTableName";
+    HFileContext fileContext = new HFileContextBuilder()
+        .withHFileName(testName.getMethodName() + "HFile")
+        .withBlockSize(minBlockSize)
+        .withColumnFamily(Bytes.toBytes(columnFamily))
+        .withTableName(Bytes.toBytes(tableName))
+        .withHBaseCheckSum(false)
+        .withCompression(Compression.Algorithm.NONE)
+        .withCompressTags(false)
+        .build();
+    HFileWriterImpl writer = new HFileWriterImpl(conf, cacheConf, path, mockedOutputStream,
+        CellComparator.getInstance(), fileContext);
+    CellBuilder cellBuilder = CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY);
+    byte[] row = Bytes.toBytes("foo");
+    byte[] qualifier = Bytes.toBytes("qualifier");
+    byte[] cf = Bytes.toBytes(columnFamily);
+    byte[] val = Bytes.toBytes("fooVal");
+    long firstTS = 100L;
+    long secondTS = 101L;
+    Cell firstCell = cellBuilder.setRow(row).setValue(val).setTimestamp(firstTS)
+        .setQualifier(qualifier).setFamily(cf).setType(Cell.Type.Put).build();
+    Cell secondCell = cellBuilder.setRow(row).setValue(val).setTimestamp(secondTS)
+        .setQualifier(qualifier).setFamily(cf).setType(Cell.Type.Put).build();
+    //second Cell will sort "higher" than the first because later timestamps should come first
+    writer.append(firstCell);
+    try {
+      writer.append(secondCell);
+    } catch(IOException ie){
+      String message = ie.getMessage();
+      Assert.assertTrue(message.contains("not lexically larger"));
+      Assert.assertTrue(message.contains(tableName));
+      Assert.assertTrue(message.contains(columnFamily));
+      return;
+    }
+    Assert.fail("Exception wasn't thrown even though Cells were appended in the wrong order!");
+  }
+
   public static void truncateFile(FileSystem fs, Path src, Path dst) throws IOException {
     FileStatus fst = fs.getFileStatus(src);
     long len = fst.getLen();
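The ordering the new test relies on deserves a note: for cells with identical row, family, and qualifier, HBase sorts later timestamps first, so secondCell (ts=101) compares as smaller than firstCell (ts=100), and appending it after firstCell trips the lexical-order check. A one-line sanity check of that assumption, using the same comparator the test hands to the writer:

    // Sketch: within the same row/family/qualifier, the later timestamp
    // sorts first, so this comparison is negative.
    assert CellComparator.getInstance().compare(secondCell, firstCell) < 0;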
TestHFileBlock.java

@@ -957,7 +957,8 @@ public class TestHFileBlock {
     long expected = hfileBlockExpectedSize + byteBufferExpectedSize + hfileMetaSize;
     assertEquals("Block data size: " + size + ", byte buffer expected " +
         "size: " + byteBufferExpectedSize + ", HFileBlock class expected " +
-        "size: " + hfileBlockExpectedSize + ";", expected,
+        "size: " + hfileBlockExpectedSize + " HFileContext class expected size: "
+        + hfileMetaSize + "; ", expected,
         block.heapSize());
   }
 }
TestHStore.java

@@ -17,6 +17,7 @@
  */
 package org.apache.hadoop.hbase.regionserver;

+import static org.junit.Assert.assertArrayEquals;
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
@@ -165,6 +166,7 @@ public class TestHStore {
    */
   @Before
   public void setUp() throws IOException {
+    qualifiers.clear();
     qualifiers.add(qf1);
     qualifiers.add(qf3);
     qualifiers.add(qf5);
@@ -1704,6 +1706,16 @@ public class TestHStore {
     assertEquals(8192L, sizeStore.getRegionSize(regionInfo2).getSize());
   }

+  @Test
+  public void testHFileContextSetWithCFAndTable() throws Exception {
+    init(this.name.getMethodName());
+    StoreFileWriter writer = store.createWriterInTmp(10000L,
+        Compression.Algorithm.NONE, false, true, false, true);
+    HFileContext hFileContext = writer.getHFileWriter().getFileContext();
+    assertArrayEquals(family, hFileContext.getColumnFamily());
+    assertArrayEquals(table, hFileContext.getTableName());
+  }
+
   private HStoreFile mockStoreFileWithLength(long length) {
     HStoreFile sf = mock(HStoreFile.class);
     StoreFileReader sfr = mock(StoreFileReader.class);
HFileTestUtil.java

@@ -120,6 +120,7 @@ public class HFileTestUtil {
     HFileContext meta = new HFileContextBuilder()
         .withIncludesTags(withTag)
         .withDataBlockEncoding(encoding)
+        .withColumnFamily(family)
         .build();
     HFile.Writer writer = HFile.getWriterFactory(configuration, new CacheConfig(configuration))
         .withPath(fs, path)