HBASE-9126 Make HFile MIN VERSION as 2
git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1511023 13f79535-47bb-0310-9956-ffa450edef68
parent 10a25c52cf
commit 344b028f0e
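This change removes the HFile format version 1 code paths outright instead of keeping them behind runtime checks: FixedFileTrailer stops branching on majorVersion when serializing and deserializing, BloomFilterFactory always builds compound Bloom filters, and HFile.MIN_FORMAT_VERSION moves from 1 to 2. A minimal sketch of what the bump means at open time, assuming trunk's MAX_FORMAT_VERSION was 2 at the time (the class and message below are illustrative, not copied from the source):

    // Sketch only: the effect of raising MIN_FORMAT_VERSION on old files.
    public class VersionGuardSketch {
      static final int MIN_FORMAT_VERSION = 2; // raised from 1 by this commit
      static final int MAX_FORMAT_VERSION = 2; // assumed trunk value at the time

      static void checkFormatVersion(int majorVersion) {
        if (majorVersion < MIN_FORMAT_VERSION || majorVersion > MAX_FORMAT_VERSION) {
          throw new IllegalArgumentException("Invalid HFile version: " + majorVersion);
        }
      }

      public static void main(String[] args) {
        checkFormatVersion(2); // accepted
        checkFormatVersion(1); // now throws: version 1 files are no longer readable
      }
    }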
@@ -241,29 +241,17 @@ public class FixedFileTrailer {
     output.writeLong(loadOnOpenDataOffset);
     output.writeInt(dataIndexCount);
 
-    if (majorVersion == 1) {
-      // This used to be metaIndexOffset, but it was not used in version 1.
-      output.writeLong(0);
-    } else {
-      output.writeLong(uncompressedDataIndexSize);
-    }
+    output.writeLong(uncompressedDataIndexSize);
 
     output.writeInt(metaIndexCount);
     output.writeLong(totalUncompressedBytes);
-    if (majorVersion == 1) {
-      output.writeInt((int) Math.min(Integer.MAX_VALUE, entryCount));
-    } else {
-      // This field is long from version 2 onwards.
-      output.writeLong(entryCount);
-    }
+    output.writeLong(entryCount);
     output.writeInt(compressionCodec.ordinal());
 
-    if (majorVersion > 1) {
-      output.writeInt(numDataIndexLevels);
-      output.writeLong(firstDataBlockOffset);
-      output.writeLong(lastDataBlockOffset);
-      Bytes.writeStringFixedSize(output, comparatorClassName, MAX_COMPARATOR_NAME_LENGTH);
-    }
+    output.writeInt(numDataIndexLevels);
+    output.writeLong(firstDataBlockOffset);
+    output.writeLong(lastDataBlockOffset);
+    Bytes.writeStringFixedSize(output, comparatorClassName, MAX_COMPARATOR_NAME_LENGTH);
   }
 
   /**
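With the branches gone, serialize() always emits one fixed field sequence. A self-contained sketch of that sequence and its size; the field names come from the hunk above, while the leading fileInfoOffset (written just before this hunk starts) and the 128-byte comparator-name field are assumptions:

    import java.io.ByteArrayOutputStream;
    import java.io.DataOutputStream;
    import java.io.IOException;
    import java.util.Arrays;

    public class TrailerWriteSketch {
      static final int MAX_COMPARATOR_NAME_LENGTH = 128; // assumed value

      public static void main(String[] args) throws IOException {
        ByteArrayOutputStream buf = new ByteArrayOutputStream();
        DataOutputStream output = new DataOutputStream(buf);
        output.writeLong(0L); // fileInfoOffset
        output.writeLong(0L); // loadOnOpenDataOffset
        output.writeInt(0);   // dataIndexCount
        output.writeLong(0L); // uncompressedDataIndexSize, written unconditionally now
        output.writeInt(0);   // metaIndexCount
        output.writeLong(0L); // totalUncompressedBytes
        output.writeLong(0L); // entryCount, always a long
        output.writeInt(0);   // compressionCodec.ordinal()
        output.writeInt(1);   // numDataIndexLevels, no "majorVersion > 1" guard
        output.writeLong(0L); // firstDataBlockOffset
        output.writeLong(0L); // lastDataBlockOffset
        // Fixed-size, zero-padded comparator class name.
        output.write(Arrays.copyOf("KeyComparator".getBytes("UTF-8"),
            MAX_COMPARATOR_NAME_LENGTH));
        System.out.println(buf.size()); // 72 + 128 = 200 bytes before the version word
      }
    }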
@@ -354,23 +342,17 @@ public class FixedFileTrailer {
     fileInfoOffset = input.readLong();
     loadOnOpenDataOffset = input.readLong();
     dataIndexCount = input.readInt();
-    if (majorVersion == 1) {
-      input.readLong(); // Read and skip metaIndexOffset.
-    } else {
-      uncompressedDataIndexSize = input.readLong();
-    }
+    uncompressedDataIndexSize = input.readLong();
     metaIndexCount = input.readInt();
 
     totalUncompressedBytes = input.readLong();
-    entryCount = majorVersion == 1 ? input.readInt() : input.readLong();
+    entryCount = input.readLong();
     compressionCodec = Compression.Algorithm.values()[input.readInt()];
-    if (majorVersion > 1) {
-      numDataIndexLevels = input.readInt();
-      firstDataBlockOffset = input.readLong();
-      lastDataBlockOffset = input.readLong();
-      setComparatorClass(getComparatorClass(Bytes.readStringFixedSize(input,
+    numDataIndexLevels = input.readInt();
+    firstDataBlockOffset = input.readLong();
+    lastDataBlockOffset = input.readLong();
+    setComparatorClass(getComparatorClass(Bytes.readStringFixedSize(input,
         MAX_COMPARATOR_NAME_LENGTH)));
-    }
   }
 
   private void append(StringBuilder sb, String s) {
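The read path is the exact mirror: every field is consumed unconditionally and entryCount is always parsed as a long. A sketch that decodes the 200-byte layout produced by the write sketch above (same assumptions):

    import java.io.ByteArrayInputStream;
    import java.io.DataInputStream;
    import java.io.IOException;

    public class TrailerReadSketch {
      public static void main(String[] args) throws IOException {
        byte[] trailer = new byte[200]; // all zeros, shaped like the write sketch's output
        DataInputStream input = new DataInputStream(new ByteArrayInputStream(trailer));
        long fileInfoOffset = input.readLong();
        long loadOnOpenDataOffset = input.readLong();
        int dataIndexCount = input.readInt();
        long uncompressedDataIndexSize = input.readLong(); // no metaIndexOffset skip
        int metaIndexCount = input.readInt();
        long totalUncompressedBytes = input.readLong();
        long entryCount = input.readLong(); // was: version 1 ? readInt() : readLong()
        int compressionCodecOrdinal = input.readInt();
        int numDataIndexLevels = input.readInt();
        long firstDataBlockOffset = input.readLong();
        long lastDataBlockOffset = input.readLong();
        byte[] comparatorName = new byte[128]; // assumed MAX_COMPARATOR_NAME_LENGTH
        input.readFully(comparatorName);
        System.out.println(entryCount + " entries, " + input.available() + " bytes left");
      }
    }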
@@ -389,13 +371,11 @@ public class FixedFileTrailer {
     append(sb, "totalUncomressedBytes=" + totalUncompressedBytes);
     append(sb, "entryCount=" + entryCount);
     append(sb, "compressionCodec=" + compressionCodec);
-    if (majorVersion == 2) {
-      append(sb, "uncompressedDataIndexSize=" + uncompressedDataIndexSize);
-      append(sb, "numDataIndexLevels=" + numDataIndexLevels);
-      append(sb, "firstDataBlockOffset=" + firstDataBlockOffset);
-      append(sb, "lastDataBlockOffset=" + lastDataBlockOffset);
-      append(sb, "comparatorClassName=" + comparatorClassName);
-    }
+    append(sb, "uncompressedDataIndexSize=" + uncompressedDataIndexSize);
+    append(sb, "numDataIndexLevels=" + numDataIndexLevels);
+    append(sb, "firstDataBlockOffset=" + firstDataBlockOffset);
+    append(sb, "lastDataBlockOffset=" + lastDataBlockOffset);
+    append(sb, "comparatorClassName=" + comparatorClassName);
     append(sb, "majorVersion=" + majorVersion);
     append(sb, "minorVersion=" + minorVersion);
 
@@ -516,15 +496,6 @@ public class FixedFileTrailer {
   }
 
   public void setEntryCount(long newEntryCount) {
-    if (majorVersion == 1) {
-      int intEntryCount = (int) Math.min(Integer.MAX_VALUE, newEntryCount);
-      if (intEntryCount != newEntryCount) {
-        LOG.info("Warning: entry count is " + newEntryCount + " but writing "
-            + intEntryCount + " into the version " + majorVersion + " trailer");
-      }
-      entryCount = intEntryCount;
-      return;
-    }
     entryCount = newEntryCount;
   }
 
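setEntryCount() loses the v1 clamp to Integer.MAX_VALUE, so counts past 2^31 - 1 now survive intact; the value below is the same one TestFixedFileTrailer exercises. In miniature:

    public class EntryCountSketch {
      public static void main(String[] args) {
        long entries = ((long) Integer.MAX_VALUE) + 1;              // 2147483648
        int v1Clamped = (int) Math.min(Integer.MAX_VALUE, entries); // the removed v1 path
        System.out.println(entries);   // 2147483648, now written unchanged
        System.out.println(v1Clamped); // 2147483647, what a v1 trailer used to store
      }
    }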
@@ -626,8 +597,6 @@ public class FixedFileTrailer {
   }
 
   public long getUncompressedDataIndexSize() {
-    if (majorVersion == 1)
-      return 0;
     return uncompressedDataIndexSize;
   }
 
@@ -62,7 +62,6 @@ import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos;
 import org.apache.hadoop.hbase.protobuf.generated.HBaseProtos.BytesBytesPair;
 import org.apache.hadoop.hbase.protobuf.generated.HFileProtos;
 import org.apache.hadoop.hbase.io.FSDataInputStreamWrapper;
 import org.apache.hadoop.hbase.regionserver.StoreFile.WriterBuilder;
 import org.apache.hadoop.hbase.util.BloomFilterWriter;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ChecksumType;
@@ -153,7 +152,7 @@ public class HFile {
       Compression.Algorithm.NONE;
 
   /** Minimum supported HFile format version */
-  public static final int MIN_FORMAT_VERSION = 1;
+  public static final int MIN_FORMAT_VERSION = 2;
 
   /** Maximum supported HFile format version
    */
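The bump also turns the old "HFile.getFormatVersion(conf) > HFile.MIN_FORMAT_VERSION" guards in BloomFilterFactory (next hunks) from redundant into wrong: for a version 2 file the comparison becomes 2 > 2, which is false and would route v2 writers onto the deleted v1 branch. Removing the guards is therefore part of the same change, not a cleanup. In miniature:

    public class GuardComparisonSketch {
      public static void main(String[] args) {
        int formatVersion = 2;      // a v2 file
        int oldMin = 1, newMin = 2; // MIN_FORMAT_VERSION before and after this commit
        System.out.println(formatVersion > oldMin); // true: compound Bloom path
        System.out.println(formatVersion > newMin); // false: would pick the v1 branch
      }
    }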
@@ -197,35 +197,12 @@ public final class BloomFilterFactory {
         MAX_ALLOWED_FOLD_FACTOR);
 
-    // Do we support compound bloom filters?
-    if (HFile.getFormatVersion(conf) > HFile.MIN_FORMAT_VERSION) {
-      // In case of compound Bloom filters we ignore the maxKeys hint.
-      CompoundBloomFilterWriter bloomWriter = new CompoundBloomFilterWriter(
-          getBloomBlockSize(conf), err, Hash.getHashType(conf), maxFold,
-          cacheConf.shouldCacheBloomsOnWrite(), bloomType == BloomType.ROWCOL
-              ? KeyValue.KEY_COMPARATOR : Bytes.BYTES_RAWCOMPARATOR);
-      writer.addInlineBlockWriter(bloomWriter);
-      return bloomWriter;
-    } else {
-      // A single-block Bloom filter. Only used when testing HFile format
-      // version 1.
-      int tooBig = conf.getInt(IO_STOREFILE_BLOOM_MAX_KEYS,
-          128 * 1000 * 1000);
 
-      if (maxKeys <= 0) {
-        LOG.warn("Invalid maximum number of keys specified: " + maxKeys
-            + ", not using Bloom filter");
-        return null;
-      } else if (maxKeys < tooBig) {
-        BloomFilterWriter bloom = new ByteBloomFilter((int) maxKeys, err,
-            Hash.getHashType(conf), maxFold);
-        bloom.allocBloom();
-        return bloom;
-      } else {
-        LOG.debug("Skipping bloom filter because max keysize too large: "
-            + maxKeys);
-      }
-    }
-    return null;
+    // In case of compound Bloom filters we ignore the maxKeys hint.
+    CompoundBloomFilterWriter bloomWriter = new CompoundBloomFilterWriter(getBloomBlockSize(conf),
+        err, Hash.getHashType(conf), maxFold, cacheConf.shouldCacheBloomsOnWrite(),
+        bloomType == BloomType.ROWCOL ? KeyValue.KEY_COMPARATOR : Bytes.BYTES_RAWCOMPARATOR);
+    writer.addInlineBlockWriter(bloomWriter);
+    return bloomWriter;
   }
 
   /**
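After this hunk, createGeneralBloomAtWrite has a single outcome for an enabled Bloom type: a CompoundBloomFilterWriter registered as an inline block writer, with maxKeys surviving only as an ignored hint. A hedged caller-side sketch; the method signature and import paths are inferred from the diff, not copied from the source:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.io.hfile.CacheConfig;
    import org.apache.hadoop.hbase.io.hfile.HFile;
    import org.apache.hadoop.hbase.regionserver.BloomType; // assumed location
    import org.apache.hadoop.hbase.util.BloomFilterFactory;
    import org.apache.hadoop.hbase.util.BloomFilterWriter;

    public class GeneralBloomSketch {
      // Parameters mirror those visible in the hunk: conf, cacheConf, bloomType,
      // maxKeys and the HFile writer the filter attaches to.
      static BloomFilterWriter create(Configuration conf, CacheConfig cacheConf,
          HFile.Writer writer) {
        // maxKeys (-1 here) only mattered for sizing the removed single-block
        // ByteBloomFilter; the compound path ignores it.
        return BloomFilterFactory.createGeneralBloomAtWrite(conf, cacheConf,
            BloomType.ROW, -1, writer);
      }
    }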
@@ -250,18 +227,12 @@ public final class BloomFilterFactory {
 
     float err = getErrorRate(conf);
 
-    if (HFile.getFormatVersion(conf) > HFile.MIN_FORMAT_VERSION) {
-      int maxFold = getMaxFold(conf);
-      // In case of compound Bloom filters we ignore the maxKeys hint.
-      CompoundBloomFilterWriter bloomWriter = new CompoundBloomFilterWriter(
-          getBloomBlockSize(conf), err, Hash.getHashType(conf),
-          maxFold,
-          cacheConf.shouldCacheBloomsOnWrite(), Bytes.BYTES_RAWCOMPARATOR);
-      writer.addInlineBlockWriter(bloomWriter);
-      return bloomWriter;
-    } else {
-      LOG.info("Delete Family Bloom filter is not supported in HFile V1");
-      return null;
-    }
+    int maxFold = getMaxFold(conf);
+    // In case of compound Bloom filters we ignore the maxKeys hint.
+    CompoundBloomFilterWriter bloomWriter = new CompoundBloomFilterWriter(getBloomBlockSize(conf),
+        err, Hash.getHashType(conf), maxFold, cacheConf.shouldCacheBloomsOnWrite(),
+        Bytes.BYTES_RAWCOMPARATOR);
+    writer.addInlineBlockWriter(bloomWriter);
+    return bloomWriter;
   }
 };
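The delete-family variant gets the same simplification: the "not supported in HFile V1" branch disappears, so a null return can no longer mean a format-version limitation. A sketch under the same inferred-signature caveat:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.io.hfile.CacheConfig;
    import org.apache.hadoop.hbase.io.hfile.HFile;
    import org.apache.hadoop.hbase.util.BloomFilterFactory;
    import org.apache.hadoop.hbase.util.BloomFilterWriter;

    public class DeleteFamilyBloomSketch {
      static BloomFilterWriter create(Configuration conf, CacheConfig cacheConf,
          int maxKeys, HFile.Writer writer) {
        // Previously this could log "Delete Family Bloom filter is not supported
        // in HFile V1" and return null; now it always builds a compound writer.
        return BloomFilterFactory.createDeleteBloomAtWrite(conf, cacheConf, maxKeys, writer);
      }
    }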
@@ -50,8 +50,11 @@ public class TestFixedFileTrailer {
 
   private static final Log LOG = LogFactory.getLog(TestFixedFileTrailer.class);
 
-  /** The number of used fields by version. Indexed by version minus one. */
-  private static final int[] NUM_FIELDS_BY_VERSION = new int[] { 9, 14 };
+  /**
+   * The number of used fields by version. Indexed by version minus two.
+   * Min version that we support is V2
+   */
+  private static final int[] NUM_FIELDS_BY_VERSION = new int[] { 14 };
 
   private HBaseTestingUtility util = new HBaseTestingUtility();
   private FileSystem fs;
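Since the table now starts at version 2, lookups shift from version - 1 to version - 2, which is what the assertion hunk further down changes. In miniature:

    public class FieldsTableSketch {
      public static void main(String[] args) {
        int[] NUM_FIELDS_BY_VERSION = { 14 }; // index 0 corresponds to version 2
        int version = 2;
        System.out.println(NUM_FIELDS_BY_VERSION[version - 2]); // 14
      }
    }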
@@ -87,17 +90,11 @@ public class TestFixedFileTrailer {
     t.setDataIndexCount(3);
     t.setEntryCount(((long) Integer.MAX_VALUE) + 1);
 
-    if (version == 1) {
-      t.setFileInfoOffset(876);
-    }
-
-    if (version == 2) {
-      t.setLastDataBlockOffset(291);
-      t.setNumDataIndexLevels(3);
-      t.setComparatorClass(KeyValue.KEY_COMPARATOR.getClass());
-      t.setFirstDataBlockOffset(9081723123L); // Completely unrealistic.
-      t.setUncompressedDataIndexSize(827398717L); // Something random.
-    }
+    t.setLastDataBlockOffset(291);
+    t.setNumDataIndexLevels(3);
+    t.setComparatorClass(KeyValue.KEY_COMPARATOR.getClass());
+    t.setFirstDataBlockOffset(9081723123L); // Completely unrealistic.
+    t.setUncompressedDataIndexSize(827398717L); // Something random.
 
     t.setLoadOnOpenOffset(128);
     t.setMetaIndexCount(7);
@@ -162,7 +159,7 @@ public class TestFixedFileTrailer {
 
     String trailerStr = t.toString();
     assertEquals("Invalid number of fields in the string representation "
-        + "of the trailer: " + trailerStr, NUM_FIELDS_BY_VERSION[version - 1],
+        + "of the trailer: " + trailerStr, NUM_FIELDS_BY_VERSION[version - 2],
         trailerStr.split(", ").length);
     assertEquals(trailerStr, t4.toString());
   }