HBASE-8034 record on-disk data size for store file and make it available during writing
git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1456743 13f79535-47bb-0310-9956-ffa450edef68
commit aa4092c83d
parent 48debe39f0
@@ -266,4 +266,10 @@ public abstract class AbstractHFileWriter implements HFile.Writer {
         HConstants.DATA_FILE_UMASK_KEY);
     return FSUtils.create(fs, path, perms);
   }
+
+  @Override
+  public long getCurrentSize() throws IOException {
+    if (this.outputStream == null) return -1;
+    return this.outputStream.getPos();
+  }
 }

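Note: the new method simply surfaces the write offset of the underlying FSDataOutputStream, so the "size" is the number of bytes handed to the stream so far, with -1 as a sentinel until the stream exists. A minimal standalone sketch of that semantics against a local Hadoop FileSystem; it is an illustration, not code from this patch, and the class and path names are made up:

import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class CurrentSizeDemo {
  public static void main(String[] args) throws IOException {
    FileSystem fs = FileSystem.getLocal(new Configuration());
    Path p = new Path("/tmp/current-size-demo");
    FSDataOutputStream out = fs.create(p, true);
    try {
      // getPos() tracks bytes written so far, which is what the patch
      // reports as the file's current on-disk data size.
      System.out.println("pos before writes: " + out.getPos()); // 0
      out.write(new byte[4096]);
      System.out.println("pos after writes:  " + out.getPos()); // 4096
    } finally {
      out.close();
      fs.delete(p, false);
    }
  }
}
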
@@ -319,6 +319,11 @@ public class HFile {
      * HFile V2.
      */
     void addDeleteFamilyBloomFilter(BloomFilterWriter bfw) throws IOException;
+
+    /**
+     * @return Currently written raw data size on disk.
+     */
+    long getCurrentSize() throws IOException;
   }
 
   /**

@@ -107,6 +107,9 @@ public class StoreFile {
   public static final byte[] DELETE_FAMILY_COUNT =
       Bytes.toBytes("DELETE_FAMILY_COUNT");
 
+  /** See {@link #getEstimatedDiskDataSize()}. */
+  public static final byte[] DISK_DATA_SIZE_KEY = Bytes.toBytes("DISK_DATA_SIZE");
+
   /** Last Bloom filter key in FileInfo */
   private static final byte[] LAST_BLOOM_KEY = Bytes.toBytes("LAST_BLOOM_KEY");
 

@@ -149,6 +152,12 @@ public class StoreFile {
   // whenever you get a Reader.
   private AtomicBoolean majorCompaction = null;
 
+  /** See {@link #getEstimatedDiskDataSize()}. */
+  private long diskDataSize;
+
+  /** See {@link #getEstimatedDiskDataSize()}. */
+  private static double DATA_SIZE_FRACTION_ESTIMATE = 0.98;
+
   // If true, this file should not be included in minor compactions.
   // It's set whenever you get a Reader.
   private boolean excludeFromMinorCompaction = false;

@@ -287,6 +296,15 @@ public class StoreFile {
     return modificationTimeStamp;
   }
 
+  /**
+   * @return Estimated number of bytes taken by the data blocks of this file. Either the exact
+   *         number written into the file metadata ({@link #DISK_DATA_SIZE_KEY}); or estimated as
+   *         {@link #DATA_SIZE_FRACTION_ESTIMATE} of the file, if there's no such field (old files).
+   */
+  public long getEstimatedDiskDataSize() {
+    return diskDataSize;
+  }
+
   /**
    * Return the largest memstoreTS found across all storefiles in
    * the given list. Store files that were created by a mapreduce

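Note: one way to read the accessor's intent is that a caller needing the data-block footprint of a set of files can sum the per-file values without reopening each HFile. A speculative sketch; StoreFileLike and DataSizeTotals are made-up stand-ins assuming only the accessor above:

import java.util.List;

interface StoreFileLike {
  long getEstimatedDiskDataSize(); // stand-in for StoreFile's new accessor
}

class DataSizeTotals {
  // Sum of (exact or estimated) data-block bytes across candidate files.
  static long totalDataSize(List<? extends StoreFileLike> files) {
    long total = 0;
    for (StoreFileLike f : files) {
      total += f.getEstimatedDiskDataSize();
    }
    return total;
  }
}
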
@@ -446,6 +464,12 @@ public class StoreFile {
           "proceeding without", e);
       this.reader.timeRangeTracker = null;
     }
+
+    b = metadataMap.get(DISK_DATA_SIZE_KEY);
+    // Estimate which fraction of the file is data if the file doesn't have this field.
+    this.diskDataSize = (b != null)
+        ? Bytes.toLong(b) : (long)(this.reader.length() * DATA_SIZE_FRACTION_ESTIMATE);
+
     return this.reader;
   }
 

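Note: the fallback arithmetic above, pulled out into a runnable illustration. The 0.98 mirrors DATA_SIZE_FRACTION_ESTIMATE; the 256 MB example file size is made up. A file written before this patch has no DISK_DATA_SIZE entry in FileInfo, so its data size is taken to be 98% of the whole file:

public class DiskDataSizeFallback {
  private static final double DATA_SIZE_FRACTION_ESTIMATE = 0.98;

  // exactFromMeta is the decoded DISK_DATA_SIZE value, or null when the
  // FileInfo map has no such key (files written before this patch).
  static long dataSize(Long exactFromMeta, long fileLength) {
    return (exactFromMeta != null)
        ? exactFromMeta.longValue()
        : (long) (fileLength * DATA_SIZE_FRACTION_ESTIMATE);
  }

  public static void main(String[] args) {
    long legacyFile = 256L * 1024 * 1024;           // 268435456 bytes
    System.out.println(dataSize(null, legacyFile)); // 263066746 (~250.9 MB)
  }
}
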
@@ -1073,6 +1097,12 @@ public class StoreFile {
     }
 
     public void close() throws IOException {
+      // Estimate data size in this file before blooms and the HFile tail blocks.
+      long currentSize = writer.getCurrentSize();
+      if (currentSize >= 0) {
+        writer.appendFileInfo(DISK_DATA_SIZE_KEY, Bytes.toBytes(currentSize));
+      }
+
       boolean hasGeneralBloom = this.closeGeneralBloomFilter();
       boolean hasDeleteFamilyBloom = this.closeDeleteFamilyBloomFilter();
 

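Note: on the write side, the size is captured from the stream position before the bloom filters and the HFile trailer go out, then stored via Bytes.toBytes(long), which is an 8-byte big-endian encoding. A small JDK-only round-trip sketch of that encoding (assumption: ByteBuffer's default big-endian order produces the same bytes as HBase's Bytes for longs):

import java.nio.ByteBuffer;
import java.util.Arrays;

public class FileInfoLongRoundTrip {
  public static void main(String[] args) {
    long currentSize = 123456789L;
    // Encode the way the DISK_DATA_SIZE value travels through FileInfo.
    byte[] encoded = ByteBuffer.allocate(8).putLong(currentSize).array();
    long decoded = ByteBuffer.wrap(encoded).getLong();
    System.out.println(Arrays.toString(encoded)); // [0, 0, 0, 0, 7, 91, -51, 21]
    System.out.println(decoded == currentSize);   // true
  }
}
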
@@ -1095,6 +1125,10 @@ public class StoreFile {
     HFile.Writer getHFileWriter() {
       return writer;
     }
+
+    public long getCurrentSize() throws IOException {
+      return writer.getCurrentSize();
+    }
   }
 
   /**
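Note: the commit subject says the size is made "available during writing"; the plumbing above (StoreFile.Writer.getCurrentSize delegating to the HFile writer) is what lets a caller consult the bytes already on disk mid-write. A speculative caller sketch with made-up names (SizedWriter, RollDecision, MAX_FILE_SIZE); the -1 check mirrors the "no stream yet" sentinel from AbstractHFileWriter:

import java.io.IOException;

interface SizedWriter {
  // Mirrors StoreFile.Writer#getCurrentSize(): bytes written so far,
  // or -1 if the output stream has not been opened yet.
  long getCurrentSize() throws IOException;
}

class RollDecision {
  static final long MAX_FILE_SIZE = 2L * 1024 * 1024 * 1024; // 2 GB, made up

  // Example policy: start a new file once the current one is big enough.
  static boolean shouldRoll(SizedWriter w) throws IOException {
    long size = w.getCurrentSize();
    return size >= 0 && size >= MAX_FILE_SIZE;
  }
}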