HBASE-18754 Get rid of Writable from TimeRangeTracker

Chia-Ping Tsai 2017-10-24 15:14:14 +08:00
parent 6ceb4a4f41
commit 2a28ff840e
9 changed files with 60 additions and 45 deletions
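
In short: TimeRangeTracker no longer implements Hadoop's Writable. Its tracked [min, max] timestamp pair is now serialized as a new HBaseProtos.TimeRangeTracker protobuf message behind HBase's "PBUF" magic prefix (TimeRangeTracker.toByteArray), and the new TimeRangeTracker.parseFrom accepts both that protobuf payload and the legacy Writable layout (two raw big-endian longs), so TIMERANGE_KEY metadata in existing store files stays readable.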

TestCellBasedHFileOutputFormat2.java

@@ -399,8 +399,7 @@ public class TestCellBasedHFileOutputFormat2 {
       assertNotNull(range);
       // unmarshall and check values.
-      TimeRangeTracker timeRangeTracker = TimeRangeTracker.create(TimeRangeTracker.Type.SYNC);
-      Writables.copyWritable(range, timeRangeTracker);
+      TimeRangeTracker timeRangeTracker = TimeRangeTracker.parseFrom(range);
       LOG.info(timeRangeTracker.getMin() +
           "...." + timeRangeTracker.getMax());
       assertEquals(1000, timeRangeTracker.getMin());

TestHFileOutputFormat2.java

@@ -94,7 +94,6 @@ import org.apache.hadoop.hbase.tool.LoadIncrementalHFiles;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.util.ReflectionUtils;
-import org.apache.hadoop.hbase.util.Writables;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.protocol.BlockStoragePolicy;
 import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
@@ -400,8 +399,7 @@ public class TestHFileOutputFormat2 {
       assertNotNull(range);
       // unmarshall and check values.
-      TimeRangeTracker timeRangeTracker = TimeRangeTracker.create(TimeRangeTracker.Type.SYNC);
-      Writables.copyWritable(range, timeRangeTracker);
+      TimeRangeTracker timeRangeTracker = TimeRangeTracker.parseFrom(range);
       LOG.info(timeRangeTracker.getMin() +
           "...." + timeRangeTracker.getMax());
       assertEquals(1000, timeRangeTracker.getMin());

HBase.proto

@@ -118,6 +118,11 @@ message TimeRange {
   optional uint64 to = 2;
 }
 
+message TimeRangeTracker {
+  optional uint64 from = 1;
+  optional uint64 to = 2;
+}
+
 /* ColumnFamily Specific TimeRange */
 message ColumnFamilyTimeRange {
   required bytes column_family = 1;

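The new message backs the TIMERANGE_KEY entry in store file metadata: the serialized bytes are HBase's protobuf magic prefix followed by this message. A minimal sketch of building such a payload with the generated class and the helpers the commit itself uses (a standalone illustration with a hypothetical class name, not code from the commit):

import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;

public class TimeRangeTrackerPayload {
  public static void main(String[] args) {
    // Build the PB message and prepend the magic, mirroring TimeRangeTracker.toByteArray(...).
    byte[] payload = ProtobufUtil.prependPBMagic(
        HBaseProtos.TimeRangeTracker.newBuilder()
            .setFrom(1000L) // minimum timestamp tracked
            .setTo(2000L)   // maximum timestamp tracked
            .build()
            .toByteArray());
    // Readers check for the magic before choosing the PB parse path (see parseFrom below).
    System.out.println(ProtobufUtil.isPBMagicPrefix(payload)); // prints: true
  }
}
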
HFilePrettyPrinter.java

@@ -539,7 +539,7 @@ public class HFilePrettyPrinter extends Configured implements Tool {
           || Bytes.equals(e.getKey(), HStoreFile.BULKLOAD_TIME_KEY)) {
         out.println(Bytes.toLong(e.getValue()));
       } else if (Bytes.equals(e.getKey(), HStoreFile.TIMERANGE_KEY)) {
-        TimeRangeTracker timeRangeTracker = TimeRangeTracker.getTimeRangeTracker(e.getValue());
+        TimeRangeTracker timeRangeTracker = TimeRangeTracker.parseFrom(e.getValue());
         out.println(timeRangeTracker.getMin() + "...." + timeRangeTracker.getMax());
       } else if (Bytes.equals(e.getKey(), FileInfo.AVG_KEY_LEN)
           || Bytes.equals(e.getKey(), FileInfo.AVG_VALUE_LEN)

HStoreFile.java

@@ -439,7 +439,8 @@ public class HStoreFile implements StoreFile {
     reader.loadBloomfilter(BlockType.DELETE_FAMILY_BLOOM_META);
     try {
-      this.reader.timeRange = TimeRangeTracker.getTimeRange(metadataMap.get(TIMERANGE_KEY));
+      byte[] data = metadataMap.get(TIMERANGE_KEY);
+      this.reader.timeRange = data == null ? null : TimeRangeTracker.parseFrom(data).toTimeRange();
     } catch (IllegalArgumentException e) {
       LOG.error("Error reading timestamp range data from meta -- " +
           "proceeding without", e);

StoreFileWriter.java

@@ -52,7 +52,6 @@ import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.util.RowBloomContext;
 import org.apache.hadoop.hbase.util.RowColBloomContext;
-import org.apache.hadoop.io.WritableUtils;
 import org.apache.yetus.audience.InterfaceAudience;
 
 import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
@@ -184,7 +183,9 @@ public class StoreFileWriter implements CellSink, ShipperListener {
    * Add TimestampRange and earliest put timestamp to Metadata
    */
  public void appendTrackedTimestampsToMetadata() throws IOException {
-    appendFileInfo(TIMERANGE_KEY, WritableUtils.toByteArray(timeRangeTracker));
+    // TODO: The StoreFileReader always converts the byte[] to TimeRange
+    // via TimeRangeTracker, so we should write the serialization data of TimeRange directly.
+    appendFileInfo(TIMERANGE_KEY, TimeRangeTracker.toByteArray(timeRangeTracker));
     appendFileInfo(EARLIEST_PUT_TS, Bytes.toBytes(earliestPutTs));
   }

TimeRangeTracker.java

@@ -18,19 +18,20 @@
  */
 package org.apache.hadoop.hbase.regionserver;
 
-import java.io.DataInput;
-import java.io.DataOutput;
+import java.io.ByteArrayInputStream;
+import java.io.DataInputStream;
 import java.io.IOException;
 import java.util.concurrent.atomic.AtomicLong;
 
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.io.TimeRange;
-import org.apache.hadoop.hbase.util.Writables;
-import org.apache.hadoop.io.Writable;
 import org.apache.yetus.audience.InterfaceAudience;
 
 import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
+import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
+import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
 
 /**
  * Stores minimum and maximum timestamp values, it is [minimumTimestamp, maximumTimestamp] in
@@ -44,7 +45,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
  * at read time via an instance of {@link TimeRange} to test if Cells fit the StoreFile TimeRange.
  */
 @InterfaceAudience.Private
-public abstract class TimeRangeTracker implements Writable {
+public abstract class TimeRangeTracker {
 
   public enum Type {
     // thread-unsafe
@@ -175,42 +176,41 @@ public abstract class TimeRangeTracker implements Writable {
    */
   public abstract long getMax();
 
-  public void write(final DataOutput out) throws IOException {
-    out.writeLong(getMin());
-    out.writeLong(getMax());
-  }
-
-  public void readFields(final DataInput in) throws IOException {
-    setMin(in.readLong());
-    setMax(in.readLong());
-  }
-
   @Override
   public String toString() {
     return "[" + getMin() + "," + getMax() + "]";
   }
 
   /**
+   * @param data the serialization data. It can't be null!
    * @return An instance of NonSyncTimeRangeTracker filled w/ the content of serialized
    *         NonSyncTimeRangeTracker in <code>timeRangeTrackerBytes</code>.
    * @throws IOException
    */
-  public static TimeRangeTracker getTimeRangeTracker(final byte [] timeRangeTrackerBytes)
-      throws IOException {
-    if (timeRangeTrackerBytes == null) return null;
-    TimeRangeTracker trt = TimeRangeTracker.create(Type.NON_SYNC);
-    Writables.copyWritable(timeRangeTrackerBytes, trt);
-    return trt;
+  public static TimeRangeTracker parseFrom(final byte[] data) throws IOException {
+    return parseFrom(data, Type.NON_SYNC);
   }
 
-  /**
-   * @return An instance of a TimeRange made from the serialized TimeRangeTracker passed in
-   *         <code>timeRangeTrackerBytes</code>.
-   * @throws IOException
-   */
-  static TimeRange getTimeRange(final byte [] timeRangeTrackerBytes) throws IOException {
-    TimeRangeTracker trt = getTimeRangeTracker(timeRangeTrackerBytes);
-    return trt == null? null: trt.toTimeRange();
+  public static TimeRangeTracker parseFrom(final byte[] data, Type type) throws IOException {
+    Preconditions.checkNotNull(data, "input data is null!");
+    if (ProtobufUtil.isPBMagicPrefix(data)) {
+      int pblen = ProtobufUtil.lengthOfPBMagic();
+      HBaseProtos.TimeRangeTracker.Builder builder = HBaseProtos.TimeRangeTracker.newBuilder();
+      ProtobufUtil.mergeFrom(builder, data, pblen, data.length - pblen);
+      return TimeRangeTracker.create(type, builder.getFrom(), builder.getTo());
+    } else {
+      DataInputStream in = new DataInputStream(new ByteArrayInputStream(data));
+      return TimeRangeTracker.create(type, in.readLong(), in.readLong());
+    }
+  }
+
+  public static byte[] toByteArray(TimeRangeTracker tracker) {
+    return ProtobufUtil.prependPBMagic(
+        HBaseProtos.TimeRangeTracker.newBuilder()
+          .setFrom(tracker.getMin())
+          .setTo(tracker.getMax())
+          .build()
+          .toByteArray());
   }
 
   /**

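The two-branch parseFrom keeps old store files readable across the upgrade: a payload without the PB magic is treated as the legacy Writable layout, i.e. sixteen bytes holding min then max as big-endian longs, exactly what the removed write(DataOutput) emitted. A hedged round-trip sketch of both branches (TrackerCompatDemo is an illustrative name, not part of the commit):

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;

import org.apache.hadoop.hbase.regionserver.TimeRangeTracker;

public class TrackerCompatDemo {
  public static void main(String[] args) throws Exception {
    // New format: PB magic + message, parsed back on the protobuf branch.
    TimeRangeTracker src = TimeRangeTracker.create(TimeRangeTracker.Type.NON_SYNC, 1, 2);
    TimeRangeTracker viaPb = TimeRangeTracker.parseFrom(TimeRangeTracker.toByteArray(src));
    System.out.println(viaPb); // [1,2]

    // Legacy format: two raw big-endian longs, min first, as the old write() produced.
    ByteArrayOutputStream buf = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(buf);
    out.writeLong(100);
    out.writeLong(200);
    TimeRangeTracker viaWritable = TimeRangeTracker.parseFrom(buf.toByteArray());
    System.out.println(viaWritable); // [100,200]
  }
}
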
Compactor.java

@@ -197,8 +197,7 @@ public abstract class Compactor<T extends CellSink> {
       }
     }
     tmp = fileInfo.get(TIMERANGE_KEY);
-    TimeRangeTracker trt = TimeRangeTracker.getTimeRangeTracker(tmp);
-    fd.latestPutTs = trt == null? HConstants.LATEST_TIMESTAMP: trt.getMax();
+    fd.latestPutTs = tmp == null ? HConstants.LATEST_TIMESTAMP: TimeRangeTracker.parseFrom(tmp).getMax();
     if (LOG.isDebugEnabled()) {
       LOG.debug("Compacting " + file +
           ", keycount=" + keyCount +

TestSimpleTimeRangeTracker.java

@@ -21,12 +21,14 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 
+import java.io.DataOutputStream;
 import java.io.IOException;
+import java.util.Optional;
 
+import org.apache.hadoop.hbase.io.ByteArrayOutputStream;
 import org.apache.hadoop.hbase.io.TimeRange;
 import org.apache.hadoop.hbase.testclassification.RegionServerTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.apache.hadoop.hbase.util.Writables;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -62,8 +64,8 @@ public class TestSimpleTimeRangeTracker {
   @Test
   public void testTimeRangeTrackerNullIsSameAsTimeRangeNull() throws IOException {
     TimeRangeTracker src = getTimeRangeTracker(1, 2);
-    byte [] bytes = Writables.getBytes(src);
-    TimeRange tgt = TimeRangeTracker.getTimeRange(bytes);
+    byte[] bytes = TimeRangeTracker.toByteArray(src);
+    TimeRange tgt = TimeRangeTracker.parseFrom(bytes).toTimeRange();
     assertEquals(src.getMin(), tgt.getMin());
     assertEquals(src.getMax(), tgt.getMax());
   }
@@ -71,12 +73,22 @@ public class TestSimpleTimeRangeTracker {
   @Test
   public void testSerialization() throws IOException {
     TimeRangeTracker src = getTimeRangeTracker(1, 2);
-    TimeRangeTracker tgt = getTimeRangeTracker();
-    Writables.copyWritable(src, tgt);
+    TimeRangeTracker tgt = TimeRangeTracker.parseFrom(TimeRangeTracker.toByteArray(src));
     assertEquals(src.getMin(), tgt.getMin());
     assertEquals(src.getMax(), tgt.getMax());
   }
 
+  @Test
+  public void testLegacySerialization() throws IOException {
+    ByteArrayOutputStream data = new ByteArrayOutputStream();
+    DataOutputStream output = new DataOutputStream(data);
+    output.writeLong(100);
+    output.writeLong(200);
+    TimeRangeTracker tgt = TimeRangeTracker.parseFrom(data.toByteArray());
+    assertEquals(100, tgt.getMin());
+    assertEquals(200, tgt.getMax());
+  }
+
   @Test
   public void testAlwaysDecrementingSetsMaximum() {
     TimeRangeTracker trr = getTimeRangeTracker();