HBASE-18754 Get rid of Writable from TimeRangeTracker

Chia-Ping Tsai 2017-10-24 15:14:14 +08:00
parent 6ceb4a4f41
commit 2a28ff840e
9 changed files with 60 additions and 45 deletions

TestCellBasedHFileOutputFormat2.java

@@ -399,8 +399,7 @@ public class TestCellBasedHFileOutputFormat2 {
       assertNotNull(range);
 
       // unmarshall and check values.
-      TimeRangeTracker timeRangeTracker = TimeRangeTracker.create(TimeRangeTracker.Type.SYNC);
-      Writables.copyWritable(range, timeRangeTracker);
+      TimeRangeTracker timeRangeTracker = TimeRangeTracker.parseFrom(range);
       LOG.info(timeRangeTracker.getMin() +
           "...." + timeRangeTracker.getMax());
       assertEquals(1000, timeRangeTracker.getMin());

TestHFileOutputFormat2.java

@@ -94,7 +94,6 @@ import org.apache.hadoop.hbase.tool.LoadIncrementalHFiles;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.util.ReflectionUtils;
-import org.apache.hadoop.hbase.util.Writables;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.protocol.BlockStoragePolicy;
 import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
@@ -400,8 +399,7 @@ public class TestHFileOutputFormat2 {
       assertNotNull(range);
 
       // unmarshall and check values.
-      TimeRangeTracker timeRangeTracker = TimeRangeTracker.create(TimeRangeTracker.Type.SYNC);
-      Writables.copyWritable(range, timeRangeTracker);
+      TimeRangeTracker timeRangeTracker = TimeRangeTracker.parseFrom(range);
       LOG.info(timeRangeTracker.getMin() +
           "...." + timeRangeTracker.getMax());
       assertEquals(1000, timeRangeTracker.getMin());

HBase.proto

@@ -118,6 +118,11 @@ message TimeRange {
   optional uint64 to = 2;
 }
 
+message TimeRangeTracker {
+  optional uint64 from = 1;
+  optional uint64 to = 2;
+}
+
 /* ColumnFamily Specific TimeRange */
 message ColumnFamilyTimeRange {
   required bytes column_family = 1;
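
Note, for reference (not part of the patch): the new TimeRangeTracker message mirrors the existing TimeRange message, and the bytes written to store file metadata are prefixed with HBase's 4-byte PB magic ("PBUF"). A minimal Java sketch of encoding and decoding such a blob with the shaded protobuf helpers this patch uses, assuming an HBase 2.x classpath:

import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;

public class TimeRangeTrackerBlobDemo {
  public static void main(String[] args) throws Exception {
    // Encode: build the message and prepend the "PBUF" magic, as
    // TimeRangeTracker.toByteArray() does further down in this commit.
    byte[] blob = ProtobufUtil.prependPBMagic(
        HBaseProtos.TimeRangeTracker.newBuilder()
            .setFrom(1000L).setTo(2000L).build().toByteArray());

    // Decode: strip the magic, then merge the remaining message bytes.
    int pblen = ProtobufUtil.lengthOfPBMagic();
    HBaseProtos.TimeRangeTracker.Builder builder =
        HBaseProtos.TimeRangeTracker.newBuilder();
    ProtobufUtil.mergeFrom(builder, blob, pblen, blob.length - pblen);
    System.out.println(builder.getFrom() + "...." + builder.getTo()); // 1000....2000
  }
}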

HFilePrettyPrinter.java

@@ -539,7 +539,7 @@ public class HFilePrettyPrinter extends Configured implements Tool {
           || Bytes.equals(e.getKey(), HStoreFile.BULKLOAD_TIME_KEY)) {
         out.println(Bytes.toLong(e.getValue()));
       } else if (Bytes.equals(e.getKey(), HStoreFile.TIMERANGE_KEY)) {
-        TimeRangeTracker timeRangeTracker = TimeRangeTracker.getTimeRangeTracker(e.getValue());
+        TimeRangeTracker timeRangeTracker = TimeRangeTracker.parseFrom(e.getValue());
         out.println(timeRangeTracker.getMin() + "...." + timeRangeTracker.getMax());
       } else if (Bytes.equals(e.getKey(), FileInfo.AVG_KEY_LEN)
           || Bytes.equals(e.getKey(), FileInfo.AVG_VALUE_LEN)

HStoreFile.java

@@ -439,7 +439,8 @@ public class HStoreFile implements StoreFile {
     reader.loadBloomfilter(BlockType.DELETE_FAMILY_BLOOM_META);
 
     try {
-      this.reader.timeRange = TimeRangeTracker.getTimeRange(metadataMap.get(TIMERANGE_KEY));
+      byte[] data = metadataMap.get(TIMERANGE_KEY);
+      this.reader.timeRange = data == null ? null : TimeRangeTracker.parseFrom(data).toTimeRange();
     } catch (IllegalArgumentException e) {
       LOG.error("Error reading timestamp range data from meta -- " +
           "proceeding without", e);

StoreFileWriter.java

@@ -52,7 +52,6 @@ import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.util.RowBloomContext;
 import org.apache.hadoop.hbase.util.RowColBloomContext;
-import org.apache.hadoop.io.WritableUtils;
 import org.apache.yetus.audience.InterfaceAudience;
 
 import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
@@ -184,7 +183,9 @@ public class StoreFileWriter implements CellSink, ShipperListener {
    * Add TimestampRange and earliest put timestamp to Metadata
    */
   public void appendTrackedTimestampsToMetadata() throws IOException {
-    appendFileInfo(TIMERANGE_KEY, WritableUtils.toByteArray(timeRangeTracker));
+    // TODO: The StoreFileReader always converts the byte[] to TimeRange
+    // via TimeRangeTracker, so we should write the serialization data of TimeRange directly.
+    appendFileInfo(TIMERANGE_KEY, TimeRangeTracker.toByteArray(timeRangeTracker));
     appendFileInfo(EARLIEST_PUT_TS, Bytes.toBytes(earliestPutTs));
   }
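
A hypothetical sketch of the TODO above (not in the patch): the TimeRange and TimeRangeTracker messages are wire-compatible (from = 1, to = 2, both uint64), so the writer could serialize HBaseProtos.TimeRange directly and readers of the new format would decode it unchanged. The helper name below is illustrative:

import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;

public final class TimeRangePB {
  private TimeRangePB() {}

  // Hypothetical helper: emit a "PBUF"-prefixed TimeRange message.
  // Because the field numbers and types match the TimeRangeTracker
  // message, TimeRangeTracker.parseFrom() would accept the output.
  public static byte[] toTimeRangeByteArray(long min, long max) {
    return ProtobufUtil.prependPBMagic(
        HBaseProtos.TimeRange.newBuilder()
            .setFrom(min)
            .setTo(max)
            .build()
            .toByteArray());
  }
}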

TimeRangeTracker.java

@@ -18,19 +18,20 @@
  */
 package org.apache.hadoop.hbase.regionserver;
 
-import java.io.DataInput;
-import java.io.DataOutput;
+import java.io.ByteArrayInputStream;
+import java.io.DataInputStream;
 import java.io.IOException;
 import java.util.concurrent.atomic.AtomicLong;
 
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellUtil;
 import org.apache.hadoop.hbase.io.TimeRange;
-import org.apache.hadoop.hbase.util.Writables;
-import org.apache.hadoop.io.Writable;
 import org.apache.yetus.audience.InterfaceAudience;
 
 import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
+import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
+import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
 
 /**
  * Stores minimum and maximum timestamp values, it is [minimumTimestamp, maximumTimestamp] in
@@ -44,7 +45,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
  * at read time via an instance of {@link TimeRange} to test if Cells fit the StoreFile TimeRange.
  */
 @InterfaceAudience.Private
-public abstract class TimeRangeTracker implements Writable {
+public abstract class TimeRangeTracker {
 
   public enum Type {
     // thread-unsafe
@@ -175,42 +176,41 @@ public abstract class TimeRangeTracker implements Writable {
    */
   public abstract long getMax();
 
-  public void write(final DataOutput out) throws IOException {
-    out.writeLong(getMin());
-    out.writeLong(getMax());
-  }
-
-  public void readFields(final DataInput in) throws IOException {
-    setMin(in.readLong());
-    setMax(in.readLong());
-  }
-
   @Override
   public String toString() {
     return "[" + getMin() + "," + getMax() + "]";
   }
 
   /**
+   * @param data the serialization data. It can't be null!
    * @return An instance of NonSyncTimeRangeTracker filled w/ the content of serialized
    * NonSyncTimeRangeTracker in <code>timeRangeTrackerBytes</code>.
    * @throws IOException
    */
-  public static TimeRangeTracker getTimeRangeTracker(final byte [] timeRangeTrackerBytes)
-  throws IOException {
-    if (timeRangeTrackerBytes == null) return null;
-    TimeRangeTracker trt = TimeRangeTracker.create(Type.NON_SYNC);
-    Writables.copyWritable(timeRangeTrackerBytes, trt);
-    return trt;
+  public static TimeRangeTracker parseFrom(final byte[] data) throws IOException {
+    return parseFrom(data, Type.NON_SYNC);
   }
 
-  /**
-   * @return An instance of a TimeRange made from the serialized TimeRangeTracker passed in
-   * <code>timeRangeTrackerBytes</code>.
-   * @throws IOException
-   */
-  static TimeRange getTimeRange(final byte [] timeRangeTrackerBytes) throws IOException {
-    TimeRangeTracker trt = getTimeRangeTracker(timeRangeTrackerBytes);
-    return trt == null? null: trt.toTimeRange();
+  public static TimeRangeTracker parseFrom(final byte[] data, Type type) throws IOException {
+    Preconditions.checkNotNull(data, "input data is null!");
+    if (ProtobufUtil.isPBMagicPrefix(data)) {
+      int pblen = ProtobufUtil.lengthOfPBMagic();
+      HBaseProtos.TimeRangeTracker.Builder builder = HBaseProtos.TimeRangeTracker.newBuilder();
+      ProtobufUtil.mergeFrom(builder, data, pblen, data.length - pblen);
+      return TimeRangeTracker.create(type, builder.getFrom(), builder.getTo());
+    } else {
+      DataInputStream in = new DataInputStream(new ByteArrayInputStream(data));
+      return TimeRangeTracker.create(type, in.readLong(), in.readLong());
+    }
   }
 
+  public static byte[] toByteArray(TimeRangeTracker tracker) {
+    return ProtobufUtil.prependPBMagic(
+        HBaseProtos.TimeRangeTracker.newBuilder()
+          .setFrom(tracker.getMin())
+          .setTo(tracker.getMax())
+          .build()
+          .toByteArray());
+  }
 
   /**
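
Taken together, parseFrom() and toByteArray() give a symmetric round trip that also swallows pre-patch blobs: parseFrom() sniffs the PB magic and, when it is absent, falls back to the legacy layout of two raw big-endian longs that the removed write() produced. A minimal sketch using only the public static API introduced here, assuming an HBase 2.x classpath:

import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;

import org.apache.hadoop.hbase.regionserver.TimeRangeTracker;

public class TimeRangeTrackerRoundTrip {
  public static void main(String[] args) throws Exception {
    // Fabricate a pre-patch blob: the old Writable write() emitted
    // getMin() then getMax() as two raw big-endian longs.
    ByteArrayOutputStream legacy = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(legacy);
    out.writeLong(1000L);
    out.writeLong(2000L);

    // No PB magic present, so parseFrom() takes the legacy branch.
    TimeRangeTracker trt = TimeRangeTracker.parseFrom(legacy.toByteArray());

    // Re-serialize: the result is the new "PBUF"-prefixed protobuf form.
    byte[] pb = TimeRangeTracker.toByteArray(trt);
    TimeRangeTracker roundTripped = TimeRangeTracker.parseFrom(pb);
    System.out.println(roundTripped.getMin() + "...." + roundTripped.getMax()); // 1000....2000
  }
}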

Compactor.java

@@ -197,8 +197,7 @@ public abstract class Compactor<T extends CellSink> {
       }
     }
     tmp = fileInfo.get(TIMERANGE_KEY);
-    TimeRangeTracker trt = TimeRangeTracker.getTimeRangeTracker(tmp);
-    fd.latestPutTs = trt == null? HConstants.LATEST_TIMESTAMP: trt.getMax();
+    fd.latestPutTs = tmp == null ? HConstants.LATEST_TIMESTAMP : TimeRangeTracker.parseFrom(tmp).getMax();
     if (LOG.isDebugEnabled()) {
       LOG.debug("Compacting " + file +
           ", keycount=" + keyCount +

TestSimpleTimeRangeTracker.java

@@ -21,12 +21,14 @@ import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertFalse;
 import static org.junit.Assert.assertTrue;
 
+import java.io.DataOutputStream;
 import java.io.IOException;
 import java.util.Optional;
 
+import org.apache.hadoop.hbase.io.ByteArrayOutputStream;
 import org.apache.hadoop.hbase.io.TimeRange;
 import org.apache.hadoop.hbase.testclassification.RegionServerTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.apache.hadoop.hbase.util.Writables;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -62,8 +64,8 @@ public class TestSimpleTimeRangeTracker {
   @Test
   public void testTimeRangeTrackerNullIsSameAsTimeRangeNull() throws IOException {
     TimeRangeTracker src = getTimeRangeTracker(1, 2);
-    byte [] bytes = Writables.getBytes(src);
-    TimeRange tgt = TimeRangeTracker.getTimeRange(bytes);
+    byte[] bytes = TimeRangeTracker.toByteArray(src);
+    TimeRange tgt = TimeRangeTracker.parseFrom(bytes).toTimeRange();
     assertEquals(src.getMin(), tgt.getMin());
     assertEquals(src.getMax(), tgt.getMax());
   }
@@ -71,12 +73,22 @@ public class TestSimpleTimeRangeTracker {
   @Test
   public void testSerialization() throws IOException {
     TimeRangeTracker src = getTimeRangeTracker(1, 2);
-    TimeRangeTracker tgt = getTimeRangeTracker();
-    Writables.copyWritable(src, tgt);
+    TimeRangeTracker tgt = TimeRangeTracker.parseFrom(TimeRangeTracker.toByteArray(src));
     assertEquals(src.getMin(), tgt.getMin());
     assertEquals(src.getMax(), tgt.getMax());
   }
 
+  @Test
+  public void testLegacySerialization() throws IOException {
+    ByteArrayOutputStream data = new ByteArrayOutputStream();
+    DataOutputStream output = new DataOutputStream(data);
+    output.writeLong(100);
+    output.writeLong(200);
+    TimeRangeTracker tgt = TimeRangeTracker.parseFrom(data.toByteArray());
+    assertEquals(100, tgt.getMin());
+    assertEquals(200, tgt.getMax());
+  }
+
   @Test
   public void testAlwaysDecrementingSetsMaximum() {
     TimeRangeTracker trr = getTimeRangeTracker();
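
A closing compatibility note (illustrative, not from the patch): the legacy Writable layout is exactly two raw big-endian longs with no prefix, so the two on-disk encodings of a TIMERANGE_KEY value can be told apart by the PB magic alone. A hypothetical debugging helper:

import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;

public final class TimeRangeFormat {
  private TimeRangeFormat() {}

  // Hypothetical helper: report which serialization a TIMERANGE_KEY
  // value uses.
  public static String describe(byte[] data) {
    if (ProtobufUtil.isPBMagicPrefix(data)) {
      return "protobuf format (\"PBUF\"-prefixed, post-HBASE-18754)";
    }
    // Legacy Writable layout: min then max as raw big-endian longs.
    return "legacy Writable format (" + data.length + " bytes, expected 16)";
  }
}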