diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellBasedHFileOutputFormat2.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellBasedHFileOutputFormat2.java
index 28a7be1c877..25f45cb2496 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellBasedHFileOutputFormat2.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestCellBasedHFileOutputFormat2.java
@@ -399,8 +399,7 @@ public class TestCellBasedHFileOutputFormat2 {
assertNotNull(range);
// unmarshall and check values.
- TimeRangeTracker timeRangeTracker = TimeRangeTracker.create(TimeRangeTracker.Type.SYNC);
- Writables.copyWritable(range, timeRangeTracker);
+ TimeRangeTracker timeRangeTracker = TimeRangeTracker.parseFrom(range);
LOG.info(timeRangeTracker.getMin() +
"...." + timeRangeTracker.getMax());
assertEquals(1000, timeRangeTracker.getMin());
diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
index ec19b2e49c9..1de230ebe45 100644
--- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
+++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestHFileOutputFormat2.java
@@ -94,7 +94,6 @@ import org.apache.hadoop.hbase.tool.LoadIncrementalHFiles;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.ReflectionUtils;
-import org.apache.hadoop.hbase.util.Writables;
import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.protocol.BlockStoragePolicy;
import org.apache.hadoop.hdfs.protocol.HdfsFileStatus;
@@ -400,8 +399,7 @@ public class TestHFileOutputFormat2 {
assertNotNull(range);
// unmarshall and check values.
- TimeRangeTracker timeRangeTracker = TimeRangeTracker.create(TimeRangeTracker.Type.SYNC);
- Writables.copyWritable(range, timeRangeTracker);
+    TimeRangeTracker timeRangeTracker = TimeRangeTracker.parseFrom(range);
LOG.info(timeRangeTracker.getMin() +
"...." + timeRangeTracker.getMax());
assertEquals(1000, timeRangeTracker.getMin());
diff --git a/hbase-protocol-shaded/src/main/protobuf/HBase.proto b/hbase-protocol-shaded/src/main/protobuf/HBase.proto
index 9de897a7be0..cc1ae8f756a 100644
--- a/hbase-protocol-shaded/src/main/protobuf/HBase.proto
+++ b/hbase-protocol-shaded/src/main/protobuf/HBase.proto
@@ -118,6 +118,11 @@ message TimeRange {
optional uint64 to = 2;
}
+message TimeRangeTracker {
+ optional uint64 from = 1;
+ optional uint64 to = 2;
+}
+
/* ColumnFamily Specific TimeRange */
message ColumnFamilyTimeRange {
required bytes column_family = 1;
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java
index acb25fcb702..a800ef1e500 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFilePrettyPrinter.java
@@ -539,7 +539,7 @@ public class HFilePrettyPrinter extends Configured implements Tool {
|| Bytes.equals(e.getKey(), HStoreFile.BULKLOAD_TIME_KEY)) {
out.println(Bytes.toLong(e.getValue()));
} else if (Bytes.equals(e.getKey(), HStoreFile.TIMERANGE_KEY)) {
- TimeRangeTracker timeRangeTracker = TimeRangeTracker.getTimeRangeTracker(e.getValue());
+ TimeRangeTracker timeRangeTracker = TimeRangeTracker.parseFrom(e.getValue());
out.println(timeRangeTracker.getMin() + "...." + timeRangeTracker.getMax());
} else if (Bytes.equals(e.getKey(), FileInfo.AVG_KEY_LEN)
|| Bytes.equals(e.getKey(), FileInfo.AVG_VALUE_LEN)
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStoreFile.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStoreFile.java
index 0ca01a5e234..b405c86aab2 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStoreFile.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HStoreFile.java
@@ -439,7 +439,8 @@ public class HStoreFile implements StoreFile {
reader.loadBloomfilter(BlockType.DELETE_FAMILY_BLOOM_META);
try {
- this.reader.timeRange = TimeRangeTracker.getTimeRange(metadataMap.get(TIMERANGE_KEY));
+ byte[] data = metadataMap.get(TIMERANGE_KEY);
+ this.reader.timeRange = data == null ? null : TimeRangeTracker.parseFrom(data).toTimeRange();
} catch (IllegalArgumentException e) {
LOG.error("Error reading timestamp range data from meta -- " +
"proceeding without", e);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileWriter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileWriter.java
index 29bd3afe72c..5fc96ef2798 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileWriter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFileWriter.java
@@ -52,7 +52,6 @@ import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.FSUtils;
import org.apache.hadoop.hbase.util.RowBloomContext;
import org.apache.hadoop.hbase.util.RowColBloomContext;
-import org.apache.hadoop.io.WritableUtils;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
@@ -184,7 +183,9 @@ public class StoreFileWriter implements CellSink, ShipperListener {
* Add TimestampRange and earliest put timestamp to Metadata
*/
public void appendTrackedTimestampsToMetadata() throws IOException {
- appendFileInfo(TIMERANGE_KEY, WritableUtils.toByteArray(timeRangeTracker));
+ // TODO: The StoreFileReader always converts the byte[] to TimeRange
+ // via TimeRangeTracker, so we should write the serialization data of TimeRange directly.
+ appendFileInfo(TIMERANGE_KEY, TimeRangeTracker.toByteArray(timeRangeTracker));
appendFileInfo(EARLIEST_PUT_TS, Bytes.toBytes(earliestPutTs));
}
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/TimeRangeTracker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/TimeRangeTracker.java
index cefbd9ae955..08d9853802a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/TimeRangeTracker.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/TimeRangeTracker.java
@@ -18,19 +18,20 @@
*/
package org.apache.hadoop.hbase.regionserver;
-import java.io.DataInput;
-import java.io.DataOutput;
+import java.io.ByteArrayInputStream;
+import java.io.DataInputStream;
import java.io.IOException;
import java.util.concurrent.atomic.AtomicLong;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.io.TimeRange;
-import org.apache.hadoop.hbase.util.Writables;
-import org.apache.hadoop.io.Writable;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTesting;
+import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
+import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
/**
* Stores minimum and maximum timestamp values, it is [minimumTimestamp, maximumTimestamp] in
@@ -44,7 +45,7 @@ import org.apache.hadoop.hbase.shaded.com.google.common.annotations.VisibleForTe
* at read time via an instance of {@link TimeRange} to test if Cells fit the StoreFile TimeRange.
*/
@InterfaceAudience.Private
-public abstract class TimeRangeTracker implements Writable {
+public abstract class TimeRangeTracker {
public enum Type {
// thread-unsafe
@@ -175,42 +176,41 @@ public abstract class TimeRangeTracker implements Writable {
*/
public abstract long getMax();
- public void write(final DataOutput out) throws IOException {
- out.writeLong(getMin());
- out.writeLong(getMax());
- }
-
- public void readFields(final DataInput in) throws IOException {
- setMin(in.readLong());
- setMax(in.readLong());
- }
-
@Override
public String toString() {
return "[" + getMin() + "," + getMax() + "]";
}
/**
+ * @param data the serialization data. It can't be null!
* @return An instance of NonSyncTimeRangeTracker filled w/ the content of serialized
* NonSyncTimeRangeTracker in timeRangeTrackerBytes
.
* @throws IOException
*/
- public static TimeRangeTracker getTimeRangeTracker(final byte [] timeRangeTrackerBytes)
- throws IOException {
- if (timeRangeTrackerBytes == null) return null;
- TimeRangeTracker trt = TimeRangeTracker.create(Type.NON_SYNC);
- Writables.copyWritable(timeRangeTrackerBytes, trt);
- return trt;
+ public static TimeRangeTracker parseFrom(final byte[] data) throws IOException {
+ return parseFrom(data, Type.NON_SYNC);
}
- /**
- * @return An instance of a TimeRange made from the serialized TimeRangeTracker passed in
- * timeRangeTrackerBytes
.
- * @throws IOException
- */
- static TimeRange getTimeRange(final byte [] timeRangeTrackerBytes) throws IOException {
- TimeRangeTracker trt = getTimeRangeTracker(timeRangeTrackerBytes);
- return trt == null? null: trt.toTimeRange();
+ public static TimeRangeTracker parseFrom(final byte[] data, Type type) throws IOException {
+ Preconditions.checkNotNull(data, "input data is null!");
+ if (ProtobufUtil.isPBMagicPrefix(data)) {
+ int pblen = ProtobufUtil.lengthOfPBMagic();
+ HBaseProtos.TimeRangeTracker.Builder builder = HBaseProtos.TimeRangeTracker.newBuilder();
+ ProtobufUtil.mergeFrom(builder, data, pblen, data.length - pblen);
+ return TimeRangeTracker.create(type, builder.getFrom(), builder.getTo());
+ } else {
+ DataInputStream in = new DataInputStream(new ByteArrayInputStream(data));
+ return TimeRangeTracker.create(type, in.readLong(), in.readLong());
+ }
+ }
+
+ public static byte[] toByteArray(TimeRangeTracker tracker) {
+ return ProtobufUtil.prependPBMagic(
+ HBaseProtos.TimeRangeTracker.newBuilder()
+ .setFrom(tracker.getMin())
+ .setTo(tracker.getMax())
+ .build()
+ .toByteArray());
}
/**
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java
index f9efd98261a..dc1f41c7727 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/compactions/Compactor.java
@@ -197,8 +197,7 @@ public abstract class Compactor {
}
}
tmp = fileInfo.get(TIMERANGE_KEY);
- TimeRangeTracker trt = TimeRangeTracker.getTimeRangeTracker(tmp);
- fd.latestPutTs = trt == null? HConstants.LATEST_TIMESTAMP: trt.getMax();
+      fd.latestPutTs = tmp == null ? HConstants.LATEST_TIMESTAMP : TimeRangeTracker.parseFrom(tmp).getMax();
if (LOG.isDebugEnabled()) {
LOG.debug("Compacting " + file +
", keycount=" + keyCount +
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSimpleTimeRangeTracker.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSimpleTimeRangeTracker.java
index 66b936ae22f..b660366c293 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSimpleTimeRangeTracker.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestSimpleTimeRangeTracker.java
@@ -21,12 +21,14 @@ import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
+import java.io.DataOutputStream;
import java.io.IOException;
+import java.util.Optional;
+import org.apache.hadoop.hbase.io.ByteArrayOutputStream;
import org.apache.hadoop.hbase.io.TimeRange;
import org.apache.hadoop.hbase.testclassification.RegionServerTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
-import org.apache.hadoop.hbase.util.Writables;
import org.junit.Test;
import org.junit.experimental.categories.Category;
@@ -62,8 +64,8 @@ public class TestSimpleTimeRangeTracker {
@Test
public void testTimeRangeTrackerNullIsSameAsTimeRangeNull() throws IOException {
TimeRangeTracker src = getTimeRangeTracker(1, 2);
- byte [] bytes = Writables.getBytes(src);
- TimeRange tgt = TimeRangeTracker.getTimeRange(bytes);
+ byte[] bytes = TimeRangeTracker.toByteArray(src);
+ TimeRange tgt = TimeRangeTracker.parseFrom(bytes).toTimeRange();
assertEquals(src.getMin(), tgt.getMin());
assertEquals(src.getMax(), tgt.getMax());
}
@@ -71,12 +73,22 @@ public class TestSimpleTimeRangeTracker {
@Test
public void testSerialization() throws IOException {
TimeRangeTracker src = getTimeRangeTracker(1, 2);
- TimeRangeTracker tgt = getTimeRangeTracker();
- Writables.copyWritable(src, tgt);
+ TimeRangeTracker tgt = TimeRangeTracker.parseFrom(TimeRangeTracker.toByteArray(src));
assertEquals(src.getMin(), tgt.getMin());
assertEquals(src.getMax(), tgt.getMax());
}
+ @Test
+ public void testLegacySerialization() throws IOException {
+ ByteArrayOutputStream data = new ByteArrayOutputStream();
+ DataOutputStream output = new DataOutputStream(data);
+ output.writeLong(100);
+ output.writeLong(200);
+ TimeRangeTracker tgt = TimeRangeTracker.parseFrom(data.toByteArray());
+ assertEquals(100, tgt.getMin());
+ assertEquals(200, tgt.getMax());
+ }
+
@Test
public void testAlwaysDecrementingSetsMaximum() {
TimeRangeTracker trr = getTimeRangeTracker();