HBASE-19116 Currently the tail of hfiles with CellComparator* classname makes it so hbase1 can't open hbase2 written hfiles; fix
When serializing, where appropriate, write the hbase-1.x name of the Comparator to the hfile trailer so hbase-1.x can read hbase-2.x hfiles (they are the same format).
commit 8a3b4cdc67
parent 02bba3b788
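For orientation only (not part of the commit): a minimal, standalone sketch of the write-side translation the patch introduces. When a 2.x writer serializes the trailer, a 'standard' hbase-2.x comparator class name is swapped for the hbase-1.x name that an old reader can resolve; any other (custom) comparator name passes through untouched. The class ComparatorNameWriteTranslation and the literal class-name strings are assumptions made for this sketch, inferred from the classes the diff below references, not constants copied out of FixedFileTrailer.

// Standalone sketch (not from the commit) of the write-side translation that
// getHBase1CompatibleName performs before the comparator class name goes into
// the hfile trailer protobuf. The literal class names below are assumptions
// inferred from the classes the patch references (CellComparatorImpl,
// MetaCellComparator, KeyValue.COMPARATOR, KeyValue.META_COMPARATOR).
public final class ComparatorNameWriteTranslation {

  // hbase-2.x comparator class names.
  static final String HBASE2_CELL_COMPARATOR =
      "org.apache.hadoop.hbase.CellComparatorImpl";
  static final String HBASE2_META_CELL_COMPARATOR =
      "org.apache.hadoop.hbase.CellComparatorImpl$MetaCellComparator";

  // hbase-1.x names that an hbase-1.x reader knows how to resolve.
  static final String HBASE1_KV_COMPARATOR =
      "org.apache.hadoop.hbase.KeyValue$KVComparator";
  static final String HBASE1_META_COMPARATOR =
      "org.apache.hadoop.hbase.KeyValue$MetaComparator";

  // Standard comparators get the old hbase-1.x name; custom comparators keep
  // whatever name they already have.
  static String toHBase1CompatibleName(String comparatorClassName) {
    if (HBASE2_CELL_COMPARATOR.equals(comparatorClassName)) {
      return HBASE1_KV_COMPARATOR;
    }
    if (HBASE2_META_CELL_COMPARATOR.equals(comparatorClassName)) {
      return HBASE1_META_COMPARATOR;
    }
    return comparatorClassName;
  }

  public static void main(String[] args) {
    System.out.println(toHBase1CompatibleName(HBASE2_CELL_COMPARATOR));
    System.out.println(toHBase1CompatibleName("com.example.MyCustomComparator"));
  }
}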
FixedFileTrailer.java
@@ -18,6 +18,7 @@
  */
 package org.apache.hadoop.hbase.io.hfile;
 
 import java.io.ByteArrayInputStream;
 import java.io.ByteArrayOutputStream;
 import java.io.DataInput;
@@ -185,34 +186,37 @@ public class FixedFileTrailer {
     baos.writeTo(outputStream);
   }
 
-  /**
-   * Write trailer data as protobuf
-   * @param outputStream
-   * @throws IOException
-   */
-  void serializeAsPB(DataOutputStream output) throws IOException {
-    ByteArrayOutputStream baos = new ByteArrayOutputStream();
+  @org.apache.hbase.thirdparty.com.google.common.annotations.VisibleForTesting
+  HFileProtos.FileTrailerProto toProtobuf() {
     HFileProtos.FileTrailerProto.Builder builder = HFileProtos.FileTrailerProto.newBuilder()
       .setFileInfoOffset(fileInfoOffset)
       .setLoadOnOpenDataOffset(loadOnOpenDataOffset)
       .setUncompressedDataIndexSize(uncompressedDataIndexSize)
       .setTotalUncompressedBytes(totalUncompressedBytes)
       .setDataIndexCount(dataIndexCount)
       .setMetaIndexCount(metaIndexCount)
       .setEntryCount(entryCount)
       .setNumDataIndexLevels(numDataIndexLevels)
       .setFirstDataBlockOffset(firstDataBlockOffset)
       .setLastDataBlockOffset(lastDataBlockOffset)
-      // TODO this is a classname encoded into an HFile's trailer. We are going to need to have
-      // some compat code here.
-      .setComparatorClassName(comparatorClassName)
+      .setComparatorClassName(getHBase1CompatibleName(comparatorClassName))
       .setCompressionCodec(compressionCodec.ordinal());
     if (encryptionKey != null) {
       builder.setEncryptionKey(UnsafeByteOperations.unsafeWrap(encryptionKey));
     }
+    return builder.build();
+  }
+
+  /**
+   * Write trailer data as protobuf.
+   * NOTE: we run a translation on the comparator name and will serialize the old hbase-1.x where
+   * it makes sense. See {@link #getHBase1CompatibleName(String)}.
+   */
+  void serializeAsPB(DataOutputStream output) throws IOException {
+    ByteArrayOutputStream baos = new ByteArrayOutputStream();
     // We need this extra copy unfortunately to determine the final size of the
     // delimited output, see use of baos.size() below.
-    builder.build().writeDelimitedTo(baos);
+    toProtobuf().writeDelimitedTo(baos);
     baos.writeTo(output);
     // Pad to make up the difference between variable PB encoding length and the
     // length when encoded as writable under earlier V2 formats. Failure to pad
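An aside on the padding comments in the hunk above (illustrative, not from the commit): the protobuf-encoded trailer is variable length, while readers locate the trailer at a fixed distance from the end of the file, so the writer pads the delimited protobuf out to the fixed trailer size. A rough sketch of that idea follows; ASSUMED_FIXED_TRAILER_SIZE and the class PaddedTrailerSketch are assumptions for the sketch, not the real FixedFileTrailer constants, and the real trailer keeps additional fixed fields (such as a version marker) at its end that are omitted here.

// Illustrative sketch of the padding idea, not the real FixedFileTrailer code.
import java.io.ByteArrayOutputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public final class PaddedTrailerSketch {

  // Assumption for the sketch: the on-disk trailer always occupies this many bytes.
  static final int ASSUMED_FIXED_TRAILER_SIZE = 1024;

  // Write the variable-length, delimited protobuf bytes, then zero-pad so the
  // trailer occupies the same number of bytes on disk every time and a reader
  // can find it at a fixed offset from the end of the file.
  static void writePadded(DataOutputStream out, byte[] delimitedProtobuf) throws IOException {
    if (delimitedProtobuf.length > ASSUMED_FIXED_TRAILER_SIZE) {
      throw new IOException("Serialized trailer larger than the fixed trailer size");
    }
    out.write(delimitedProtobuf);
    for (int i = delimitedProtobuf.length; i < ASSUMED_FIXED_TRAILER_SIZE; i++) {
      out.write(0);
    }
  }

  public static void main(String[] args) throws IOException {
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    writePadded(new DataOutputStream(bytes), new byte[] { 1, 2, 3 });
    System.out.println(bytes.size()); // always ASSUMED_FIXED_TRAILER_SIZE
  }
}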
@@ -298,8 +302,6 @@
       lastDataBlockOffset = trailerProto.getLastDataBlockOffset();
     }
     if (trailerProto.hasComparatorClassName()) {
-      // TODO this is a classname encoded into an HFile's trailer. We are going to need to have
-      // some compat code here.
       setComparatorClass(getComparatorClass(trailerProto.getComparatorClassName()));
     }
     if (trailerProto.hasCompressionCodec()) {
@@ -548,14 +550,46 @@
         CellComparator comp = klass.getDeclaredConstructor().newInstance();
         // if the name wasn't one of the legacy names, maybe its a legit new
         // kind of comparator.
-        comparatorClassName = klass.getName();
+        this.comparatorClassName = klass.getName();
       }
     } catch (Exception e) {
       throw new RuntimeException("Comparator class " + klass.getName() + " is not instantiable", e);
     }
   }
 
+  /**
+   * If a 'standard' Comparator, write the old name for the Comparator when we serialize rather
+   * than the new name; writing the new name will make it so newly-written hfiles are not parseable
+   * by hbase-1.x, a facility we'd like to preserve across rolling upgrade and hbase-1.x clusters
+   * reading hbase-2.x produce.
+   *
+   * The Comparators in hbase-2.x work the same as they did in hbase-1.x; they compare
+   * KeyValues. In hbase-2.x they were renamed making use of the more generic 'Cell'
+   * nomenclature to indicate that we intend to move away from KeyValues post hbase-2. A naming
+   * change is not reason enough to make it so hbase-1.x cannot read hbase-2.x files given the
+   * structure goes unchanged (hfile v3). So, lets write the old names for Comparators into the
+   * hfile tails in hbase-2. Here is where we do the translation.
+   * {@link #getComparatorClass(String)} does translation going the other way.
+   *
+   * <p>The translation is done on the serialized Protobuf only.</p>
+   *
+   * @param comparator String class name of the Comparator used in this hfile.
+   * @return What to store in the trailer as our comparator name.
+   * @since hbase-2.0.0.
+   * @deprecated Since hbase-2.0.0. Will be removed in hbase-3.0.0.
+   * @see #getComparatorClass(String)
+   */
+  @Deprecated
+  private String getHBase1CompatibleName(final String comparator) {
+    if (comparator.equals(CellComparatorImpl.class.getName())) {
+      return KeyValue.COMPARATOR.getClass().getName();
+    }
+    if (comparator.equals(MetaCellComparator.class.getName())) {
+      return KeyValue.META_COMPARATOR.getClass().getName();
+    }
+    return comparator;
+  }
+
   @SuppressWarnings("unchecked")
   private static Class<? extends CellComparator> getComparatorClass(String comparatorClassName)
       throws IOException {
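The javadoc for getHBase1CompatibleName above notes that {@link #getComparatorClass(String)} translates in the other direction when a trailer is read back. Its body is not part of this hunk, so here is a hedged, standalone sketch of that read-side mapping; the class ComparatorNameReadTranslation and the legacy-to-current name pairs are assumptions consistent with the classes referenced in the diff, not code copied from FixedFileTrailer.

// Standalone sketch (not the real FixedFileTrailer#getComparatorClass) of the
// read-side translation: a 2.x reader that finds a legacy hbase-1.x comparator
// name in a trailer maps it onto the current 2.x class name; anything else is
// returned unchanged for ordinary Class.forName() resolution.
import java.util.HashMap;
import java.util.Map;

public final class ComparatorNameReadTranslation {

  private static final Map<String, String> LEGACY_TO_CURRENT = new HashMap<>();
  static {
    LEGACY_TO_CURRENT.put("org.apache.hadoop.hbase.KeyValue$KVComparator",
        "org.apache.hadoop.hbase.CellComparatorImpl");
    LEGACY_TO_CURRENT.put("org.apache.hadoop.hbase.KeyValue$MetaComparator",
        "org.apache.hadoop.hbase.CellComparatorImpl$MetaCellComparator");
  }

  // Resolve the class name stored in a trailer to the comparator class to load.
  static String toCurrentComparatorName(String nameFromTrailer) {
    return LEGACY_TO_CURRENT.getOrDefault(nameFromTrailer, nameFromTrailer);
  }

  public static void main(String[] args) {
    System.out.println(toCurrentComparatorName("org.apache.hadoop.hbase.KeyValue$KVComparator"));
    System.out.println(toCurrentComparatorName("com.example.MyCustomComparator"));
  }
}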
TestFixedFileTrailer.java
@@ -33,8 +33,11 @@ import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.hbase.*;
+import org.apache.hadoop.hbase.CellComparatorImpl;
+import org.apache.hadoop.hbase.HBaseClassTestRule;
 import org.apache.hadoop.hbase.HBaseTestingUtility;
+import org.apache.hadoop.hbase.KeyValue;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.HFileProtos;
 import org.apache.hadoop.hbase.testclassification.IOTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
 import org.apache.hadoop.hbase.util.Bytes;
@@ -92,6 +95,19 @@ public class TestFixedFileTrailer {
     fs = FileSystem.get(util.getConfiguration());
   }
 
+  @Test
+  public void testComparatorIsHBase1Compatible() {
+    FixedFileTrailer t = new FixedFileTrailer(version, HFileReaderImpl.PBUF_TRAILER_MINOR_VERSION);
+    t.setComparatorClass(CellComparatorImpl.COMPARATOR.getClass());
+    assertEquals(CellComparatorImpl.COMPARATOR.getClass().getName(), t.getComparatorClassName());
+    HFileProtos.FileTrailerProto pb = t.toProtobuf();
+    assertEquals(KeyValue.COMPARATOR.getClass().getName(), pb.getComparatorClassName());
+    t.setComparatorClass(CellComparatorImpl.MetaCellComparator.META_COMPARATOR.getClass());
+    pb = t.toProtobuf();
+    assertEquals(KeyValue.META_COMPARATOR.getClass().getName(),
+      pb.getComparatorClassName());
+  }
+
   @Test
   public void testTrailer() throws IOException {
     FixedFileTrailer t = new FixedFileTrailer(version,