diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java
index b2d3dcc5770..b18e70f7623 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogReader.java
@@ -175,7 +175,7 @@ public class ProtobufLogReader extends ReaderBase {
 
   @Override
   protected void initAfterCompression() throws IOException {
-    WALCellCodec codec = new WALCellCodec(this.compressionContext);
+    WALCellCodec codec = WALCellCodec.create(this.conf, this.compressionContext);
     this.cellDecoder = codec.getDecoder(this.inputStream);
     if (this.hasCompression) {
       this.byteStringUncompressor = codec.getByteStringUncompressor();
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogWriter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogWriter.java
index a5fca766d17..c5521df5f1a 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogWriter.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/ProtobufLogWriter.java
@@ -80,7 +80,7 @@ public class ProtobufLogWriter implements HLog.Writer {
 
     output.write(ProtobufLogReader.PB_WAL_MAGIC);
     WALHeader.newBuilder().setHasCompression(doCompress).build().writeDelimitedTo(output);
-    WALCellCodec codec = new WALCellCodec(this.compressionContext);
+    WALCellCodec codec = WALCellCodec.create(conf, this.compressionContext);
     this.cellEncoder = codec.getEncoder(this.output);
     if (doCompress) {
       this.compressor = codec.getByteStringCompressor();
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.java
index 245abba2ddc..4491a92519b 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/wal/WALCellCodec.java
@@ -22,6 +22,7 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.io.OutputStream;
 
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.codec.BaseDecoder;
@@ -29,6 +30,7 @@ import org.apache.hadoop.hbase.codec.BaseEncoder;
 import org.apache.hadoop.hbase.codec.Codec;
 import org.apache.hadoop.hbase.codec.KeyValueCodec;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.hbase.util.ReflectionUtils;
 
 import com.google.common.base.Preconditions;
 import com.google.protobuf.ByteString;
@@ -39,6 +41,9 @@ import com.google.protobuf.ByteString;
  * This is a pure coincidence... they are independent and don't have to be compatible.
  */
 public class WALCellCodec implements Codec {
+  /** Configuration key for the class to use when encoding cells in the WAL */
+  public static final String WAL_CELL_CODEC_CLASS_KEY = "hbase.regionserver.wal.codec";
+
   private final CompressionContext compression;
   private final ByteStringUncompressor statelessUncompressor = new ByteStringUncompressor() {
     @Override
@@ -47,10 +52,33 @@ public class WALCellCodec implements Codec {
     }
   };
 
-  public WALCellCodec(CompressionContext compression) {
+  /**
+   * All subclasses must implement a constructor with this signature if they are to be
+   * dynamically loaded from the {@link Configuration}.
+   * @param conf configuration to configure this
+   * @param compression compression the codec should support, can be null to indicate no
+   *          compression
+   */
+  public WALCellCodec(Configuration conf, CompressionContext compression) {
     this.compression = compression;
   }
 
+  /**
+   * Create and set up a {@link WALCellCodec} from the {@link Configuration} and
+   * CompressionContext, if they have been specified. Fully prepares the codec for use.
+   * @param conf {@link Configuration} to read for the user-specified codec. If none is specified,
+   *          uses a {@link WALCellCodec}.
+   * @param compression compression the codec should use
+   * @return a {@link WALCellCodec} ready for use.
+   * @throws UnsupportedOperationException if the codec cannot be instantiated
+   */
+  public static WALCellCodec create(Configuration conf, CompressionContext compression)
+      throws UnsupportedOperationException {
+    String className = conf.get(WAL_CELL_CODEC_CLASS_KEY, WALCellCodec.class.getName());
+    return ReflectionUtils.instantiateWithCustomCtor(className, new Class[] { Configuration.class,
+        CompressionContext.class }, new Object[] { conf, compression });
+  }
+
   public interface ByteStringCompressor {
     ByteString compress(byte[] data, Dictionary dict) throws IOException;
   }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestCustomWALCellCodec.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestCustomWALCellCodec.java
new file mode 100644
index 00000000000..b992aca2671
--- /dev/null
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/wal/TestCustomWALCellCodec.java
@@ -0,0 +1,60 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.hbase.regionserver.wal;
+
+import static org.junit.Assert.assertEquals;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.SmallTests;
+import org.junit.Test;
+import org.junit.experimental.categories.Category;
+
+/**
+ * Test that we can create, load, and set up our own custom codec
+ */
+@Category(SmallTests.class)
+public class TestCustomWALCellCodec {
+
+  public static class CustomWALCellCodec extends WALCellCodec {
+    public Configuration conf;
+    public CompressionContext context;
+
+    public CustomWALCellCodec(Configuration conf, CompressionContext compression) {
+      super(conf, compression);
+      this.conf = conf;
+      this.context = compression;
+    }
+  }
+
+  /**
+   * Test that a custom {@link WALCellCodec} will be completely set up when it is instantiated via
+   * {@link WALCellCodec#create(Configuration, CompressionContext)}
+   * @throws Exception on failure
+   */
+  @Test
+  public void testCreatePreparesCodec() throws Exception {
+    Configuration conf = new Configuration(false);
+    conf.setClass(WALCellCodec.WAL_CELL_CODEC_CLASS_KEY, CustomWALCellCodec.class,
+      WALCellCodec.class);
+    CustomWALCellCodec codec = (CustomWALCellCodec) WALCellCodec.create(conf, null);
+    assertEquals("Custom codec didn't get initialized with the right configuration!", conf,
+      codec.conf);
+    assertEquals("Custom codec didn't get initialized with the right compression context!", null,
+      codec.context);
+  }
+}
\ No newline at end of file
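Usage note (not part of the patch): a minimal sketch of how a downstream codec would plug into the new factory. The class name NoopWALCellCodec and its package placement are hypothetical; only WALCellCodec, WALCellCodec.create(Configuration, CompressionContext), and the "hbase.regionserver.wal.codec" key come from the patch. Like the test above, the sketch lives in the org.apache.hadoop.hbase.regionserver.wal package, since CompressionContext may not be visible outside it.

// Illustrative sketch only; assumes the same package as the test above.
package org.apache.hadoop.hbase.regionserver.wal;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;

public class NoopWALCellCodec extends WALCellCodec {
  // The two-argument constructor is mandatory: create() reflectively invokes
  // exactly (Configuration, CompressionContext) on the configured class.
  public NoopWALCellCodec(Configuration conf, CompressionContext compression) {
    super(conf, compression);
  }

  public static void main(String[] args) {
    Configuration conf = HBaseConfiguration.create();
    // Point the WAL at the custom codec class...
    conf.set(WAL_CELL_CODEC_CLASS_KEY, NoopWALCellCodec.class.getName());
    // ...and every reader/writer that calls create() now receives an instance of it.
    WALCellCodec codec = WALCellCodec.create(conf, null);
    assert codec instanceof NoopWALCellCodec;
  }
}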