HADOOP-13061. Refactor erasure coders. Contributed by Kai Sasaki
commit c023c74886
parent bedfec0c10
@@ -21,9 +21,6 @@ package org.apache.hadoop.fs;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.http.lib.StaticUserWebFilter;
-import org.apache.hadoop.io.erasurecode.rawcoder.RSRawErasureCoderFactory;
-import org.apache.hadoop.io.erasurecode.rawcoder.RSRawErasureCoderFactoryLegacy;
-import org.apache.hadoop.io.erasurecode.rawcoder.XORRawErasureCoderFactory;
 
 /**
  * This class contains constants for configuration keys used
@@ -160,30 +157,7 @@ public class CommonConfigurationKeys extends CommonConfigurationKeysPublic {
   public static final boolean IO_COMPRESSION_CODEC_LZ4_USELZ4HC_DEFAULT =
       false;
 
-  /**
-   * Erasure Coding configuration family
-   */
-
-  /** Supported erasure codec classes */
-  public static final String IO_ERASURECODE_CODECS_KEY = "io.erasurecode.codecs";
-
-  /** Raw coder factory for the RS default codec. */
-  public static final String IO_ERASURECODE_CODEC_RS_DEFAULT_RAWCODER_KEY =
-      "io.erasurecode.codec.rs-default.rawcoder";
-  public static final String IO_ERASURECODE_CODEC_RS_DEFAULT_RAWCODER_DEFAULT =
-      RSRawErasureCoderFactory.class.getCanonicalName();
-
-  /** Raw coder factory for the RS legacy codec. */
-  public static final String IO_ERASURECODE_CODEC_RS_LEGACY_RAWCODER_KEY =
-      "io.erasurecode.codec.rs-legacy.rawcoder";
-  public static final String IO_ERASURECODE_CODEC_RS_LEGACY_RAWCODER_DEFAULT =
-      RSRawErasureCoderFactoryLegacy.class.getCanonicalName();
-
-  /** Raw coder factory for the XOR codec. */
-  public static final String IO_ERASURECODE_CODEC_XOR_RAWCODER_KEY =
-      "io.erasurecode.codec.xor.rawcoder";
-  public static final String IO_ERASURECODE_CODEC_XOR_RAWCODER_DEFAULT =
-      XORRawErasureCoderFactory.class.getCanonicalName();
-
   /**
    * Service Authorization
@@ -20,19 +20,107 @@ package org.apache.hadoop.io.erasurecode;
 import com.google.common.base.Preconditions;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.apache.hadoop.io.erasurecode.codec.ErasureCodec;
+import org.apache.hadoop.io.erasurecode.codec.HHXORErasureCodec;
+import org.apache.hadoop.io.erasurecode.codec.RSErasureCodec;
+import org.apache.hadoop.io.erasurecode.codec.XORErasureCodec;
+import org.apache.hadoop.io.erasurecode.coder.ErasureDecoder;
+import org.apache.hadoop.io.erasurecode.coder.ErasureEncoder;
+import org.apache.hadoop.io.erasurecode.rawcoder.RSRawErasureCoderFactory;
+import org.apache.hadoop.io.erasurecode.rawcoder.RSRawErasureCoderFactoryLegacy;
 import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureCoderFactory;
 import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureDecoder;
 import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureEncoder;
+import org.apache.hadoop.io.erasurecode.rawcoder.XORRawErasureCoderFactory;
+
+import java.lang.reflect.Constructor;
+import java.lang.reflect.InvocationTargetException;
 
 /**
- * A codec & coder utility to help create raw coders conveniently.
+ * A codec & coder utility to help create coders conveniently.
+ *
+ * {@link CodecUtil} includes erasure coder configuration keys and default
+ * values such as coder class name and erasure codec option values included
+ * by {@link ErasureCodecOptions}. {@link ErasureEncoder} and
+ * {@link ErasureDecoder} are created by createEncoder and createDecoder
+ * respectively. {@link RawErasureEncoder} and {@link RawErasureDecoder}
+ * are created by createRawEncoder and createRawDecoder.
  */
 @InterfaceAudience.Private
 public final class CodecUtil {
+
+  /** Erasure coder XOR codec. */
+  public static final String IO_ERASURECODE_CODEC_XOR_KEY =
+      "io.erasurecode.codec.xor";
+  public static final String IO_ERASURECODE_CODEC_XOR =
+      XORErasureCodec.class.getCanonicalName();
+  /** Erasure coder Reed-Solomon codec. */
+  public static final String IO_ERASURECODE_CODEC_RS_DEFAULT_KEY =
+      "io.erasurecode.codec.rs";
+  public static final String IO_ERASURECODE_CODEC_RS_DEFAULT =
+      RSErasureCodec.class.getCanonicalName();
+  /** Erasure coder hitch hiker XOR codec. */
+  public static final String IO_ERASURECODE_CODEC_HHXOR_KEY =
+      "io.erasurecode.codec.hhxor";
+  public static final String IO_ERASURECODE_CODEC_HHXOR =
+      HHXORErasureCodec.class.getCanonicalName();
+
+  /** Supported erasure codec classes. */
+
+  /** Raw coder factory for the RS default codec. */
+  public static final String IO_ERASURECODE_CODEC_RS_DEFAULT_RAWCODER_KEY =
+      "io.erasurecode.codec.rs-default.rawcoder";
+  public static final String IO_ERASURECODE_CODEC_RS_DEFAULT_RAWCODER_DEFAULT =
+      RSRawErasureCoderFactory.class.getCanonicalName();
+
+  /** Raw coder factory for the RS legacy codec. */
+  public static final String IO_ERASURECODE_CODEC_RS_LEGACY_RAWCODER_KEY =
+      "io.erasurecode.codec.rs-legacy.rawcoder";
+  public static final String IO_ERASURECODE_CODEC_RS_LEGACY_RAWCODER_DEFAULT =
+      RSRawErasureCoderFactoryLegacy.class.getCanonicalName();
+
+  /** Raw coder factory for the XOR codec. */
+  public static final String IO_ERASURECODE_CODEC_XOR_RAWCODER_KEY =
+      "io.erasurecode.codec.xor.rawcoder";
+  public static final String IO_ERASURECODE_CODEC_XOR_RAWCODER_DEFAULT =
+      XORRawErasureCoderFactory.class.getCanonicalName();
 
   private CodecUtil() { }
 
+  /**
+   * Create encoder corresponding to given codec.
+   * @param options Erasure codec options
+   * @return erasure encoder
+   */
+  public static ErasureEncoder createEncoder(Configuration conf,
+      ErasureCodecOptions options) {
+    Preconditions.checkNotNull(conf);
+    Preconditions.checkNotNull(options);
+
+    String codecKey = getCodecClassName(conf,
+        options.getSchema().getCodecName());
+
+    ErasureCodec codec = createCodec(conf, codecKey, options);
+    return codec.createEncoder();
+  }
+
+  /**
+   * Create decoder corresponding to given codec.
+   * @param options Erasure codec options
+   * @return erasure decoder
+   */
+  public static ErasureDecoder createDecoder(Configuration conf,
+      ErasureCodecOptions options) {
+    Preconditions.checkNotNull(conf);
+    Preconditions.checkNotNull(options);
+
+    String codecKey = getCodecClassName(conf,
+        options.getSchema().getCodecName());
+
+    ErasureCodec codec = createCodec(conf, codecKey, options);
+    return codec.createDecoder();
+  }
+
   /**
    * Create RS raw encoder according to configuration.
    * @param conf configuration
@@ -45,7 +133,7 @@ public final class CodecUtil {
     Preconditions.checkNotNull(conf);
     Preconditions.checkNotNull(codec);
 
-    String rawCoderFactoryKey = getFactNameFromCodec(conf, codec);
+    String rawCoderFactoryKey = getRawCoderFactNameFromCodec(conf, codec);
 
     RawErasureCoderFactory fact = createRawCoderFactory(conf,
         rawCoderFactoryKey);
@@ -65,7 +153,7 @@ public final class CodecUtil {
     Preconditions.checkNotNull(conf);
     Preconditions.checkNotNull(codec);
 
-    String rawCoderFactoryKey = getFactNameFromCodec(conf, codec);
+    String rawCoderFactoryKey = getRawCoderFactNameFromCodec(conf, codec);
 
     RawErasureCoderFactory fact = createRawCoderFactory(conf,
         rawCoderFactoryKey);
@@ -92,22 +180,21 @@ public final class CodecUtil {
     return fact;
   }
 
-  private static String getFactNameFromCodec(Configuration conf, String codec) {
+  private static String getRawCoderFactNameFromCodec(Configuration conf,
+      String codec) {
     switch (codec) {
     case ErasureCodeConstants.RS_DEFAULT_CODEC_NAME:
       return conf.get(
-          CommonConfigurationKeys.IO_ERASURECODE_CODEC_RS_DEFAULT_RAWCODER_KEY,
-          CommonConfigurationKeys.
-              IO_ERASURECODE_CODEC_RS_DEFAULT_RAWCODER_DEFAULT);
+          IO_ERASURECODE_CODEC_RS_DEFAULT_RAWCODER_KEY,
+          IO_ERASURECODE_CODEC_RS_DEFAULT_RAWCODER_DEFAULT);
     case ErasureCodeConstants.RS_LEGACY_CODEC_NAME:
       return conf.get(
-          CommonConfigurationKeys.IO_ERASURECODE_CODEC_RS_LEGACY_RAWCODER_KEY,
-          CommonConfigurationKeys.
-              IO_ERASURECODE_CODEC_RS_LEGACY_RAWCODER_DEFAULT);
+          IO_ERASURECODE_CODEC_RS_LEGACY_RAWCODER_KEY,
+          IO_ERASURECODE_CODEC_RS_LEGACY_RAWCODER_DEFAULT);
     case ErasureCodeConstants.XOR_CODEC_NAME:
       return conf.get(
-          CommonConfigurationKeys.IO_ERASURECODE_CODEC_XOR_RAWCODER_KEY,
-          CommonConfigurationKeys.IO_ERASURECODE_CODEC_XOR_RAWCODER_DEFAULT);
+          IO_ERASURECODE_CODEC_XOR_RAWCODER_KEY,
+          IO_ERASURECODE_CODEC_XOR_RAWCODER_DEFAULT);
     default:
       // For custom codec, we throw exception if the factory is not configured
       String rawCoderKey = "io.erasurecode.codec." + codec + ".rawcoder";
@@ -119,4 +206,59 @@ public final class CodecUtil {
     return factName;
   }
   }
+
+  private static ErasureCodec createCodec(Configuration conf,
+      String codecClassName, ErasureCodecOptions options) {
+    ErasureCodec codec = null;
+    try {
+      Class<? extends ErasureCodec> codecClass =
+          conf.getClassByName(codecClassName)
+          .asSubclass(ErasureCodec.class);
+      Constructor<? extends ErasureCodec> constructor
+          = codecClass.getConstructor(Configuration.class,
+          ErasureCodecOptions.class);
+      codec = constructor.newInstance(conf, options);
+    } catch (ClassNotFoundException | InstantiationException |
+        IllegalAccessException | NoSuchMethodException |
+        InvocationTargetException e) {
+      throw new RuntimeException("Failed to create erasure codec", e);
+    }
+
+    if (codec == null) {
+      throw new RuntimeException("Failed to create erasure codec");
+    }
+
+    return codec;
+  }
+
+  private static String getCodecClassName(Configuration conf, String codec) {
+    switch (codec) {
+    case ErasureCodeConstants.RS_DEFAULT_CODEC_NAME:
+      return conf.get(
+          CodecUtil.IO_ERASURECODE_CODEC_RS_DEFAULT_KEY,
+          CodecUtil.IO_ERASURECODE_CODEC_RS_DEFAULT);
+    case ErasureCodeConstants.RS_LEGACY_CODEC_NAME:
+      //TODO:rs-legacy should be handled differently.
+      return conf.get(
+          CodecUtil.IO_ERASURECODE_CODEC_RS_DEFAULT_KEY,
+          CodecUtil.IO_ERASURECODE_CODEC_RS_DEFAULT);
+    case ErasureCodeConstants.XOR_CODEC_NAME:
+      return conf.get(
+          CodecUtil.IO_ERASURECODE_CODEC_XOR_KEY,
+          CodecUtil.IO_ERASURECODE_CODEC_XOR);
+    case ErasureCodeConstants.HHXOR_CODEC_NAME:
+      return conf.get(
+          CodecUtil.IO_ERASURECODE_CODEC_HHXOR_KEY,
+          CodecUtil.IO_ERASURECODE_CODEC_HHXOR);
+    default:
+      // For custom codec, we throw exception if the codec is not configured
+      String codecKey = "io.erasurecode.codec." + codec + ".coder";
+      String codecClass = conf.get(codecKey);
+      if (codecClass == null) {
+        throw new IllegalArgumentException("Codec not configured " +
+            "for custom codec " + codec);
+      }
+      return codecClass;
+    }
+  }
 }
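To make the intent of the refactored CodecUtil clearer, here is a minimal usage sketch. It is not part of the patch: the wrapper class and its main method are invented for illustration, while the constants, types and signatures (createEncoder, createDecoder, ErasureCodecOptions, ErasureCodeConstants.RS_6_3_SCHEMA) are the ones shown in the diff above.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.erasurecode.CodecUtil;
import org.apache.hadoop.io.erasurecode.ErasureCodeConstants;
import org.apache.hadoop.io.erasurecode.ErasureCodecOptions;
import org.apache.hadoop.io.erasurecode.coder.ErasureDecoder;
import org.apache.hadoop.io.erasurecode.coder.ErasureEncoder;

// Illustrative only; this class is hypothetical and not part of HADOOP-13061.
public class CodecUtilUsageSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Optionally pin the raw coder factory for the rs-default codec; both the
    // key and its default now live in CodecUtil rather than
    // CommonConfigurationKeys.
    conf.set(CodecUtil.IO_ERASURECODE_CODEC_RS_DEFAULT_RAWCODER_KEY,
        CodecUtil.IO_ERASURECODE_CODEC_RS_DEFAULT_RAWCODER_DEFAULT);

    // Codec-level options are derived from an ECSchema, here RS 6+3.
    ErasureCodecOptions options =
        new ErasureCodecOptions(ErasureCodeConstants.RS_6_3_SCHEMA);

    // The new entry points resolve the ErasureCodec class from configuration
    // and hand back the high-level coders.
    ErasureEncoder encoder = CodecUtil.createEncoder(conf, options);
    ErasureDecoder decoder = CodecUtil.createDecoder(conf, options);

    encoder.release();
    decoder.release();
  }
}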
@@ -22,12 +22,13 @@ package org.apache.hadoop.io.erasurecode;
  */
 public final class ErasureCodeConstants {
 
-  private ErasureCodeConstants(){
+  private ErasureCodeConstants() {
   }
 
   public static final String RS_DEFAULT_CODEC_NAME = "rs-default";
   public static final String RS_LEGACY_CODEC_NAME = "rs-legacy";
   public static final String XOR_CODEC_NAME = "xor";
+  public static final String HHXOR_CODEC_NAME = "hhxor";
 
   public static final ECSchema RS_6_3_SCHEMA = new ECSchema(
       RS_DEFAULT_CODEC_NAME, 6, 3);
@@ -15,39 +15,23 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.io.erasurecode.codec;
+package org.apache.hadoop.io.erasurecode;
 
 import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.conf.Configured;
-import org.apache.hadoop.io.erasurecode.ECSchema;
-import org.apache.hadoop.io.erasurecode.grouper.BlockGrouper;
 
 /**
- * Abstract Erasure Codec that implements {@link ErasureCodec}.
+ * Erasure codec options.
  */
 @InterfaceAudience.Private
-public abstract class AbstractErasureCodec extends Configured
-    implements ErasureCodec {
+public class ErasureCodecOptions {
+  private ECSchema schema;
 
-  private final ECSchema schema;
-
-  public AbstractErasureCodec(ECSchema schema) {
+  public ErasureCodecOptions(ECSchema schema) {
     this.schema = schema;
   }
 
-  public String getName() {
-    return schema.getCodecName();
-  }
-
   public ECSchema getSchema() {
     return schema;
   }
-
-  @Override
-  public BlockGrouper createBlockGrouper() {
-    BlockGrouper blockGrouper = new BlockGrouper();
-    blockGrouper.setSchema(getSchema());
-
-    return blockGrouper;
-  }
 }
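A small sketch of the new options holder in isolation; the wrapper class is hypothetical, but the ECSchema constructor mirrors the RS_6_3_SCHEMA constant kept in ErasureCodeConstants above.

import org.apache.hadoop.io.erasurecode.ECSchema;
import org.apache.hadoop.io.erasurecode.ErasureCodecOptions;

// Illustration only; not part of the patch.
public class ErasureCodecOptionsSketch {
  public static void main(String[] args) {
    // ECSchema(codecName, numDataUnits, numParityUnits), the same shape as
    // ErasureCodeConstants.RS_6_3_SCHEMA.
    ECSchema xorSchema = new ECSchema("xor", 2, 1);

    // ErasureCodecOptions is now the single argument (besides Configuration)
    // that every ErasureCodec constructor takes.
    ErasureCodecOptions options = new ErasureCodecOptions(xorSchema);
    System.out.println(options.getSchema().getCodecName()); // prints "xor"
  }
}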
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.io.erasurecode.codec;
+
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.erasurecode.ErasureCodecOptions;
+import org.apache.hadoop.io.erasurecode.coder.DummyErasureDecoder;
+import org.apache.hadoop.io.erasurecode.coder.DummyErasureEncoder;
+import org.apache.hadoop.io.erasurecode.coder.ErasureDecoder;
+import org.apache.hadoop.io.erasurecode.coder.ErasureEncoder;
+
+/**
+ * Dummy erasure codec that does no real coding. It is used only for tests or
+ * for performance comparison with other erasure coders.
+ */
+public class DummyErasureCodec extends ErasureCodec {
+  public DummyErasureCodec(Configuration conf, ErasureCodecOptions options) {
+    super(conf, options);
+  }
+
+  @Override
+  public ErasureEncoder createEncoder() {
+    return new DummyErasureEncoder(getCoderOptions());
+  }
+
+  @Override
+  public ErasureDecoder createDecoder() {
+    return new DummyErasureDecoder(getCoderOptions());
+  }
+}
@@ -18,34 +18,76 @@
 package org.apache.hadoop.io.erasurecode.codec;
 
 import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.conf.Configurable;
-import org.apache.hadoop.io.erasurecode.coder.ErasureCoder;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.erasurecode.CodecUtil;
+import org.apache.hadoop.io.erasurecode.ECSchema;
+import org.apache.hadoop.io.erasurecode.ErasureCodecOptions;
+import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
+import org.apache.hadoop.io.erasurecode.coder.ErasureDecoder;
+import org.apache.hadoop.io.erasurecode.coder.ErasureEncoder;
 import org.apache.hadoop.io.erasurecode.grouper.BlockGrouper;
 
 /**
- * Erasure Codec API that's to cover the essential specific aspects of a code.
- * Currently it cares only block grouper and erasure coder. In future we may
- * add more aspects here to make the behaviors customizable.
+ * Abstract Erasure Codec defines the interface of the concrete erasure
+ * codec classes.
  */
 @InterfaceAudience.Private
-public interface ErasureCodec extends Configurable {
+public abstract class ErasureCodec {
+
+  private ECSchema schema;
+  private ErasureCodecOptions codecOptions;
+  private ErasureCoderOptions coderOptions;
+
+  public ErasureCodec(Configuration conf,
+                      ErasureCodecOptions options) {
+    this.schema = options.getSchema();
+    this.codecOptions = options;
+    boolean allowChangeInputs = false;
+    this.coderOptions = new ErasureCoderOptions(schema.getNumDataUnits(),
+        schema.getNumParityUnits(), allowChangeInputs, false);
+  }
+
+  public String getName() {
+    return schema.getCodecName();
+  }
+
+  public ECSchema getSchema() {
+    return schema;
+  }
 
   /**
-   * Create block grouper
-   * @return block grouper
+   * Get a {@link ErasureCodecOptions}.
+   * @return erasure codec options
    */
-  public BlockGrouper createBlockGrouper();
+  public ErasureCodecOptions getCodecOptions() {
+    return codecOptions;
+  }
+
+  protected void setCodecOptions(ErasureCodecOptions options) {
+    this.codecOptions = options;
+    this.schema = options.getSchema();
+  }
 
   /**
-   * Create Erasure Encoder
-   * @return erasure encoder
+   * Get a {@link ErasureCoderOptions}.
+   * @return erasure coder options
    */
-  public ErasureCoder createEncoder();
+  public ErasureCoderOptions getCoderOptions() {
+    return coderOptions;
+  }
 
-  /**
-   * Create Erasure Decoder
-   * @return erasure decoder
-   */
-  public ErasureCoder createDecoder();
+  protected void setCoderOptions(ErasureCoderOptions options) {
+    this.coderOptions = options;
+  }
+
+  public abstract ErasureEncoder createEncoder();
+
+  public abstract ErasureDecoder createDecoder();
+
+  public BlockGrouper createBlockGrouper() {
+    BlockGrouper blockGrouper = new BlockGrouper();
+    blockGrouper.setSchema(getSchema());
+
+    return blockGrouper;
+  }
 }
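The abstract class above is what third-party codecs now extend. A hypothetical subclass (not in the patch) only needs the (Configuration, ErasureCodecOptions) constructor that CodecUtil#createCodec looks up reflectively, plus the two factory methods; here it simply delegates to the stock Reed-Solomon coders.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.erasurecode.ErasureCodecOptions;
import org.apache.hadoop.io.erasurecode.codec.ErasureCodec;
import org.apache.hadoop.io.erasurecode.coder.ErasureDecoder;
import org.apache.hadoop.io.erasurecode.coder.ErasureEncoder;
import org.apache.hadoop.io.erasurecode.coder.RSErasureDecoder;
import org.apache.hadoop.io.erasurecode.coder.RSErasureEncoder;

// Hypothetical example codec; the class name is an assumption.
public class MyErasureCodec extends ErasureCodec {

  public MyErasureCodec(Configuration conf, ErasureCodecOptions options) {
    super(conf, options);
  }

  @Override
  public ErasureEncoder createEncoder() {
    // Delegates to the built-in RS coders purely for illustration.
    return new RSErasureEncoder(getCoderOptions());
  }

  @Override
  public ErasureDecoder createDecoder() {
    return new RSErasureDecoder(getCoderOptions());
  }
}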
@@ -18,8 +18,10 @@
 package org.apache.hadoop.io.erasurecode.codec;
 
 import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.io.erasurecode.ECSchema;
-import org.apache.hadoop.io.erasurecode.coder.ErasureCoder;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.erasurecode.ErasureCodecOptions;
+import org.apache.hadoop.io.erasurecode.coder.ErasureDecoder;
+import org.apache.hadoop.io.erasurecode.coder.ErasureEncoder;
 import org.apache.hadoop.io.erasurecode.coder.HHXORErasureDecoder;
 import org.apache.hadoop.io.erasurecode.coder.HHXORErasureEncoder;
 
@@ -27,19 +29,19 @@ import org.apache.hadoop.io.erasurecode.coder.HHXORErasureEncoder;
  * A Hitchhiker-XOR erasure codec.
  */
 @InterfaceAudience.Private
-public class HHXORErasureCodec extends AbstractErasureCodec {
+public class HHXORErasureCodec extends ErasureCodec {
 
-  public HHXORErasureCodec(ECSchema schema) {
-    super(schema);
+  public HHXORErasureCodec(Configuration conf, ErasureCodecOptions options) {
+    super(conf, options);
   }
 
   @Override
-  public ErasureCoder createEncoder() {
-    return new HHXORErasureEncoder(getSchema());
+  public ErasureEncoder createEncoder() {
+    return new HHXORErasureEncoder(getCoderOptions());
   }
 
   @Override
-  public ErasureCoder createDecoder() {
-    return new HHXORErasureDecoder(getSchema());
+  public ErasureDecoder createDecoder() {
+    return new HHXORErasureDecoder(getCoderOptions());
   }
 }
@@ -18,8 +18,10 @@
 package org.apache.hadoop.io.erasurecode.codec;
 
 import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.io.erasurecode.ECSchema;
-import org.apache.hadoop.io.erasurecode.coder.ErasureCoder;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.erasurecode.ErasureCodecOptions;
+import org.apache.hadoop.io.erasurecode.coder.ErasureDecoder;
+import org.apache.hadoop.io.erasurecode.coder.ErasureEncoder;
 import org.apache.hadoop.io.erasurecode.coder.RSErasureDecoder;
 import org.apache.hadoop.io.erasurecode.coder.RSErasureEncoder;
 
@@ -27,19 +29,19 @@ import org.apache.hadoop.io.erasurecode.coder.RSErasureEncoder;
  * A Reed-Solomon erasure codec.
  */
 @InterfaceAudience.Private
-public class RSErasureCodec extends AbstractErasureCodec {
+public class RSErasureCodec extends ErasureCodec {
 
-  public RSErasureCodec(ECSchema schema) {
-    super(schema);
+  public RSErasureCodec(Configuration conf, ErasureCodecOptions options) {
+    super(conf, options);
   }
 
   @Override
-  public ErasureCoder createEncoder() {
-    return new RSErasureEncoder(getSchema());
+  public ErasureEncoder createEncoder() {
+    return new RSErasureEncoder(getCoderOptions());
   }
 
   @Override
-  public ErasureCoder createDecoder() {
-    return new RSErasureDecoder(getSchema());
+  public ErasureDecoder createDecoder() {
+    return new RSErasureDecoder(getCoderOptions());
  }
 }
@@ -18,8 +18,10 @@
 package org.apache.hadoop.io.erasurecode.codec;
 
 import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.io.erasurecode.ECSchema;
-import org.apache.hadoop.io.erasurecode.coder.ErasureCoder;
+import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.io.erasurecode.ErasureCodecOptions;
+import org.apache.hadoop.io.erasurecode.coder.ErasureDecoder;
+import org.apache.hadoop.io.erasurecode.coder.ErasureEncoder;
 import org.apache.hadoop.io.erasurecode.coder.XORErasureDecoder;
 import org.apache.hadoop.io.erasurecode.coder.XORErasureEncoder;
 
@@ -27,20 +29,20 @@ import org.apache.hadoop.io.erasurecode.coder.XORErasureEncoder;
  * A XOR erasure codec.
  */
 @InterfaceAudience.Private
-public class XORErasureCodec extends AbstractErasureCodec {
+public class XORErasureCodec extends ErasureCodec {
 
-  public XORErasureCodec(ECSchema schema) {
-    super(schema);
-    assert(schema.getNumParityUnits() == 1);
+  public XORErasureCodec(Configuration conf, ErasureCodecOptions options) {
+    super(conf, options);
+    assert(options.getSchema().getNumParityUnits() == 1);
   }
 
   @Override
-  public ErasureCoder createEncoder() {
-    return new XORErasureEncoder(getSchema());
+  public ErasureEncoder createEncoder() {
+    return new XORErasureEncoder(getCoderOptions());
   }
 
   @Override
-  public ErasureCoder createDecoder() {
-    return new XORErasureDecoder(getSchema());
+  public ErasureDecoder createDecoder() {
+    return new XORErasureDecoder(getCoderOptions());
   }
 }
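For a codec that is not one of the built-in names, the default branches of CodecUtil#getCodecClassName and CodecUtil#getRawCoderFactNameFromCodec fall back to per-codec configuration keys. A hedged sketch of that wiring follows; the codec name "mycodec" and both class names on the right-hand side are placeholders, not real classes.

import org.apache.hadoop.conf.Configuration;

// Illustration only; the custom classes referenced here do not exist.
public class CustomCodecWiringSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Resolved by CodecUtil#getCodecClassName for an unknown codec name.
    conf.set("io.erasurecode.codec.mycodec.coder",
        "com.example.MyErasureCodec");
    // Resolved by CodecUtil#getRawCoderFactNameFromCodec.
    conf.set("io.erasurecode.codec.mycodec.rawcoder",
        "com.example.MyRawErasureCoderFactory");
  }
}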
@@ -0,0 +1,28 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/**
+ * Erasure codec framework.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
+package org.apache.hadoop.io.erasurecode.codec;
+
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
@@ -1,64 +0,0 @@
-/**
- * Licensed to the Apache Software Foundation (ASF) under one
- * or more contributor license agreements. See the NOTICE file
- * distributed with this work for additional information
- * regarding copyright ownership. The ASF licenses this file
- * to you under the Apache License, Version 2.0 (the
- * "License"); you may not use this file except in compliance
- * with the License. You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-package org.apache.hadoop.io.erasurecode.coder;
-
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.conf.Configured;
-import org.apache.hadoop.io.erasurecode.ECSchema;
-
-/**
- * A common class of basic facilities to be shared by encoder and decoder
- *
- * It implements the {@link ErasureCoder} interface.
- */
-@InterfaceAudience.Private
-public abstract class AbstractErasureCoder
-    extends Configured implements ErasureCoder {
-
-  private final int numDataUnits;
-  private final int numParityUnits;
-
-  public AbstractErasureCoder(int numDataUnits, int numParityUnits) {
-    this.numDataUnits = numDataUnits;
-    this.numParityUnits = numParityUnits;
-  }
-
-  public AbstractErasureCoder(ECSchema schema) {
-    this(schema.getNumDataUnits(), schema.getNumParityUnits());
-  }
-
-  @Override
-  public int getNumDataUnits() {
-    return numDataUnits;
-  }
-
-  @Override
-  public int getNumParityUnits() {
-    return numParityUnits;
-  }
-
-  @Override
-  public boolean preferDirectBuffer() {
-    return false;
-  }
-
-  @Override
-  public void release() {
-    // Nothing to do by default
-  }
-}
@@ -0,0 +1,46 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.io.erasurecode.coder;
+
+import org.apache.hadoop.io.erasurecode.ECBlock;
+import org.apache.hadoop.io.erasurecode.ECBlockGroup;
+import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
+import org.apache.hadoop.io.erasurecode.rawcoder.DummyRawDecoder;
+import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureDecoder;
+
+/**
+ * Dummy erasure decoder that does no real computation. Instead, it just
+ * returns zero bytes. It can be used to isolate performance issues to the
+ * HDFS-side logic rather than the codec, and is intended for tests only.
+ */
+public class DummyErasureDecoder extends ErasureDecoder {
+  public DummyErasureDecoder(ErasureCoderOptions options) {
+    super(options);
+  }
+
+  @Override
+  protected ErasureCodingStep prepareDecodingStep(ECBlockGroup blockGroup) {
+    RawErasureDecoder rawDecoder = new DummyRawDecoder(getOptions());
+
+    ECBlock[] inputBlocks = getInputBlocks(blockGroup);
+
+    return new ErasureDecodingStep(inputBlocks,
+        getErasedIndexes(inputBlocks),
+        getOutputBlocks(blockGroup), rawDecoder);
+  }
+}
@@ -0,0 +1,45 @@
+/**
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements. See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership. The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License. You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+package org.apache.hadoop.io.erasurecode.coder;
+
+import org.apache.hadoop.io.erasurecode.ECBlock;
+import org.apache.hadoop.io.erasurecode.ECBlockGroup;
+import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
+import org.apache.hadoop.io.erasurecode.rawcoder.DummyRawEncoder;
+import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureEncoder;
+
+/**
+ * Dummy erasure encoder that does no real computation. Instead, it just
+ * returns zero bytes. It can be used to isolate performance issues to the
+ * HDFS-side logic rather than the codec, and is intended for tests only.
+ */
+public class DummyErasureEncoder extends ErasureEncoder {
+  public DummyErasureEncoder(ErasureCoderOptions options) {
+    super(options);
+  }
+
+  @Override
+  protected ErasureCodingStep prepareEncodingStep(ECBlockGroup blockGroup) {
+    RawErasureEncoder rawEncoder = new DummyRawEncoder(getOptions());
+
+    ECBlock[] inputBlocks = getInputBlocks(blockGroup);
+
+    return new ErasureEncodingStep(inputBlocks,
+        getOutputBlocks(blockGroup), rawEncoder);
+  }
+}
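The two dummy coders above are meant as a no-op baseline for benchmarking. A sketch of how they might be instantiated directly; the wrapper class is hypothetical, and the meaning of the last boolean of the four-argument ErasureCoderOptions constructor is assumed from the ErasureCodec constructor shown earlier.

import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
import org.apache.hadoop.io.erasurecode.coder.DummyErasureDecoder;
import org.apache.hadoop.io.erasurecode.coder.DummyErasureEncoder;

// Illustration only; not part of the patch.
public class DummyCoderBaselineSketch {
  public static void main(String[] args) {
    // Same constructor shape as used by ErasureCodec: data units, parity
    // units, allowChangeInputs, and a final flag (assumed to be a debug/dump
    // switch) kept false here.
    ErasureCoderOptions options = new ErasureCoderOptions(6, 3, false, false);

    DummyErasureEncoder encoder = new DummyErasureEncoder(options);
    DummyErasureDecoder decoder = new DummyErasureDecoder(options);

    // Drive the same block groups through these no-op coders to measure the
    // HDFS-side overhead, then swap in a real codec for comparison.
    encoder.release();
    decoder.release();
  }
}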
@@ -20,6 +20,7 @@ package org.apache.hadoop.io.erasurecode.coder;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.conf.Configurable;
 import org.apache.hadoop.io.erasurecode.ECBlockGroup;
+import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
 
 /**
  * An erasure coder to perform encoding or decoding given a group. Generally it
@@ -39,18 +40,25 @@ import org.apache.hadoop.io.erasurecode.ECBlockGroup;
 public interface ErasureCoder extends Configurable {
 
   /**
-   * The number of data input units for the coding. A unit can be a byte,
-   * chunk or buffer or even a block.
+   * The number of data input units for the coding. A unit can be a byte, chunk
+   * or buffer or even a block.
    * @return count of data input units
    */
-  public int getNumDataUnits();
+  int getNumDataUnits();
 
   /**
    * The number of parity output units for the coding. A unit can be a byte,
    * chunk, buffer or even a block.
    * @return count of parity output units
   */
-  public int getNumParityUnits();
+  int getNumParityUnits();
+
+  /**
+   * The options of erasure coder. This option is passed to
+   * raw erasure coder as it is.
+   * @return erasure coder options
+   */
+  ErasureCoderOptions getOptions();
 
   /**
    * Calculate the encoding or decoding steps given a block blockGroup.
@@ -61,7 +69,7 @@ public interface ErasureCoder extends Configurable {
    * @param blockGroup the erasure coding block group containing all necessary
    *        information for codec calculation
    */
-  public ErasureCodingStep calculateCoding(ECBlockGroup blockGroup);
+  ErasureCodingStep calculateCoding(ECBlockGroup blockGroup);
 
   /**
    * Tell if direct or off-heap buffer is preferred or not. It's for callers to
@@ -70,10 +78,11 @@ public interface ErasureCoder extends Configurable {
    * @return true if direct buffer is preferred for performance consideration,
    *         otherwise false.
    */
-  public boolean preferDirectBuffer();
+  boolean preferDirectBuffer();
 
   /**
-   * Release the resources if any. Good chance to invoke RawErasureCoder#release.
+   * Release the resources if any. Good chance to invoke
+   * RawErasureCoder#release.
    */
-  public void release();
+  void release();
 }
@@ -32,14 +32,14 @@ public interface ErasureCodingStep {
    * or parity blocks.
    * @return input blocks
    */
-  public ECBlock[] getInputBlocks();
+  ECBlock[] getInputBlocks();
 
   /**
    * Output blocks of writable buffers involved in this step, may be data
    * blocks or parity blocks.
    * @return output blocks
    */
-  public ECBlock[] getOutputBlocks();
+  ECBlock[] getOutputBlocks();
 
   /**
    * Perform encoding or decoding given the input chunks, and generated results
@@ -47,11 +47,11 @@ public interface ErasureCodingStep {
    * @param inputChunks
    * @param outputChunks
    */
-  public void performCoding(ECChunk[] inputChunks, ECChunk[] outputChunks);
+  void performCoding(ECChunk[] inputChunks, ECChunk[] outputChunks);
 
   /**
    * Notify erasure coder that all the chunks of input blocks are processed so
    * the coder can be able to update internal states, considering next step.
    */
-  public void finish();
+  void finish();
 }
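The two interfaces above define the caller's contract: ask an ErasureCoder for a step, feed the step chunks, then finish it. A hypothetical driver showing that call order; how the chunks are actually read from or written to the blocks is outside this patch and is not shown.

import org.apache.hadoop.io.erasurecode.ECBlockGroup;
import org.apache.hadoop.io.erasurecode.ECChunk;
import org.apache.hadoop.io.erasurecode.coder.ErasureCoder;
import org.apache.hadoop.io.erasurecode.coder.ErasureCodingStep;

// Illustration only; the class and method are not part of the patch.
public class CodingStepDriverSketch {

  static void codeOneGroup(ErasureCoder coder, ECBlockGroup blockGroup,
      ECChunk[] inputChunks, ECChunk[] outputChunks) {
    // Ask the coder how this group should be processed.
    ErasureCodingStep step = coder.calculateCoding(blockGroup);

    // performCoding may be invoked repeatedly, once per batch of chunks
    // belonging to the blocks reported by step.getInputBlocks().
    step.performCoding(inputChunks, outputChunks);

    // Tell the step that all chunks of the group have been processed.
    step.finish();
  }
}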
@@ -18,9 +18,10 @@
 package org.apache.hadoop.io.erasurecode.coder;
 
 import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.io.erasurecode.ECBlock;
 import org.apache.hadoop.io.erasurecode.ECBlockGroup;
-import org.apache.hadoop.io.erasurecode.ECSchema;
+import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
 
 /**
  * An abstract erasure decoder that's to be inherited by new decoders.
@@ -28,14 +29,16 @@ import org.apache.hadoop.io.erasurecode.ECSchema;
  * It implements the {@link ErasureCoder} interface.
  */
 @InterfaceAudience.Private
-public abstract class AbstractErasureDecoder extends AbstractErasureCoder {
+public abstract class ErasureDecoder extends Configured
+    implements ErasureCoder {
+  private final int numDataUnits;
+  private final int numParityUnits;
+  private final ErasureCoderOptions options;
 
-  public AbstractErasureDecoder(int numDataUnits, int numParityUnits) {
-    super(numDataUnits, numParityUnits);
-  }
-
-  public AbstractErasureDecoder(ECSchema schema) {
-    super(schema);
+  public ErasureDecoder(ErasureCoderOptions options) {
+    this.options = options;
+    this.numDataUnits = options.getNumDataUnits();
+    this.numParityUnits = options.getNumParityUnits();
   }
 
   @Override
@@ -44,13 +47,20 @@ public abstract class AbstractErasureDecoder extends AbstractErasureCoder {
     return prepareDecodingStep(blockGroup);
   }
 
-  /**
-   * Perform decoding against a block blockGroup.
-   * @param blockGroup
-   * @return decoding step for caller to do the real work
-   */
-  protected abstract ErasureCodingStep prepareDecodingStep(
-      ECBlockGroup blockGroup);
+  @Override
+  public int getNumDataUnits() {
+    return this.numDataUnits;
+  }
+
+  @Override
+  public int getNumParityUnits() {
+    return this.numParityUnits;
+  }
+
+  @Override
+  public ErasureCoderOptions getOptions() {
+    return options;
+  }
 
   /**
    * We have all the data blocks and parity blocks as input blocks for
@@ -96,6 +106,24 @@ public abstract class AbstractErasureDecoder extends AbstractErasureCoder {
     return outputBlocks;
   }
 
+  @Override
+  public boolean preferDirectBuffer() {
+    return false;
+  }
+
+  @Override
+  public void release() {
+    // Nothing to do by default
+  }
+
+  /**
+   * Perform decoding against a block blockGroup.
+   * @param blockGroup
+   * @return decoding step for caller to do the real work
+   */
+  protected abstract ErasureCodingStep prepareDecodingStep(
+      ECBlockGroup blockGroup);
+
   /**
    * Get the number of erased blocks in the block group.
    * @param blockGroup
@@ -27,7 +27,9 @@ import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureDecoder;
  * a decoding step involved in the whole process of decoding a block group.
  */
 @InterfaceAudience.Private
-public class ErasureDecodingStep extends AbstractErasureCodingStep {
+public class ErasureDecodingStep implements ErasureCodingStep {
+  private ECBlock[] inputBlocks;
+  private ECBlock[] outputBlocks;
   private int[] erasedIndexes;
   private RawErasureDecoder rawDecoder;
 
@@ -41,7 +43,8 @@ public class ErasureDecodingStep extends AbstractErasureCodingStep {
   public ErasureDecodingStep(ECBlock[] inputBlocks, int[] erasedIndexes,
                              ECBlock[] outputBlocks,
                              RawErasureDecoder rawDecoder) {
-    super(inputBlocks, outputBlocks);
+    this.inputBlocks = inputBlocks;
+    this.outputBlocks = outputBlocks;
     this.erasedIndexes = erasedIndexes;
     this.rawDecoder = rawDecoder;
   }
@@ -51,4 +54,18 @@ public class ErasureDecodingStep extends AbstractErasureCodingStep {
     rawDecoder.decode(inputChunks, erasedIndexes, outputChunks);
   }
 
+  @Override
+  public ECBlock[] getInputBlocks() {
+    return inputBlocks;
+  }
+
+  @Override
+  public ECBlock[] getOutputBlocks() {
+    return outputBlocks;
+  }
+
+  @Override
+  public void finish() {
+    // TODO: Finalize decoder if necessary
+  }
 }
@@ -18,9 +18,10 @@
 package org.apache.hadoop.io.erasurecode.coder;
 
 import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.io.erasurecode.ECBlock;
 import org.apache.hadoop.io.erasurecode.ECBlockGroup;
-import org.apache.hadoop.io.erasurecode.ECSchema;
+import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
 
 /**
  * An abstract erasure encoder that's to be inherited by new encoders.
@@ -28,14 +29,17 @@ import org.apache.hadoop.io.erasurecode.ECSchema;
  * It implements the {@link ErasureCoder} interface.
  */
 @InterfaceAudience.Private
-public abstract class AbstractErasureEncoder extends AbstractErasureCoder {
+public abstract class ErasureEncoder extends Configured
+    implements ErasureCoder {
 
-  public AbstractErasureEncoder(int numDataUnits, int numParityUnits) {
-    super(numDataUnits, numParityUnits);
-  }
+  private final int numDataUnits;
+  private final int numParityUnits;
+  private final ErasureCoderOptions options;
 
-  public AbstractErasureEncoder(ECSchema schema) {
-    super(schema);
+  public ErasureEncoder(ErasureCoderOptions options) {
+    this.options = options;
+    this.numDataUnits = options.getNumDataUnits();
+    this.numParityUnits = options.getNumParityUnits();
   }
 
   @Override
@@ -44,6 +48,39 @@ public abstract class AbstractErasureEncoder extends AbstractErasureCoder {
     return prepareEncodingStep(blockGroup);
   }
 
+  @Override
+  public int getNumDataUnits() {
+    return numDataUnits;
+  }
+
+  @Override
+  public int getNumParityUnits() {
+    return numParityUnits;
+  }
+
+  @Override
+  public ErasureCoderOptions getOptions() {
+    return options;
+  }
+
+  protected ECBlock[] getInputBlocks(ECBlockGroup blockGroup) {
+    return blockGroup.getDataBlocks();
+  }
+
+  protected ECBlock[] getOutputBlocks(ECBlockGroup blockGroup) {
+    return blockGroup.getParityBlocks();
+  }
+
+  @Override
+  public boolean preferDirectBuffer() {
+    return false;
+  }
+
+  @Override
+  public void release() {
+    // Nothing to do by default
+  }
+
   /**
    * Perform encoding against a block group.
    * @param blockGroup
@@ -51,12 +88,4 @@ public abstract class AbstractErasureEncoder extends AbstractErasureCoder {
    */
   protected abstract ErasureCodingStep prepareEncodingStep(
      ECBlockGroup blockGroup);
-
-  protected ECBlock[] getInputBlocks(ECBlockGroup blockGroup) {
-    return blockGroup.getDataBlocks();
-  }
-
-  protected ECBlock[] getOutputBlocks(ECBlockGroup blockGroup) {
-    return blockGroup.getParityBlocks();
-  }
 }
@@ -27,8 +27,9 @@ import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureEncoder;
  * an encoding step involved in the whole process of encoding a block group.
  */
 @InterfaceAudience.Private
-public class ErasureEncodingStep extends AbstractErasureCodingStep {
+public class ErasureEncodingStep implements ErasureCodingStep {
+  private ECBlock[] inputBlocks;
+  private ECBlock[] outputBlocks;
   private RawErasureEncoder rawEncoder;
 
   /**
@@ -39,7 +40,8 @@ public class ErasureEncodingStep extends AbstractErasureCodingStep {
    */
   public ErasureEncodingStep(ECBlock[] inputBlocks, ECBlock[] outputBlocks,
                              RawErasureEncoder rawEncoder) {
-    super(inputBlocks, outputBlocks);
+    this.inputBlocks = inputBlocks;
+    this.outputBlocks = outputBlocks;
     this.rawEncoder = rawEncoder;
   }
 
@@ -48,4 +50,18 @@ public class ErasureEncodingStep extends AbstractErasureCodingStep {
     rawEncoder.encode(inputChunks, outputChunks);
   }
 
+  @Override
+  public ECBlock[] getInputBlocks() {
+    return inputBlocks;
+  }
+
+  @Override
+  public ECBlock[] getOutputBlocks() {
+    return outputBlocks;
+  }
+
+  @Override
+  public void finish() {
+    rawEncoder.release();
+  }
 }
@@ -21,28 +21,36 @@ import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.io.erasurecode.ECBlock;
 
 /**
- * Abstract class for common facilities shared by {@link ErasureEncodingStep}
- * and {@link ErasureDecodingStep}.
+ * Abstract class for Hitchhiker common facilities shared by
+ * {@link HHXORErasureEncodingStep}and {@link HHXORErasureDecodingStep}.
  *
- * It implements {@link ErasureEncodingStep}.
+ * It implements {@link ErasureCodingStep}.
  */
 @InterfaceAudience.Private
-public abstract class AbstractErasureCodingStep implements ErasureCodingStep {
+public abstract class HHErasureCodingStep
+    implements ErasureCodingStep {
 
   private ECBlock[] inputBlocks;
   private ECBlock[] outputBlocks;
 
+  private static final int SUB_PACKET_SIZE = 2;
+
   /**
    * Constructor given input blocks and output blocks.
+   *
    * @param inputBlocks
    * @param outputBlocks
    */
-  public AbstractErasureCodingStep(ECBlock[] inputBlocks,
+  public HHErasureCodingStep(ECBlock[] inputBlocks,
                                    ECBlock[] outputBlocks) {
     this.inputBlocks = inputBlocks;
     this.outputBlocks = outputBlocks;
   }
 
+  protected int getSubPacketSize() {
+    return SUB_PACKET_SIZE;
+  }
+
   @Override
   public ECBlock[] getInputBlocks() {
     return inputBlocks;
@@ -55,7 +63,6 @@ public abstract class AbstractErasureCodingStep implements ErasureCodingStep {
 
   @Override
   public void finish() {
-    // NOOP by default
+    // TODO: Finalize encoder/decoder if necessary
   }
-
 }

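For orientation only, a hypothetical subclass showing how the new HHErasureCodingStep base is meant to be extended; the real HHXORErasureEncodingStep/HHXORErasureDecodingStep in this patch follow the same shape and can read getSubPacketSize() when they slice chunks into sub-packets:

import org.apache.hadoop.io.erasurecode.ECBlock;
import org.apache.hadoop.io.erasurecode.ECChunk;
import org.apache.hadoop.io.erasurecode.coder.HHErasureCodingStep;

// Illustrative only; not part of the patch.
public class DemoHHCodingStep extends HHErasureCodingStep {

  public DemoHHCodingStep(ECBlock[] inputBlocks, ECBlock[] outputBlocks) {
    super(inputBlocks, outputBlocks);
  }

  @Override
  public void performCoding(ECChunk[] inputChunks, ECChunk[] outputChunks) {
    // Hitchhiker coding works on getSubPacketSize() sub-packets per chunk
    // (2 for HH-XOR, per SUB_PACKET_SIZE above).
    int subPackets = getSubPacketSize();
    // ... actual sub-packet coding omitted in this sketch ...
  }
}
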
@@ -21,7 +21,6 @@ import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.io.erasurecode.CodecUtil;
 import org.apache.hadoop.io.erasurecode.ECBlock;
 import org.apache.hadoop.io.erasurecode.ECBlockGroup;
-import org.apache.hadoop.io.erasurecode.ECSchema;
 import org.apache.hadoop.io.erasurecode.ErasureCodeConstants;
 import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
 import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureDecoder;
@@ -39,16 +38,12 @@ import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureEncoder;
  * This is Hitchhiker-XOR erasure decoder that decodes a block group.
  */
 @InterfaceAudience.Private
-public class HHXORErasureDecoder extends AbstractErasureDecoder {
+public class HHXORErasureDecoder extends ErasureDecoder {
   private RawErasureDecoder rsRawDecoder;
   private RawErasureEncoder xorRawEncoder;
 
-  public HHXORErasureDecoder(int numDataUnits, int numParityUnits) {
-    super(numDataUnits, numParityUnits);
-  }
-
-  public HHXORErasureDecoder(ECSchema schema) {
-    super(schema);
+  public HHXORErasureDecoder(ErasureCoderOptions options) {
+    super(options);
   }
 
   @Override
@@ -71,24 +66,25 @@ public class HHXORErasureDecoder extends AbstractErasureDecoder {
 
   private RawErasureDecoder checkCreateRSRawDecoder() {
     if (rsRawDecoder == null) {
-      ErasureCoderOptions coderOptions = new ErasureCoderOptions(
-          getNumDataUnits(), getNumParityUnits());
       rsRawDecoder = CodecUtil.createRawDecoder(getConf(),
-          ErasureCodeConstants.RS_DEFAULT_CODEC_NAME, coderOptions);
+          ErasureCodeConstants.RS_DEFAULT_CODEC_NAME, getOptions());
     }
     return rsRawDecoder;
   }
 
   private RawErasureEncoder checkCreateXorRawEncoder() {
     if (xorRawEncoder == null) {
-      ErasureCoderOptions coderOptions = new ErasureCoderOptions(
-          getNumDataUnits(), getNumParityUnits());
       xorRawEncoder = CodecUtil.createRawEncoder(getConf(),
-          ErasureCodeConstants.XOR_CODEC_NAME, coderOptions);
+          ErasureCodeConstants.XOR_CODEC_NAME, getOptions());
     }
     return xorRawEncoder;
   }
 
+  @Override
+  public boolean preferDirectBuffer() {
+    return false;
+  }
+
   @Override
   public void release() {
     if (rsRawDecoder != null) {

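With this refactor a coder is built from a single ErasureCoderOptions and derives its raw RS/XOR coders lazily from getOptions(). A small construction sketch; the 6+3 layout and the explicit setConf call are assumptions made only for illustration:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
import org.apache.hadoop.io.erasurecode.coder.HHXORErasureDecoder;

public final class HHXORDecoderSketch {
  public static void main(String[] args) {
    ErasureCoderOptions options = new ErasureCoderOptions(6, 3); // assumed layout
    HHXORErasureDecoder decoder = new HHXORErasureDecoder(options);
    decoder.setConf(new Configuration()); // getConf() feeds CodecUtil.createRawDecoder/Encoder
    // decoder.calculateCoding(blockGroup) would then build the RS and XOR raw
    // coders on demand via checkCreateRSRawDecoder()/checkCreateXorRawEncoder().
  }
}
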
@@ -32,7 +32,7 @@ import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureEncoder;
  * decoding a block group.
  */
 @InterfaceAudience.Private
-public class HHXORErasureDecodingStep extends AbstractHHErasureCodingStep {
+public class HHXORErasureDecodingStep extends HHErasureCodingStep {
   private int pbIndex;
   private int[] piggyBackIndex;
   private int[] piggyBackFullIndex;

@@ -21,7 +21,6 @@ import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.io.erasurecode.CodecUtil;
 import org.apache.hadoop.io.erasurecode.ECBlock;
 import org.apache.hadoop.io.erasurecode.ECBlockGroup;
-import org.apache.hadoop.io.erasurecode.ECSchema;
 import org.apache.hadoop.io.erasurecode.ErasureCodeConstants;
 import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
 import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureEncoder;
@@ -38,16 +37,12 @@ import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureEncoder;
  * This is Hitchhiker-XOR erasure encoder that encodes a block group.
  */
 @InterfaceAudience.Private
-public class HHXORErasureEncoder extends AbstractErasureEncoder {
+public class HHXORErasureEncoder extends ErasureEncoder {
   private RawErasureEncoder rsRawEncoder;
   private RawErasureEncoder xorRawEncoder;
 
-  public HHXORErasureEncoder(int numDataUnits, int numParityUnits) {
-    super(numDataUnits, numParityUnits);
-  }
-
-  public HHXORErasureEncoder(ECSchema schema) {
-    super(schema);
+  public HHXORErasureEncoder(ErasureCoderOptions options) {
+    super(options);
   }
 
   @Override
@@ -65,21 +60,17 @@ public class HHXORErasureEncoder extends AbstractErasureEncoder {
 
   private RawErasureEncoder checkCreateRSRawEncoder() {
     if (rsRawEncoder == null) {
-      ErasureCoderOptions coderOptions = new ErasureCoderOptions(
-          getNumDataUnits(), getNumParityUnits());
       rsRawEncoder = CodecUtil.createRawEncoder(getConf(),
-          ErasureCodeConstants.RS_DEFAULT_CODEC_NAME, coderOptions);
+          ErasureCodeConstants.RS_DEFAULT_CODEC_NAME, getOptions());
     }
     return rsRawEncoder;
   }
 
   private RawErasureEncoder checkCreateXorRawEncoder() {
     if (xorRawEncoder == null) {
-      ErasureCoderOptions erasureCoderOptions = new ErasureCoderOptions(
-          getNumDataUnits(), getNumParityUnits());
       xorRawEncoder = CodecUtil.createRawEncoder(getConf(),
           ErasureCodeConstants.XOR_CODEC_NAME,
-          erasureCoderOptions);
+          getOptions());
     }
     return xorRawEncoder;
   }

@@ -31,7 +31,7 @@ import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureEncoder;
  * encoding a block group.
  */
 @InterfaceAudience.Private
-public class HHXORErasureEncodingStep extends AbstractHHErasureCodingStep {
+public class HHXORErasureEncodingStep extends HHErasureCodingStep {
   private int[] piggyBackIndex;
   private RawErasureEncoder rsRawEncoder;
   private RawErasureEncoder xorRawEncoder;

@@ -21,7 +21,6 @@ import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.io.erasurecode.CodecUtil;
 import org.apache.hadoop.io.erasurecode.ECBlock;
 import org.apache.hadoop.io.erasurecode.ECBlockGroup;
-import org.apache.hadoop.io.erasurecode.ECSchema;
 import org.apache.hadoop.io.erasurecode.ErasureCodeConstants;
 import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
 import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureDecoder;
@@ -32,15 +31,11 @@ import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureDecoder;
  * It implements {@link ErasureCoder}.
  */
 @InterfaceAudience.Private
-public class RSErasureDecoder extends AbstractErasureDecoder {
+public class RSErasureDecoder extends ErasureDecoder {
   private RawErasureDecoder rsRawDecoder;
 
-  public RSErasureDecoder(int numDataUnits, int numParityUnits) {
-    super(numDataUnits, numParityUnits);
-  }
-
-  public RSErasureDecoder(ECSchema schema) {
-    super(schema);
+  public RSErasureDecoder(ErasureCoderOptions options) {
+    super(options);
   }
 
   @Override
@@ -56,11 +51,8 @@ public class RSErasureDecoder extends AbstractErasureDecoder {
 
   private RawErasureDecoder checkCreateRSRawDecoder() {
     if (rsRawDecoder == null) {
-      // TODO: we should create the raw coder according to codec.
-      ErasureCoderOptions coderOptions = new ErasureCoderOptions(
-          getNumDataUnits(), getNumParityUnits());
       rsRawDecoder = CodecUtil.createRawDecoder(getConf(),
-          ErasureCodeConstants.RS_DEFAULT_CODEC_NAME, coderOptions);
+          ErasureCodeConstants.RS_DEFAULT_CODEC_NAME, getOptions());
     }
     return rsRawDecoder;
   }

@@ -21,7 +21,6 @@ import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.io.erasurecode.CodecUtil;
 import org.apache.hadoop.io.erasurecode.ECBlock;
 import org.apache.hadoop.io.erasurecode.ECBlockGroup;
-import org.apache.hadoop.io.erasurecode.ECSchema;
 import org.apache.hadoop.io.erasurecode.ErasureCodeConstants;
 import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
 import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureEncoder;
@@ -32,15 +31,11 @@ import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureEncoder;
  * It implements {@link ErasureCoder}.
  */
 @InterfaceAudience.Private
-public class RSErasureEncoder extends AbstractErasureEncoder {
+public class RSErasureEncoder extends ErasureEncoder {
   private RawErasureEncoder rawEncoder;
 
-  public RSErasureEncoder(int numDataUnits, int numParityUnits) {
-    super(numDataUnits, numParityUnits);
-  }
-
-  public RSErasureEncoder(ECSchema schema) {
-    super(schema);
+  public RSErasureEncoder(ErasureCoderOptions options) {
+    super(options);
   }
 
   @Override
@@ -57,10 +52,8 @@ public class RSErasureEncoder extends AbstractErasureEncoder {
   private RawErasureEncoder checkCreateRSRawEncoder() {
     if (rawEncoder == null) {
       // TODO: we should create the raw coder according to codec.
-      ErasureCoderOptions coderOptions = new ErasureCoderOptions(
-          getNumDataUnits(), getNumParityUnits());
       rawEncoder = CodecUtil.createRawEncoder(getConf(),
-          ErasureCodeConstants.RS_DEFAULT_CODEC_NAME, coderOptions);
+          ErasureCodeConstants.RS_DEFAULT_CODEC_NAME, getOptions());
     }
     return rawEncoder;
   }
@@ -71,4 +64,9 @@ public class RSErasureEncoder extends AbstractErasureEncoder {
       rawEncoder.release();
     }
   }
+
+  @Override
+  public boolean preferDirectBuffer() {
+    return false;
+  }
 }

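The RS encoder also gains a preferDirectBuffer() override. A sketch of how a caller might use that hint when allocating buffers; the 6+3 layout and the cell size are assumptions, not values from this patch:

import java.nio.ByteBuffer;

import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
import org.apache.hadoop.io.erasurecode.coder.RSErasureEncoder;

public final class BufferChoiceSketch {
  public static void main(String[] args) {
    RSErasureEncoder encoder =
        new RSErasureEncoder(new ErasureCoderOptions(6, 3)); // assumed layout
    int cellSize = 64 * 1024; // assumed cell size for the sketch
    // RSErasureEncoder reports false here, so a heap buffer is chosen.
    ByteBuffer buf = encoder.preferDirectBuffer()
        ? ByteBuffer.allocateDirect(cellSize)
        : ByteBuffer.allocate(cellSize);
    System.out.println("direct=" + buf.isDirect());
  }
}
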
@@ -21,7 +21,6 @@ import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.io.erasurecode.CodecUtil;
 import org.apache.hadoop.io.erasurecode.ECBlock;
 import org.apache.hadoop.io.erasurecode.ECBlockGroup;
-import org.apache.hadoop.io.erasurecode.ECSchema;
 import org.apache.hadoop.io.erasurecode.ErasureCodeConstants;
 import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
 import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureDecoder;
@@ -32,23 +31,17 @@ import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureDecoder;
  * It implements {@link ErasureCoder}.
  */
 @InterfaceAudience.Private
-public class XORErasureDecoder extends AbstractErasureDecoder {
+public class XORErasureDecoder extends ErasureDecoder {
 
-  public XORErasureDecoder(int numDataUnits, int numParityUnits) {
-    super(numDataUnits, numParityUnits);
-  }
-
-  public XORErasureDecoder(ECSchema schema) {
-    super(schema);
+  public XORErasureDecoder(ErasureCoderOptions options) {
+    super(options);
   }
 
   @Override
   protected ErasureCodingStep prepareDecodingStep(
       final ECBlockGroup blockGroup) {
-    ErasureCoderOptions coderOptions = new ErasureCoderOptions(
-        getNumDataUnits(), getNumParityUnits());
     RawErasureDecoder rawDecoder = CodecUtil.createRawDecoder(getConf(),
-        ErasureCodeConstants.XOR_CODEC_NAME, coderOptions);
+        ErasureCodeConstants.XOR_CODEC_NAME, getOptions());
 
     ECBlock[] inputBlocks = getInputBlocks(blockGroup);
 

@@ -21,7 +21,6 @@ import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.io.erasurecode.CodecUtil;
 import org.apache.hadoop.io.erasurecode.ECBlock;
 import org.apache.hadoop.io.erasurecode.ECBlockGroup;
-import org.apache.hadoop.io.erasurecode.ECSchema;
 import org.apache.hadoop.io.erasurecode.ErasureCodeConstants;
 import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
 import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureEncoder;
@@ -32,28 +31,21 @@ import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureEncoder;
  * It implements {@link ErasureCoder}.
  */
 @InterfaceAudience.Private
-public class XORErasureEncoder extends AbstractErasureEncoder {
+public class XORErasureEncoder extends ErasureEncoder {
 
-  public XORErasureEncoder(int numDataUnits, int numParityUnits) {
-    super(numDataUnits, numParityUnits);
-  }
-
-  public XORErasureEncoder(ECSchema schema) {
-    super(schema);
+  public XORErasureEncoder(ErasureCoderOptions options) {
+    super(options);
   }
 
   @Override
   protected ErasureCodingStep prepareEncodingStep(
       final ECBlockGroup blockGroup) {
-    ErasureCoderOptions coderOptions = new ErasureCoderOptions(
-        getNumDataUnits(), getNumParityUnits());
     RawErasureEncoder rawEncoder = CodecUtil.createRawEncoder(getConf(),
-        ErasureCodeConstants.XOR_CODEC_NAME, coderOptions);
+        ErasureCodeConstants.XOR_CODEC_NAME, getOptions());
 
     ECBlock[] inputBlocks = getInputBlocks(blockGroup);
 
     return new ErasureEncodingStep(inputBlocks,
         getOutputBlocks(blockGroup), rawEncoder);
   }
-
 }

@@ -15,35 +15,14 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
 
+/**
+ * Erasure coders framework.
+ */
+@InterfaceAudience.Private
+@InterfaceStability.Unstable
 package org.apache.hadoop.io.erasurecode.coder;
 
 import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.io.erasurecode.ECBlock;
-
-/**
- * Abstract class for Hitchhiker common facilities shared by
- * {@link HHXORErasureEncodingStep}and {@link HHXORErasureDecodingStep}.
- *
- * It implements {@link AbstractErasureCodingStep}.
- */
-@InterfaceAudience.Private
-public abstract class AbstractHHErasureCodingStep
-    extends AbstractErasureCodingStep {
-
-  private static final int SUB_PACKET_SIZE = 2;
-
-  /**
-   * Constructor given input blocks and output blocks.
-   *
-   * @param inputBlocks
-   * @param outputBlocks
-   */
-  public AbstractHHErasureCodingStep(ECBlock[] inputBlocks,
-                                     ECBlock[] outputBlocks) {
-    super(inputBlocks, outputBlocks);
-  }
-
-  protected int getSubPacketSize() {
-    return SUB_PACKET_SIZE;
-  }
-}
+import org.apache.hadoop.classification.InterfaceStability;

@@ -28,7 +28,7 @@ import java.util.Arrays;
  * Helpful utilities for implementing some raw erasure coders.
  */
 @InterfaceAudience.Private
-final class CoderUtil {
+public final class CoderUtil {
 
   private CoderUtil() {
     // No called

@@ -29,6 +29,7 @@ import org.apache.hadoop.fs.local.LocalConfigKeys;
 import org.apache.hadoop.ha.SshFenceByTcpPort;
 import org.apache.hadoop.ha.ZKFailoverController;
 import org.apache.hadoop.http.HttpServer2;
+import org.apache.hadoop.io.erasurecode.CodecUtil;
 import org.apache.hadoop.io.nativeio.NativeIO;
 import org.apache.hadoop.security.CompositeGroupsMapping;
 import org.apache.hadoop.security.HttpCrossOriginFilterInitializer;
@@ -49,6 +50,7 @@ import org.apache.hadoop.security.ssl.SSLFactory;
  * {@link org.apache.hadoop.security.LdapGroupsMapping}
  * {@link org.apache.hadoop.security.http.CrossOriginFilter}
  * {@link org.apache.hadoop.security.ssl.SSLFactory}
+ * {@link org.apache.hadoop.io.erasurecode.rawcoder.CoderUtil}
  * <p></p>
  * against core-site.xml for missing properties. Currently only
  * throws an error if the class is missing a property.
@@ -71,7 +73,8 @@ public class TestCommonConfigurationFields extends TestConfigurationFieldsBase {
         LdapGroupsMapping.class,
         ZKFailoverController.class,
         SSLFactory.class,
-        CompositeGroupsMapping.class
+        CompositeGroupsMapping.class,
+        CodecUtil.class
         };
 
     // Initialize used variables

@@ -18,7 +18,6 @@
 package org.apache.hadoop.io.erasurecode;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.io.erasurecode.rawcoder.RSRawDecoder;
 import org.apache.hadoop.io.erasurecode.rawcoder.RSRawDecoderLegacy;
 import org.apache.hadoop.io.erasurecode.rawcoder.RSRawEncoder;
@@ -73,7 +72,7 @@ public class TestCodecRawCoderMapping {
     String dummyFactName = "DummyNoneExistingFactory";
     // set the dummy factory to rs-legacy and create a raw coder
     // with rs-default, which is OK as the raw coder key is not used
-    conf.set(CommonConfigurationKeys.
+    conf.set(CodecUtil.
         IO_ERASURECODE_CODEC_RS_LEGACY_RAWCODER_KEY, dummyFactName);
     RawErasureEncoder encoder = CodecUtil.createRawEncoder(conf,
         ErasureCodeConstants.RS_DEFAULT_CODEC_NAME, coderOptions);

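The raw-coder factory keys now live on CodecUtil instead of CommonConfigurationKeys (the constant names are unchanged). A sketch of the round trip the tests rely on, using the rs-default key and the plain-Java RS factory as an example choice and an assumed 10+4 layout:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.erasurecode.CodecUtil;
import org.apache.hadoop.io.erasurecode.ErasureCodeConstants;
import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
import org.apache.hadoop.io.erasurecode.rawcoder.RSRawErasureCoderFactory;
import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureEncoder;

public final class RawCoderConfigSketch {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Pick the pure-Java RS factory for the rs-default codec (example choice).
    conf.set(CodecUtil.IO_ERASURECODE_CODEC_RS_DEFAULT_RAWCODER_KEY,
        RSRawErasureCoderFactory.class.getCanonicalName());
    // CodecUtil resolves the configured factory when creating the raw coder.
    RawErasureEncoder encoder = CodecUtil.createRawEncoder(conf,
        ErasureCodeConstants.RS_DEFAULT_CODEC_NAME,
        new ErasureCoderOptions(10, 4)); // assumed 10+4 layout
    encoder.release();
  }
}
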
@@ -17,7 +17,9 @@
  */
 package org.apache.hadoop.io.erasurecode.codec;
 
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.erasurecode.ECSchema;
+import org.apache.hadoop.io.erasurecode.ErasureCodecOptions;
 import org.apache.hadoop.io.erasurecode.coder.ErasureCoder;
 import org.junit.Test;
 
@@ -25,10 +27,12 @@ import static org.junit.Assert.assertEquals;
 
 public class TestHHXORErasureCodec {
   private ECSchema schema = new ECSchema("hhxor", 10, 4);
+  private ErasureCodecOptions options = new ErasureCodecOptions(schema);
 
   @Test
   public void testGoodCodec() {
-    HHXORErasureCodec codec = new HHXORErasureCodec(schema);
+    HHXORErasureCodec codec
+        = new HHXORErasureCodec(new Configuration(), options);
     ErasureCoder encoder = codec.createEncoder();
     assertEquals(10, encoder.getNumDataUnits());
     assertEquals(4, encoder.getNumParityUnits());

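As the updated test shows, a codec is now constructed from a Configuration plus ErasureCodecOptions wrapping the ECSchema. The same flow outside JUnit, as a sketch using the test's "hhxor" 10+4 schema:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.erasurecode.ECSchema;
import org.apache.hadoop.io.erasurecode.ErasureCodecOptions;
import org.apache.hadoop.io.erasurecode.codec.HHXORErasureCodec;
import org.apache.hadoop.io.erasurecode.coder.ErasureCoder;

public final class CodecCreationSketch {
  public static void main(String[] args) {
    ECSchema schema = new ECSchema("hhxor", 10, 4);
    ErasureCodecOptions options = new ErasureCodecOptions(schema);
    HHXORErasureCodec codec = new HHXORErasureCodec(new Configuration(), options);
    ErasureCoder encoder = codec.createEncoder();
    System.out.println(encoder.getNumDataUnits() + "+" + encoder.getNumParityUnits());
  }
}
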
@@ -20,6 +20,7 @@ package org.apache.hadoop.io.erasurecode.coder;
 import org.apache.hadoop.io.erasurecode.ECBlock;
 import org.apache.hadoop.io.erasurecode.ECBlockGroup;
 import org.apache.hadoop.io.erasurecode.ECChunk;
+import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
 import org.apache.hadoop.io.erasurecode.TestCoderBase;
 
 import java.lang.reflect.Constructor;
@@ -158,10 +159,12 @@ public abstract class TestErasureCoderBase extends TestCoderBase {
   protected ErasureCoder createEncoder() {
     ErasureCoder encoder;
     try {
+      ErasureCoderOptions options = new ErasureCoderOptions(
+          numDataUnits, numParityUnits, allowChangeInputs, allowDump);
       Constructor<? extends ErasureCoder> constructor =
           (Constructor<? extends ErasureCoder>)
-              encoderClass.getConstructor(int.class, int.class);
-      encoder = constructor.newInstance(numDataUnits, numParityUnits);
+              encoderClass.getConstructor(ErasureCoderOptions.class);
+      encoder = constructor.newInstance(options);
     } catch (Exception e) {
       throw new RuntimeException("Failed to create encoder", e);
     }
@@ -177,10 +180,12 @@ public abstract class TestErasureCoderBase extends TestCoderBase {
   protected ErasureCoder createDecoder() {
     ErasureCoder decoder;
     try {
+      ErasureCoderOptions options = new ErasureCoderOptions(
+          numDataUnits, numParityUnits, allowChangeInputs, allowDump);
       Constructor<? extends ErasureCoder> constructor =
           (Constructor<? extends ErasureCoder>)
-              decoderClass.getConstructor(int.class, int.class);
-      decoder = constructor.newInstance(numDataUnits, numParityUnits);
+              decoderClass.getConstructor(ErasureCoderOptions.class);
+      decoder = constructor.newInstance(options);
     } catch (Exception e) {
       throw new RuntimeException("Failed to create decoder", e);
     }

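Coders now expose a single constructor taking ErasureCoderOptions, which is what the test harness looks up reflectively. A sketch of that lookup against RSErasureEncoder (chosen here only as an example target); the 10+4 layout and the two boolean flags are assumed values:

import java.lang.reflect.Constructor;

import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
import org.apache.hadoop.io.erasurecode.coder.ErasureCoder;
import org.apache.hadoop.io.erasurecode.coder.RSErasureEncoder;

public final class ReflectiveCoderSketch {
  public static void main(String[] args) throws Exception {
    // numDataUnits, numParityUnits, allowChangeInputs, allowDump
    ErasureCoderOptions options = new ErasureCoderOptions(10, 4, true, true);
    Constructor<RSErasureEncoder> ctor =
        RSErasureEncoder.class.getConstructor(ErasureCoderOptions.class);
    ErasureCoder coder = ctor.newInstance(options);
    System.out.println("Created " + coder.getClass().getSimpleName()
        + " for " + coder.getNumDataUnits() + "+" + coder.getNumParityUnits());
  }
}
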
@@ -18,7 +18,7 @@
 package org.apache.hadoop.io.erasurecode.coder;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.apache.hadoop.io.erasurecode.CodecUtil;
 import org.apache.hadoop.io.erasurecode.rawcoder.RSRawErasureCoderFactory;
 import org.junit.Before;
 import org.junit.Test;
@@ -50,7 +50,7 @@ public class TestHHXORErasureCoder extends TestHHErasureCoderBase {
      * This tests if the configuration items work or not.
      */
     Configuration conf = new Configuration();
-    conf.set(CommonConfigurationKeys.IO_ERASURECODE_CODEC_RS_DEFAULT_RAWCODER_KEY,
+    conf.set(CodecUtil.IO_ERASURECODE_CODEC_RS_DEFAULT_RAWCODER_KEY,
         RSRawErasureCoderFactory.class.getCanonicalName());
     prepare(conf, 10, 4, new int[]{0}, new int[0]);
 

@@ -18,7 +18,7 @@
 package org.apache.hadoop.io.erasurecode.coder;
 
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.CommonConfigurationKeys;
+import org.apache.hadoop.io.erasurecode.CodecUtil;
 import org.apache.hadoop.io.erasurecode.rawcoder.RSRawErasureCoderFactory;
 import org.junit.Before;
 import org.junit.Rule;
@@ -57,7 +57,7 @@ public class TestRSErasureCoder extends TestErasureCoderBase {
      * This tests if the configuration items work or not.
      */
     Configuration conf = new Configuration();
-    conf.set(CommonConfigurationKeys.IO_ERASURECODE_CODEC_RS_DEFAULT_RAWCODER_KEY,
+    conf.set(CodecUtil.IO_ERASURECODE_CODEC_RS_DEFAULT_RAWCODER_KEY,
         RSRawErasureCoderFactory.class.getCanonicalName());
     prepare(conf, 10, 4, new int[]{0}, new int[0]);
 

@@ -20,7 +20,6 @@ package org.apache.hadoop.hdfs;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.protocol.Block;
 import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy;
@@ -83,7 +82,7 @@ public class TestDFSStripedInputStream {
     conf.setInt(DFSConfigKeys.DFS_NAMENODE_REPLICATION_MAX_STREAMS_KEY, 0);
     if (ErasureCodeNative.isNativeCodeLoaded()) {
       conf.set(
-          CommonConfigurationKeys.IO_ERASURECODE_CODEC_RS_DEFAULT_RAWCODER_KEY,
+          CodecUtil.IO_ERASURECODE_CODEC_RS_DEFAULT_RAWCODER_KEY,
           NativeRSRawErasureCoderFactory.class.getCanonicalName());
     }
     SimulatedFSDataset.setFactory(conf);

@@ -19,15 +19,13 @@ package org.apache.hadoop.hdfs;
 
 import java.io.IOException;
 import java.util.ArrayList;
-import java.util.Arrays;
-import java.util.Collections;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
+import org.apache.hadoop.io.erasurecode.CodecUtil;
 import org.apache.hadoop.io.erasurecode.ErasureCodeNative;
 import org.apache.hadoop.io.erasurecode.rawcoder.NativeRSRawErasureCoderFactory;
 import org.apache.hadoop.test.GenericTestUtils;
@@ -70,7 +68,7 @@ public class TestDFSStripedOutputStream {
     conf.setInt(DFSConfigKeys.DFS_NAMENODE_REPLICATION_MAX_STREAMS_KEY, 0);
     if (ErasureCodeNative.isNativeCodeLoaded()) {
       conf.set(
-          CommonConfigurationKeys.IO_ERASURECODE_CODEC_RS_DEFAULT_RAWCODER_KEY,
+          CodecUtil.IO_ERASURECODE_CODEC_RS_DEFAULT_RAWCODER_KEY,
           NativeRSRawErasureCoderFactory.class.getCanonicalName());
     }
     cluster = new MiniDFSCluster.Builder(conf).numDataNodes(numDNs).build();

@@ -22,7 +22,6 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.commons.logging.impl.Log4JLogger;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.CommonConfigurationKeys;
 import org.apache.hadoop.fs.CommonConfigurationKeysPublic;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.Path;
@@ -37,6 +36,7 @@ import org.apache.hadoop.hdfs.server.blockmanagement.BlockManager;
 import org.apache.hadoop.hdfs.server.blockmanagement.BlockPlacementPolicy;
 import org.apache.hadoop.hdfs.server.datanode.DataNode;
 import org.apache.hadoop.hdfs.server.namenode.NameNode;
+import org.apache.hadoop.io.erasurecode.CodecUtil;
 import org.apache.hadoop.io.erasurecode.ErasureCodeNative;
 import org.apache.hadoop.io.erasurecode.rawcoder.NativeRSRawErasureCoderFactory;
 import org.apache.hadoop.security.token.Token;
@@ -188,7 +188,7 @@ public class TestDFSStripedOutputStreamWithFailure {
     final int numDNs = NUM_DATA_BLOCKS + NUM_PARITY_BLOCKS;
     if (ErasureCodeNative.isNativeCodeLoaded()) {
       conf.set(
-          CommonConfigurationKeys.IO_ERASURECODE_CODEC_RS_DEFAULT_RAWCODER_KEY,
+          CodecUtil.IO_ERASURECODE_CODEC_RS_DEFAULT_RAWCODER_KEY,
           NativeRSRawErasureCoderFactory.class.getCanonicalName());
     }
     cluster = new MiniDFSCluster.Builder(conf).numDataNodes(numDNs).build();

|
@ -33,7 +33,6 @@ import java.util.Random;
|
||||||
import org.apache.commons.logging.Log;
|
import org.apache.commons.logging.Log;
|
||||||
import org.apache.commons.logging.LogFactory;
|
import org.apache.commons.logging.LogFactory;
|
||||||
import org.apache.hadoop.conf.Configuration;
|
import org.apache.hadoop.conf.Configuration;
|
||||||
import org.apache.hadoop.fs.CommonConfigurationKeys;
|
|
||||||
import org.apache.hadoop.fs.Path;
|
import org.apache.hadoop.fs.Path;
|
||||||
import org.apache.hadoop.hdfs.protocol.DatanodeID;
|
import org.apache.hadoop.hdfs.protocol.DatanodeID;
|
||||||
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
|
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
|
||||||
|
@ -48,6 +47,7 @@ import org.apache.hadoop.hdfs.server.namenode.ErasureCodingPolicyManager;
|
||||||
import org.apache.hadoop.hdfs.server.protocol.DatanodeStorage;
|
import org.apache.hadoop.hdfs.server.protocol.DatanodeStorage;
|
||||||
import org.apache.hadoop.hdfs.server.protocol.BlockECReconstructionCommand.BlockECReconstructionInfo;
|
import org.apache.hadoop.hdfs.server.protocol.BlockECReconstructionCommand.BlockECReconstructionInfo;
|
||||||
import org.apache.hadoop.hdfs.util.StripedBlockUtil;
|
import org.apache.hadoop.hdfs.util.StripedBlockUtil;
|
||||||
|
import org.apache.hadoop.io.erasurecode.CodecUtil;
|
||||||
import org.apache.hadoop.io.erasurecode.ErasureCodeNative;
|
import org.apache.hadoop.io.erasurecode.ErasureCodeNative;
|
||||||
import org.apache.hadoop.io.erasurecode.rawcoder.NativeRSRawErasureCoderFactory;
|
import org.apache.hadoop.io.erasurecode.rawcoder.NativeRSRawErasureCoderFactory;
|
||||||
import org.apache.hadoop.test.GenericTestUtils;
|
import org.apache.hadoop.test.GenericTestUtils;
|
||||||
|
@ -97,7 +97,7 @@ public class TestReconstructStripedFile {
|
||||||
false);
|
false);
|
||||||
if (ErasureCodeNative.isNativeCodeLoaded()) {
|
if (ErasureCodeNative.isNativeCodeLoaded()) {
|
||||||
conf.set(
|
conf.set(
|
||||||
CommonConfigurationKeys.IO_ERASURECODE_CODEC_RS_DEFAULT_RAWCODER_KEY,
|
CodecUtil.IO_ERASURECODE_CODEC_RS_DEFAULT_RAWCODER_KEY,
|
||||||
NativeRSRawErasureCoderFactory.class.getCanonicalName());
|
NativeRSRawErasureCoderFactory.class.getCanonicalName());
|
||||||
}
|
}
|
||||||
cluster = new MiniDFSCluster.Builder(conf).numDataNodes(dnNum).build();
|
cluster = new MiniDFSCluster.Builder(conf).numDataNodes(dnNum).build();
|
||||||
|
|