HADOOP-11540. Raw Reed-Solomon coder using Intel ISA-L library. Contributed by Kai Zheng

Kai Zheng 2016-08-01 06:34:26 +08:00
parent e5766b1dbe
commit 34ccaa8367
21 changed files with 631 additions and 128 deletions

View File

@@ -47,6 +47,19 @@
<Field name="out" />
<Bug pattern="IS2_INCONSISTENT_SYNC" />
</Match>
<!--
The nativeCoder field is read and written only by native code.
-->
<Match>
<Class name="org.apache.hadoop.io.erasurecode.rawcoder.AbstractNativeRawEncoder" />
<Field name="nativeCoder" />
<Bug pattern="UUF_UNUSED_FIELD" />
</Match>
<Match>
<Class name="org.apache.hadoop.io.erasurecode.rawcoder.AbstractNativeRawDecoder" />
<Field name="nativeCoder" />
<Bug pattern="UUF_UNUSED_FIELD" />
</Match>
<!--
A further SaslException should be ignored during cleanup and the
original exception should be re-thrown.

View File

@@ -631,6 +631,8 @@
<javahClassName>org.apache.hadoop.io.compress.lz4.Lz4Compressor</javahClassName>
<javahClassName>org.apache.hadoop.io.compress.lz4.Lz4Decompressor</javahClassName>
<javahClassName>org.apache.hadoop.io.erasurecode.ErasureCodeNative</javahClassName>
<javahClassName>org.apache.hadoop.io.erasurecode.rawcoder.NativeRSRawEncoder</javahClassName>
<javahClassName>org.apache.hadoop.io.erasurecode.rawcoder.NativeRSRawDecoder</javahClassName>
<javahClassName>org.apache.hadoop.crypto.OpensslCipher</javahClassName>
<javahClassName>org.apache.hadoop.crypto.random.OpensslSecureRandom</javahClassName>
<javahClassName>org.apache.hadoop.util.NativeCrc32</javahClassName>
@@ -769,6 +771,8 @@
<javahClassName>org.apache.hadoop.io.compress.lz4.Lz4Compressor</javahClassName>
<javahClassName>org.apache.hadoop.io.compress.lz4.Lz4Decompressor</javahClassName>
<javahClassName>org.apache.hadoop.io.erasurecode.ErasureCodeNative</javahClassName>
<javahClassName>org.apache.hadoop.io.erasurecode.rawcoder.NativeRSRawEncoder</javahClassName>
<javahClassName>org.apache.hadoop.io.erasurecode.rawcoder.NativeRSRawDecoder</javahClassName>
<javahClassName>org.apache.hadoop.crypto.OpensslCipher</javahClassName>
<javahClassName>org.apache.hadoop.crypto.random.OpensslSecureRandom</javahClassName>
<javahClassName>org.apache.hadoop.util.NativeCrc32</javahClassName>

View File

@@ -0,0 +1,84 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.io.erasurecode.rawcoder;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.nio.ByteBuffer;
/**
* Abstract native raw decoder for all native decoders to extend.
*/
@InterfaceAudience.Private
abstract class AbstractNativeRawDecoder extends RawErasureDecoder {
public static Logger LOG =
LoggerFactory.getLogger(AbstractNativeRawDecoder.class);
public AbstractNativeRawDecoder(ErasureCoderOptions coderOptions) {
super(coderOptions);
}
@Override
protected void doDecode(ByteBufferDecodingState decodingState) {
int[] inputOffsets = new int[decodingState.inputs.length];
int[] outputOffsets = new int[decodingState.outputs.length];
ByteBuffer buffer;
for (int i = 0; i < decodingState.inputs.length; ++i) {
buffer = decodingState.inputs[i];
if (buffer != null) {
inputOffsets[i] = buffer.position();
}
}
for (int i = 0; i < decodingState.outputs.length; ++i) {
buffer = decodingState.outputs[i];
outputOffsets[i] = buffer.position();
}
performDecodeImpl(decodingState.inputs, inputOffsets,
decodingState.decodeLength, decodingState.erasedIndexes,
decodingState.outputs, outputOffsets);
}
protected abstract void performDecodeImpl(ByteBuffer[] inputs,
int[] inputOffsets, int dataLen,
int[] erased, ByteBuffer[] outputs,
int[] outputOffsets);
@Override
protected void doDecode(ByteArrayDecodingState decodingState) {
LOG.warn("convertToByteBufferState is invoked, " +
"not efficiently. Please use direct ByteBuffer inputs/outputs");
ByteBufferDecodingState bbdState = decodingState.convertToByteBufferState();
doDecode(bbdState);
for (int i = 0; i < decodingState.outputs.length; i++) {
bbdState.outputs[i].get(decodingState.outputs[i],
decodingState.outputOffsets[i], decodingState.decodeLength);
}
}
// Links to the underlying data structure in the native layer.
// No getter/setter; the field is accessed only by native code.
private long nativeCoder;
}
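To make the performDecodeImpl contract concrete: the offsets passed down are the buffers' initial positions captured above, erased units arrive as null inputs, and implementations should use absolute reads and writes so positions stay untouched. Below is a minimal, hypothetical sketch (not part of this patch) that satisfies the contract with single-erasure XOR recovery rather than Reed-Solomon; the real subclasses later in this commit delegate to ISA-L through JNI.

package org.apache.hadoop.io.erasurecode.rawcoder; // base class is package-private

import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;

import java.nio.ByteBuffer;

// Hypothetical illustration only; not Reed-Solomon and not in this patch.
class ToyXorRawDecoder extends AbstractNativeRawDecoder {
  ToyXorRawDecoder(ErasureCoderOptions coderOptions) {
    super(coderOptions);
  }

  @Override
  protected void performDecodeImpl(ByteBuffer[] inputs, int[] inputOffsets,
                                   int dataLen, int[] erased,
                                   ByteBuffer[] outputs, int[] outputOffsets) {
    // Sketch assumes a single erasure, so there is exactly one output buffer
    // and the erased indexes are not needed for XOR recovery.
    for (int pos = 0; pos < dataLen; pos++) {
      byte b = 0;
      for (int i = 0; i < inputs.length; i++) {
        if (inputs[i] != null) { // erased or not-to-read units are null
          b ^= inputs[i].get(inputOffsets[i] + pos); // absolute read
        }
      }
      outputs[0].put(outputOffsets[0] + pos, b); // absolute write
    }
  }
}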

View File

@@ -0,0 +1,81 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.io.erasurecode.rawcoder;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.nio.ByteBuffer;
/**
* Abstract native raw encoder for all native encoders to extend.
*/
@InterfaceAudience.Private
abstract class AbstractNativeRawEncoder extends RawErasureEncoder {
public static Logger LOG =
LoggerFactory.getLogger(AbstractNativeRawEncoder.class);
public AbstractNativeRawEncoder(ErasureCoderOptions coderOptions) {
super(coderOptions);
}
@Override
protected void doEncode(ByteBufferEncodingState encodingState) {
int[] inputOffsets = new int[encodingState.inputs.length];
int[] outputOffsets = new int[encodingState.outputs.length];
int dataLen = encodingState.inputs[0].remaining();
ByteBuffer buffer;
for (int i = 0; i < encodingState.inputs.length; ++i) {
buffer = encodingState.inputs[i];
inputOffsets[i] = buffer.position();
}
for (int i = 0; i < encodingState.outputs.length; ++i) {
buffer = encodingState.outputs[i];
outputOffsets[i] = buffer.position();
}
performEncodeImpl(encodingState.inputs, inputOffsets, dataLen,
encodingState.outputs, outputOffsets);
}
protected abstract void performEncodeImpl(
ByteBuffer[] inputs, int[] inputOffsets,
int dataLen, ByteBuffer[] outputs, int[] outputOffsets);
@Override
protected void doEncode(ByteArrayEncodingState encodingState) {
LOG.warn("convertToByteBufferState is invoked, " +
"not efficiently. Please use direct ByteBuffer inputs/outputs");
ByteBufferEncodingState bbeState = encodingState.convertToByteBufferState();
doEncode(bbeState);
for (int i = 0; i < encodingState.outputs.length; i++) {
bbeState.outputs[i].get(encodingState.outputs[i],
encodingState.outputOffsets[i], encodingState.encodeLength);
}
}
// Links to the underlying data structure in the native layer.
// No getter/setter; the field is accessed only by native code.
private long nativeCoder;
}
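The encode-side contract mirrors the decoder's. A matching hypothetical sketch (again not part of this patch) that fills a single XOR parity unit:

package org.apache.hadoop.io.erasurecode.rawcoder;

import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;

import java.nio.ByteBuffer;

// Hypothetical illustration only: one XOR parity unit, not Reed-Solomon.
class ToyXorRawEncoder extends AbstractNativeRawEncoder {
  ToyXorRawEncoder(ErasureCoderOptions coderOptions) {
    super(coderOptions);
  }

  @Override
  protected void performEncodeImpl(ByteBuffer[] inputs, int[] inputOffsets,
                                   int dataLen, ByteBuffer[] outputs,
                                   int[] outputOffsets) {
    for (int pos = 0; pos < dataLen; pos++) {
      byte b = 0;
      for (int i = 0; i < inputs.length; i++) {
        b ^= inputs[i].get(inputOffsets[i] + pos); // absolute read
      }
      outputs[0].put(outputOffsets[0] + pos, b); // absolute write
    }
  }
}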

View File

@@ -20,6 +20,8 @@
import org.apache.hadoop.HadoopIllegalArgumentException;
import org.apache.hadoop.classification.InterfaceAudience;
import java.nio.ByteBuffer;
/**
* A utility class that maintains decoding state during a decode call using
* byte array inputs.
@@ -65,6 +67,27 @@ class ByteArrayDecodingState extends DecodingState {
this.outputOffsets = outputOffsets;
}
/**
* Convert to a ByteBufferDecodingState, cloning the on-heap arrays into
* direct ByteBuffers.
*/
ByteBufferDecodingState convertToByteBufferState() {
ByteBuffer[] newInputs = new ByteBuffer[inputs.length];
ByteBuffer[] newOutputs = new ByteBuffer[outputs.length];
for (int i = 0; i < inputs.length; i++) {
newInputs[i] = CoderUtil.cloneAsDirectByteBuffer(inputs[i],
inputOffsets[i], decodeLength);
}
for (int i = 0; i < outputs.length; i++) {
newOutputs[i] = ByteBuffer.allocateDirect(decodeLength);
}
ByteBufferDecodingState bbdState = new ByteBufferDecodingState(decoder,
decodeLength, erasedIndexes, newInputs, newOutputs);
return bbdState;
}
/**
* Check and ensure the buffers are of the desired length.
* @param buffers the buffers to check

View File

@@ -20,6 +20,8 @@
import org.apache.hadoop.HadoopIllegalArgumentException;
import org.apache.hadoop.classification.InterfaceAudience;
import java.nio.ByteBuffer;
/**
* A utility class that maintains encoding state during an encode call using
* byte array inputs.
@@ -61,6 +63,27 @@ class ByteArrayEncodingState extends EncodingState {
this.outputOffsets = outputOffsets;
}
/**
* Convert to a ByteBufferEncodingState, cloning the on-heap arrays into
* direct ByteBuffers.
*/
ByteBufferEncodingState convertToByteBufferState() {
ByteBuffer[] newInputs = new ByteBuffer[inputs.length];
ByteBuffer[] newOutputs = new ByteBuffer[outputs.length];
for (int i = 0; i < inputs.length; i++) {
newInputs[i] = CoderUtil.cloneAsDirectByteBuffer(inputs[i],
inputOffsets[i], encodeLength);
}
for (int i = 0; i < outputs.length; i++) {
newOutputs[i] = ByteBuffer.allocateDirect(encodeLength);
}
ByteBufferEncodingState bbeState = new ByteBufferEncodingState(encoder,
encodeLength, newInputs, newOutputs);
return bbeState;
}
/**
* Check and ensure the buffers are of the desired length.
* @param buffers the buffers to check

View File

@@ -48,8 +48,20 @@ class ByteBufferDecodingState extends DecodingState {
checkOutputBuffers(outputs);
}
ByteBufferDecodingState(RawErasureDecoder decoder,
int decodeLength,
int[] erasedIndexes,
ByteBuffer[] inputs,
ByteBuffer[] outputs) {
this.decoder = decoder;
this.decodeLength = decodeLength;
this.erasedIndexes = erasedIndexes;
this.inputs = inputs;
this.outputs = outputs;
}
/**
* Convert to a ByteArrayEncodingState when it's backed by on-heap arrays.
* Convert to a ByteArrayDecodingState when it's backed by on-heap arrays.
*/
ByteArrayDecodingState convertToByteArrayState() {
int[] inputOffsets = new int[inputs.length];

View File

@@ -46,6 +46,16 @@ class ByteBufferEncodingState extends EncodingState {
checkBuffers(outputs);
}
ByteBufferEncodingState(RawErasureEncoder encoder,
int encodeLength,
ByteBuffer[] inputs,
ByteBuffer[] outputs) {
this.encoder = encoder;
this.encodeLength = encodeLength;
this.inputs = inputs;
this.outputs = outputs;
}
/**
* Convert to a ByteArrayEncodingState when it's backed by on-heap arrays.
*/

View File

@@ -83,8 +83,6 @@ static byte[] resetBuffer(byte[] buffer, int offset, int len) {
/**
* Initialize the output buffers with ZERO bytes.
* @param buffers
* @param dataLen
*/
static void resetOutputBuffers(ByteBuffer[] buffers, int dataLen) {
for (ByteBuffer buffer : buffers) {
@@ -94,8 +92,6 @@ static void resetOutputBuffers(ByteBuffer[] buffers, int dataLen) {
/**
* Initialize the output buffers with ZERO bytes.
* @param buffers
* @param dataLen
*/
static void resetOutputBuffers(byte[][] buffers, int[] offsets,
int dataLen) {
@@ -127,10 +123,6 @@ static ByteBuffer[] toBuffers(ECChunk[] chunks) {
/**
* Clone an input bytes array as direct ByteBuffer.
* @param input
* @param len
* @param offset
* @return direct ByteBuffer
*/
static ByteBuffer cloneAsDirectByteBuffer(byte[] input, int offset, int len) {
if (input == null) { // an input can be null, if erased or not to read
@@ -166,10 +158,6 @@ static <T> int[] getNullIndexes(T[] inputs) {
* @return the first valid input
*/
static <T> T findFirstValidInput(T[] inputs) {
if (inputs.length > 0 && inputs[0] != null) {
return inputs[0];
}
for (T input : inputs) {
if (input != null) {
return input;

View File

@@ -0,0 +1,61 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.io.erasurecode.rawcoder;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.io.erasurecode.ErasureCodeNative;
import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
import java.nio.ByteBuffer;
/**
* A Reed-Solomon raw decoder using the Intel ISA-L library.
*/
@InterfaceAudience.Private
public class NativeRSRawDecoder extends AbstractNativeRawDecoder {
static {
ErasureCodeNative.checkNativeCodeLoaded();
}
public NativeRSRawDecoder(ErasureCoderOptions coderOptions) {
super(coderOptions);
initImpl(coderOptions.getNumDataUnits(), coderOptions.getNumParityUnits());
}
@Override
protected void performDecodeImpl(ByteBuffer[] inputs, int[] inputOffsets,
int dataLen, int[] erased,
ByteBuffer[] outputs, int[] outputOffsets) {
decodeImpl(inputs, inputOffsets, dataLen, erased, outputs, outputOffsets);
}
@Override
public void release() {
destroyImpl();
}
private native void initImpl(int numDataUnits, int numParityUnits);
private native void decodeImpl(
ByteBuffer[] inputs, int[] inputOffsets, int dataLen, int[] erased,
ByteBuffer[] outputs, int[] outputOffsets);
private native void destroyImpl();
}

View File

@@ -0,0 +1,60 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.io.erasurecode.rawcoder;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.io.erasurecode.ErasureCodeNative;
import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
import java.nio.ByteBuffer;
/**
* A Reed-Solomon raw encoder using the Intel ISA-L library.
*/
@InterfaceAudience.Private
public class NativeRSRawEncoder extends AbstractNativeRawEncoder {
static {
ErasureCodeNative.checkNativeCodeLoaded();
}
public NativeRSRawEncoder(ErasureCoderOptions coderOptions) {
super(coderOptions);
initImpl(coderOptions.getNumDataUnits(), coderOptions.getNumParityUnits());
}
@Override
protected void performEncodeImpl(
ByteBuffer[] inputs, int[] inputOffsets, int dataLen,
ByteBuffer[] outputs, int[] outputOffsets) {
encodeImpl(inputs, inputOffsets, dataLen, outputs, outputOffsets);
}
@Override
public void release() {
destroyImpl();
}
private native void initImpl(int numDataUnits, int numParityUnits);
private native void encodeImpl(ByteBuffer[] inputs, int[] inputOffsets,
int dataLen, ByteBuffer[] outputs,
int[] outputOffsets);
private native void destroyImpl();
}

View File

@@ -0,0 +1,39 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.io.erasurecode.rawcoder;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
/**
* A raw coder factory for the native Reed-Solomon coder backed by Intel ISA-L.
*/
@InterfaceAudience.Private
public class NativeRSRawErasureCoderFactory implements RawErasureCoderFactory {
@Override
public RawErasureEncoder createEncoder(ErasureCoderOptions coderOptions) {
return new NativeRSRawEncoder(coderOptions);
}
@Override
public RawErasureDecoder createDecoder(ErasureCoderOptions coderOptions) {
return new NativeRSRawDecoder(coderOptions);
}
}
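A hedged usage sketch for the new coders (illustrative values only; it assumes the ISA-L library is loadable, otherwise the coders' static checkNativeCodeLoaded call fails, relies on the encode/decode signatures of RawErasureEncoder and RawErasureDecoder, and elides buffer position management):

import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
import org.apache.hadoop.io.erasurecode.rawcoder.*;

import java.nio.ByteBuffer;

// Illustrative RS(6,3) round trip over direct ByteBuffers, the preferred
// buffer type for the native coders (heap arrays fall back to a conversion).
ErasureCoderOptions options = new ErasureCoderOptions(6, 3);
RawErasureCoderFactory factory = new NativeRSRawErasureCoderFactory();
RawErasureEncoder encoder = factory.createEncoder(options);
RawErasureDecoder decoder = factory.createDecoder(options);

int cellSize = 64 * 1024; // arbitrary example size
ByteBuffer[] data = new ByteBuffer[6];
ByteBuffer[] parity = new ByteBuffer[3];
for (int i = 0; i < data.length; i++) {
  data[i] = ByteBuffer.allocateDirect(cellSize); // fill with real cell data
}
for (int i = 0; i < parity.length; i++) {
  parity[i] = ByteBuffer.allocateDirect(cellSize);
}
encoder.encode(data, parity);

// Recovery: the decode inputs span all data + parity units in order;
// erased slots stay null, matching the null-input convention above.
ByteBuffer[] decodeInputs = new ByteBuffer[6 + 3];
// ... place the surviving units (rewound) at their indexes, leave index 0 null ...
int[] erasedIndexes = {0};
ByteBuffer[] recovered = {ByteBuffer.allocateDirect(cellSize)};
decoder.decode(decodeInputs, erasedIndexes, recovered);

encoder.release();
decoder.release();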

View File

@@ -30,14 +30,14 @@ public interface RawErasureCoderFactory {
/**
* Create raw erasure encoder.
* @param conf the configuration used to create the encoder
* @param coderOptions the options used to create the encoder
* @return raw erasure encoder
*/
RawErasureEncoder createEncoder(ErasureCoderOptions coderOptions);
/**
* Create raw erasure decoder.
* @param conf the configuration used to create the encoder
* @param coderOptions the options used to create the decoder
* @return raw erasure decoder
*/
RawErasureDecoder createDecoder(ErasureCoderOptions coderOptions);

View File

@@ -17,12 +17,12 @@
*/
package org.apache.hadoop.io.erasurecode.rawcoder.util;
import org.apache.hadoop.classification.InterfaceAudience;
import java.nio.ByteBuffer;
import java.util.HashMap;
import java.util.Map;
import org.apache.hadoop.classification.InterfaceAudience;
/**
* Implementation of Galois field arithmetic with 2^p elements. The input must
* be unsigned integers. It's ported from HDFS-RAID, slightly adapted.

View File

@@ -34,20 +34,31 @@ void loadLib(JNIEnv *env) {
void setCoder(JNIEnv* env, jobject thiz, IsalCoder* pCoder) {
jclass clazz = (*env)->GetObjectClass(env, thiz);
jfieldID __coderState = (*env)->GetFieldID(env, clazz, "__native_coder", "J");
(*env)->SetLongField(env, thiz, __coderState, (jlong) pCoder);
jfieldID fid = (*env)->GetFieldID(env, clazz, "nativeCoder", "J");
if (fid == NULL) {
THROW(env, "java/lang/UnsatisfiedLinkError",
"Field nativeCoder not found");
return; // don't write through a NULL field ID
}
(*env)->SetLongField(env, thiz, fid, (jlong) pCoder);
}
IsalCoder* getCoder(JNIEnv* env, jobject thiz) {
jclass clazz = (*env)->GetObjectClass(env, thiz);
jfieldID __verbose = (*env)->GetFieldID(env, clazz, "__native_verbose", "J");
int verbose = (int)(*env)->GetIntField(env, thiz, __verbose);
jmethodID mid = (*env)->GetMethodID(env, clazz, "allowVerboseDump", "()Z");
if (mid == NULL) {
THROW(env, "java/lang/UnsatisfiedLinkError",
"Method allowVerboseDump not found");
return NULL; // don't call through a NULL method ID
}
jboolean verbose = (*env)->CallBooleanMethod(env, thiz, mid);
jfieldID __coderState = (*env)->GetFieldID(env, clazz, "__native_coder", "J");
IsalCoder* pCoder = (IsalCoder*)(*env)->GetLongField(env,
thiz, __coderState);
pCoder->verbose = verbose;
jfieldID fid = (*env)->GetFieldID(env, clazz, "nativeCoder", "J");
if (fid == NULL) {
THROW(env, "java/lang/UnsatisfiedLinkError",
"Field nativeCoder not found");
return NULL; // don't read through a NULL field ID
}
IsalCoder* pCoder = (IsalCoder*)(*env)->GetLongField(env, thiz, fid);
pCoder->verbose = (verbose == JNI_TRUE) ? 1 : 0;
return pCoder;
}

View File

@@ -1,29 +0,0 @@
/* DO NOT EDIT THIS FILE - it is machine generated */
#include <jni.h>
/* Header for class org_apache_hadoop_io_erasurecode_ErasureCodeNative */
#ifndef _Included_org_apache_hadoop_io_erasurecode_ErasureCodeNative
#define _Included_org_apache_hadoop_io_erasurecode_ErasureCodeNative
#ifdef __cplusplus
extern "C" {
#endif
/*
* Class: org_apache_hadoop_io_erasurecode_ErasureCodeNative
* Method: loadLibrary
* Signature: ()V
*/
JNIEXPORT void JNICALL Java_org_apache_hadoop_io_erasurecode_ErasureCodeNative_loadLibrary
(JNIEnv *, jclass);
/*
* Class: org_apache_hadoop_io_erasurecode_ErasureCodeNative
* Method: getLibraryName
* Signature: ()Ljava/lang/String;
*/
JNIEXPORT jstring JNICALL Java_org_apache_hadoop_io_erasurecode_ErasureCodeNative_getLibraryName
(JNIEnv *, jclass);
#ifdef __cplusplus
}
#endif
#endif

View File

@@ -1,37 +0,0 @@
/* DO NOT EDIT THIS FILE - it is machine generated */
#include <jni.h>
/* Header for class org_apache_hadoop_io_erasurecode_rawcoder_NativeRSRawDecoder */
#ifndef _Included_org_apache_hadoop_io_erasurecode_rawcoder_NativeRSRawDecoder
#define _Included_org_apache_hadoop_io_erasurecode_rawcoder_NativeRSRawDecoder
#ifdef __cplusplus
extern "C" {
#endif
/*
* Class: org_apache_hadoop_io_erasurecode_rawcoder_NativeRSRawDecoder
* Method: initImpl
* Signature: (II[I)V
*/
JNIEXPORT void JNICALL Java_org_apache_hadoop_io_erasurecode_rawcoder_NativeRSRawDecoder_initImpl
(JNIEnv *, jobject, jint, jint);
/*
* Class: org_apache_hadoop_io_erasurecode_rawcoder_NativeRSRawDecoder
* Method: decodeImpl
* Signature: ([Ljava/nio/ByteBuffer;[II[I[Ljava/nio/ByteBuffer;[I)V
*/
JNIEXPORT void JNICALL Java_org_apache_hadoop_io_erasurecode_rawcoder_NativeRSRawDecoder_decodeImpl
(JNIEnv *, jobject, jobjectArray, jintArray, jint, jintArray, jobjectArray, jintArray);
/*
* Class: org_apache_hadoop_io_erasurecode_rawcoder_NativeRSRawDecoder
* Method: destroyImpl
* Signature: ()V
*/
JNIEXPORT void JNICALL Java_org_apache_hadoop_io_erasurecode_rawcoder_NativeRSRawDecoder_destroyImpl
(JNIEnv *, jobject);
#ifdef __cplusplus
}
#endif
#endif

View File

@@ -1,37 +0,0 @@
/* DO NOT EDIT THIS FILE - it is machine generated */
#include <jni.h>
/* Header for class org_apache_hadoop_io_erasurecode_rawcoder_NativeRSRawEncoder */
#ifndef _Included_org_apache_hadoop_io_erasurecode_rawcoder_NativeRSRawEncoder
#define _Included_org_apache_hadoop_io_erasurecode_rawcoder_NativeRSRawEncoder
#ifdef __cplusplus
extern "C" {
#endif
/*
* Class: org_apache_hadoop_io_erasurecode_rawcoder_NativeRSRawEncoder
* Method: initImpl
* Signature: (II[I)V
*/
JNIEXPORT void JNICALL Java_org_apache_hadoop_io_erasurecode_rawcoder_NativeRSRawEncoder_initImpl
(JNIEnv *, jobject, jint, jint);
/*
* Class: org_apache_hadoop_io_erasurecode_rawcoder_NativeRSRawEncoder
* Method: encodeImpl
* Signature: ([Ljava/nio/ByteBuffer;[II[Ljava/nio/ByteBuffer;[I)V
*/
JNIEXPORT void JNICALL Java_org_apache_hadoop_io_erasurecode_rawcoder_NativeRSRawEncoder_encodeImpl
(JNIEnv *, jobject, jobjectArray, jintArray, jint, jobjectArray, jintArray);
/*
* Class: org_apache_hadoop_io_erasurecode_rawcoder_NativeRSRawEncoder
* Method: destroyImpl
* Signature: ()V
*/
JNIEXPORT void JNICALL Java_org_apache_hadoop_io_erasurecode_rawcoder_NativeRSRawEncoder_destroyImpl
(JNIEnv *, jobject);
#ifdef __cplusplus
}
#endif
#endif

View File

@@ -0,0 +1,121 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.io.erasurecode.rawcoder;
import org.apache.hadoop.io.erasurecode.ErasureCodeNative;
import org.junit.Assume;
import org.junit.Before;
import org.junit.Test;
/**
* Test native raw Reed-Solomon encoding and decoding.
*/
public class TestNativeRSRawCoder extends TestRSRawCoderBase {
@Before
public void setup() {
Assume.assumeTrue(ErasureCodeNative.isNativeCodeLoaded());
this.encoderClass = NativeRSRawEncoder.class;
this.decoderClass = NativeRSRawDecoder.class;
setAllowDump(true);
}
@Test
public void testCoding_6x3_erasing_all_d() {
prepare(null, 6, 3, new int[]{0, 1, 2}, new int[0], true);
testCodingDoMixAndTwice();
}
@Test
public void testCoding_6x3_erasing_d0_d2() {
prepare(null, 6, 3, new int[] {0, 2}, new int[]{});
testCodingDoMixAndTwice();
}
@Test
public void testCoding_6x3_erasing_d0() {
prepare(null, 6, 3, new int[]{0}, new int[0]);
testCodingDoMixAndTwice();
}
@Test
public void testCoding_6x3_erasing_d2() {
prepare(null, 6, 3, new int[]{2}, new int[]{});
testCodingDoMixAndTwice();
}
@Test
public void testCoding_6x3_erasing_d0_p0() {
prepare(null, 6, 3, new int[]{0}, new int[]{0});
testCodingDoMixAndTwice();
}
@Test
public void testCoding_6x3_erasing_all_p() {
prepare(null, 6, 3, new int[0], new int[]{0, 1, 2});
testCodingDoMixAndTwice();
}
@Test
public void testCoding_6x3_erasing_p0() {
prepare(null, 6, 3, new int[0], new int[]{0});
testCodingDoMixAndTwice();
}
@Test
public void testCoding_6x3_erasing_p2() {
prepare(null, 6, 3, new int[0], new int[]{2});
testCodingDoMixAndTwice();
}
@Test
public void testCoding_6x3_erasure_p0_p2() {
prepare(null, 6, 3, new int[0], new int[]{0, 2});
testCodingDoMixAndTwice();
}
@Test
public void testCoding_6x3_erasing_d0_p0_p1() {
prepare(null, 6, 3, new int[]{0}, new int[]{0, 1});
testCodingDoMixAndTwice();
}
@Test
public void testCoding_6x3_erasing_d0_d2_p2() {
prepare(null, 6, 3, new int[]{0, 2}, new int[]{2});
testCodingDoMixAndTwice();
}
@Test
public void testCodingNegative_6x3_erasing_d2_d4() {
prepare(null, 6, 3, new int[]{2, 4}, new int[0]);
testCodingDoMixAndTwice();
}
@Test
public void testCodingNegative_6x3_erasing_too_many() {
prepare(null, 6, 3, new int[]{2, 4}, new int[]{0, 1});
testCodingWithErasingTooMany();
}
@Test
public void testCoding_10x4_erasing_d0_p0() {
prepare(null, 10, 4, new int[] {0}, new int[] {0});
testCodingDoMixAndTwice();
}
}

View File

@@ -0,0 +1,38 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.io.erasurecode.rawcoder;
import org.apache.hadoop.io.erasurecode.ErasureCodeNative;
import org.junit.Assume;
import org.junit.Before;
/**
* Test interoperability: data encoded by the pure-Java Reed-Solomon
* encoder is decoded by the native ISA-L decoder.
*/
public class TestRSRawCoderInteroperable1 extends TestRSRawCoderBase {
@Before
public void setup() {
Assume.assumeTrue(ErasureCodeNative.isNativeCodeLoaded());
this.encoderClass = RSRawEncoder.class;
this.decoderClass = NativeRSRawDecoder.class;
setAllowDump(true);
}
}

View File

@@ -0,0 +1,38 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.io.erasurecode.rawcoder;
import org.apache.hadoop.io.erasurecode.ErasureCodeNative;
import org.junit.Assume;
import org.junit.Before;
/**
* Test interoperability: data encoded by the native ISA-L Reed-Solomon
* encoder is decoded by the pure-Java decoder.
*/
public class TestRSRawCoderInteroperable2 extends TestRSRawCoderBase {
@Before
public void setup() {
Assume.assumeTrue(ErasureCodeNative.isNativeCodeLoaded());
this.encoderClass = NativeRSRawEncoder.class;
this.decoderClass = RSRawDecoder.class;
setAllowDump(true);
}
}