HADOOP-12011 Allow to dump verbose information to ease debugging in raw erasure coders. Contributed by Kai Zheng

Kai Zheng 2015-06-02 22:05:16 +08:00
parent 014bd32c58
commit 0799e1e4b6
5 changed files with 137 additions and 0 deletions

View File

@@ -61,4 +61,7 @@
(vinayakumarb)
HADOOP-11847. Enhance raw coder allowing to read least required inputs in decoding.
(Kai Zheng)
HADOOP-12011. Allow to dump verbose information to ease debugging in raw erasure coders.
(Kai Zheng)

View File

@@ -0,0 +1,85 @@
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.io.erasurecode.rawcoder.util;
import org.apache.hadoop.io.erasurecode.ECChunk;
/**
* A dump utility class for debugging data erasure coding/decoding issues.
* It is not intended for use in production code.
*/
public final class DumpUtil {
private static final String HEX_CHARS_STR = "0123456789ABCDEF";
private static final char[] HEX_CHARS = HEX_CHARS_STR.toCharArray();
private DumpUtil() {
// Never called; this is a static utility class.
}
/**
* Convert up to {@code limit} bytes into a hex string of the form
* "0x02 02 00 80".
*/
public static String bytesToHex(byte[] bytes, int limit) {
if (limit > bytes.length) {
limit = bytes.length;
}
int len = limit * 2;
len += limit; // for the ' ' separator appended after each byte
len += 2; // for '0x' prefix
char[] hexChars = new char[len];
hexChars[0] = '0';
hexChars[1] = 'x';
for (int j = 0; j < limit; j++) {
int v = bytes[j] & 0xFF;
hexChars[j * 3 + 2] = HEX_CHARS[v >>> 4];
hexChars[j * 3 + 3] = HEX_CHARS[v & 0x0F];
hexChars[j * 3 + 4] = ' ';
}
return new String(hexChars);
}
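// Worked size check for bytesToHex (sample bytes are made up): with
// limit = 4, len = 4*2 + 4 + 2 = 14 chars, i.e. the "0x" prefix plus one
// "HH " triple per byte, so bytes {0x02, 0x02, 0x00, 0x80} render as
// "0x02 02 00 80 " (the last byte also gets the trailing space).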
/**
* Print the data of an array of chunks in hex format.
* @param header a header line printed before the chunks
* @param chunks the chunks to dump
*/
public static void dumpChunks(String header, ECChunk[] chunks) {
System.out.println();
System.out.println(header);
for (int i = 0; i < chunks.length; i++) {
dumpChunk(chunks[i]);
}
System.out.println();
}
/**
* Print the data of a chunk in hex format.
* @param chunk the chunk to dump
*/
public static void dumpChunk(ECChunk chunk) {
String str;
if (chunk == null) {
str = "<EMPTY>";
} else {
byte[] bytes = chunk.toBytesArray();
str = DumpUtil.bytesToHex(bytes, 16);
}
System.out.println(str);
}
}
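A minimal usage sketch, assuming DumpUtil is on the classpath; the DumpUtilDemo class and the sample bytes are made-up illustrations, not part of this commit:

import org.apache.hadoop.io.erasurecode.rawcoder.util.DumpUtil;

public class DumpUtilDemo {
  public static void main(String[] args) {
    // Made-up sample bytes to exercise the hex formatting.
    byte[] sample = new byte[] {0x02, 0x02, 0x00, (byte) 0x80};
    // The limit (16) is clamped to the array length; prints "0x02 02 00 80 "
    // (two hex digits plus a trailing space per byte).
    System.out.println(DumpUtil.bytesToHex(sample, 16));
  }
}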

View File

@@ -18,6 +18,7 @@
package org.apache.hadoop.io.erasurecode;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.erasurecode.rawcoder.util.DumpUtil;
import java.nio.ByteBuffer;
import java.util.Arrays;
@@ -32,6 +33,8 @@ import static org.junit.Assert.assertTrue;
public abstract class TestCoderBase {
protected static Random RAND = new Random();
private boolean allowDump = true;
private Configuration conf;
protected int numDataUnits;
protected int numParityUnits;
@@ -67,6 +70,15 @@ public abstract class TestCoderBase {
this.zeroChunkBytes = new byte[chunkSize]; // Filled with zeros by default
}
/**
* Set to true during setup to dump test settings and coding data, which is
* useful for debugging.
* @param allowDump whether to dump debugging information
*/
protected void setAllowDump(boolean allowDump) {
this.allowDump = allowDump;
}
/**
* Prepare before running the case.
* @param conf
@@ -430,6 +442,36 @@ public abstract class TestCoderBase {
return bytesArr;
}
/**
* Dump all the settings used in the test case if allowDump is enabled.
*/
protected void dumpSetting() {
if (allowDump) {
StringBuilder sb = new StringBuilder("Erasure coder test settings:\n");
sb.append(" numDataUnits=").append(numDataUnits);
sb.append(" numParityUnits=").append(numParityUnits);
sb.append(" chunkSize=").append(chunkSize).append("\n");
sb.append(" erasedDataIndexes=").
append(Arrays.toString(erasedDataIndexes));
sb.append(" erasedParityIndexes=").
append(Arrays.toString(erasedParityIndexes));
sb.append(" usingDirectBuffer=").append(usingDirectBuffer).append("\n");
System.out.println(sb.toString());
}
}
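// Illustrative output of dumpSetting() with hypothetical settings
// (numDataUnits=6, numParityUnits=3, chunkSize=512, erasing data index 0
// and parity index 0, direct buffers enabled):
//
//   Erasure coder test settings:
//    numDataUnits=6 numParityUnits=3 chunkSize=512
//    erasedDataIndexes=[0] erasedParityIndexes=[0] usingDirectBuffer=true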
/**
* Dump the chunks, prefixed with a header, if allowDump is enabled.
* @param header a header line printed before the chunks
* @param chunks the chunks to dump
*/
protected void dumpChunks(String header, ECChunk[] chunks) {
if (allowDump) {
DumpUtil.dumpChunks(header, chunks);
}
}
/**
* Make some chunks messy or no longer correct

View File

@@ -29,6 +29,7 @@ public class TestRSRawCoder extends TestRSRawCoderBase {
public void setup() {
this.encoderClass = RSRawEncoder.class;
this.decoderClass = RSRawDecoder.class;
setAllowDump(false); // Change to true to allow verbose dump for debugging
}
@Test

View File

@@ -126,11 +126,14 @@ public abstract class TestRawCoderBase extends TestCoderBase {
boolean useBadInput, boolean useBadOutput) {
setChunkSize(chunkSize);
dumpSetting();
// Generate data and encode
ECChunk[] dataChunks = prepareDataChunksForEncoding();
if (useBadInput) {
corruptSomeChunk(dataChunks);
}
dumpChunks("Testing data chunks", dataChunks);
ECChunk[] parityChunks = prepareParityChunksForEncoding();
@@ -139,6 +142,7 @@ public abstract class TestRawCoderBase extends TestCoderBase {
ECChunk[] clonedDataChunks = cloneChunksWithData(dataChunks);
encoder.encode(dataChunks, parityChunks);
dumpChunks("Encoded parity chunks", parityChunks);
// Backup and erase some chunks
ECChunk[] backupChunks = backupAndEraseChunks(clonedDataChunks, parityChunks);
@@ -155,7 +159,9 @@ public abstract class TestRawCoderBase extends TestCoderBase {
corruptSomeChunk(recoveredChunks);
}
dumpChunks("Decoding input chunks", inputChunks);
decoder.decode(inputChunks, getErasedIndexesForDecoding(), recoveredChunks);
dumpChunks("Decoded/recovered chunks", recoveredChunks);
// Compare
compareAndVerify(backupChunks, recoveredChunks);