Fixed negative data-size and throughput values in the output of the RawErasureCoderBenchmark class

This commit is contained in:
FuzzingTeam 2022-10-11 18:14:07 +05:30
parent 03d600fa82
commit 3c249dcb6b
2 changed files with 10 additions and 0 deletions

View File

@ -320,6 +320,7 @@ public final class RawErasureCoderBenchmark {
private ByteBuffer[] decodeInputs = new ByteBuffer[NUM_ALL_UNITS];
public static void configure(int dataSizeMB, int chunkSizeKB) {
Preconditions.checkArgument(dataSizeMB > 0);
chunkSize = chunkSizeKB * 1024;
// buffer size needs to be a multiple of (numDataUnits * chunkSize)
int round = (int) Math.round(

View File

@ -18,6 +18,7 @@
package org.apache.hadoop.io.erasurecode.rawcoder;
import org.apache.hadoop.io.erasurecode.ErasureCodeNative;
import org.junit.Assert;
import org.junit.Assume;
import org.junit.Test;
@ -33,6 +34,14 @@ public class TestRawErasureCoderBenchmark {
RawErasureCoderBenchmark.CODER.DUMMY_CODER, 2, 100, 1024);
RawErasureCoderBenchmark.performBench("decode",
RawErasureCoderBenchmark.CODER.DUMMY_CODER, 5, 150, 100);
try {
RawErasureCoderBenchmark.performBench("decode",
RawErasureCoderBenchmark.CODER.DUMMY_CODER, 5, -150, 100);
Assert.fail("should have thrown an IllegalArgumentException");
} catch (IllegalArgumentException e) {
// intentionally swallow exception
}
}
@Test