HBASE-18587 Fix flaky TestFileIOEngine

This short-circuits reads and writes of length 0 in FileIOEngine and removes the flakiness in TestFileIOEngine: the test's randomly generated block length could previously be 0, so the zero-length case was hit only on some runs; it now has a dedicated test.

Signed-off-by: Michael Stack <stack@apache.org>
Zach York, 2017-08-10 16:55:28 -07:00 (committed by Michael Stack)
parent 2b88edfd8d
commit 5280c100ff
2 changed files with 89 additions and 59 deletions

org/apache/hadoop/hbase/io/hfile/bucket/FileIOEngine.java

@@ -33,6 +33,7 @@ import org.apache.hadoop.hbase.io.hfile.CacheableDeserializer;
 import org.apache.hadoop.hbase.io.hfile.Cacheable.MemoryType;
 import org.apache.hadoop.hbase.nio.ByteBuff;
 import org.apache.hadoop.hbase.nio.SingleByteBuff;
+import org.apache.hadoop.hbase.shaded.com.google.common.base.Preconditions;
 import org.apache.hadoop.util.StringUtils;

 /**
@@ -122,15 +123,18 @@ public class FileIOEngine implements IOEngine {
   @Override
   public Cacheable read(long offset, int length, CacheableDeserializer<Cacheable> deserializer)
       throws IOException {
+    Preconditions.checkArgument(length >= 0, "Length of read can not be less than 0.");
     ByteBuffer dstBuffer = ByteBuffer.allocate(length);
-    accessFile(readAccessor, dstBuffer, offset);
-    // The buffer created out of the fileChannel is formed by copying the data from the file
-    // Hence in this case there is no shared memory that we point to. Even if the BucketCache evicts
-    // this buffer from the file the data is already copied and there is no need to ensure that
-    // the results are not corrupted before consuming them.
-    if (dstBuffer.limit() != length) {
-      throw new RuntimeException("Only " + dstBuffer.limit() + " bytes read, " + length
-          + " expected");
+    if (length != 0) {
+      accessFile(readAccessor, dstBuffer, offset);
+      // The buffer created out of the fileChannel is formed by copying the data from the file
+      // Hence in this case there is no shared memory that we point to. Even if the BucketCache evicts
+      // this buffer from the file the data is already copied and there is no need to ensure that
+      // the results are not corrupted before consuming them.
+      if (dstBuffer.limit() != length) {
+        throw new RuntimeException("Only " + dstBuffer.limit() + " bytes read, " + length
+            + " expected");
+      }
     }
     return deserializer.deserialize(new SingleByteBuff(dstBuffer), true, MemoryType.EXCLUSIVE);
   }
@@ -143,6 +147,9 @@ public class FileIOEngine implements IOEngine {
    */
   @Override
   public void write(ByteBuffer srcBuffer, long offset) throws IOException {
+    if (!srcBuffer.hasRemaining()) {
+      return;
+    }
     accessFile(writeAccessor, srcBuffer, offset);
   }

org/apache/hadoop/hbase/io/hfile/bucket/TestFileIOEngine.java

@@ -18,6 +18,7 @@
  */
 package org.apache.hadoop.hbase.io.hfile.bucket;

+import static org.junit.Assert.assertArrayEquals;
 import static org.junit.Assert.assertTrue;

 import java.io.File;
@@ -30,6 +31,8 @@ import org.apache.hadoop.hbase.io.hfile.bucket.TestByteBufferIOEngine.BufferGrabbingDeserializer;
 import org.apache.hadoop.hbase.nio.ByteBuff;
 import org.apache.hadoop.hbase.testclassification.IOTests;
 import org.apache.hadoop.hbase.testclassification.SmallTests;
+import org.junit.After;
+import org.junit.Before;
 import org.junit.Test;
 import org.junit.experimental.categories.Category;
@@ -38,62 +41,82 @@ import org.junit.experimental.categories.Category;
  */
 @Category({IOTests.class, SmallTests.class})
 public class TestFileIOEngine {
+  private static final long TOTAL_CAPACITY = 6 * 1024 * 1024; // 6 MB
+  private static final String[] FILE_PATHS = {"testFileIOEngine1", "testFileIOEngine2",
+      "testFileIOEngine3"};
+  private static final long SIZE_PER_FILE = TOTAL_CAPACITY / FILE_PATHS.length; // 2 MB per File
+  private final static List<Long> boundaryStartPositions = new ArrayList<Long>();
+  private final static List<Long> boundaryStopPositions = new ArrayList<Long>();
+
+  private FileIOEngine fileIOEngine;
+
+  static {
+    boundaryStartPositions.add(0L);
+    for (int i = 1; i < FILE_PATHS.length; i++) {
+      boundaryStartPositions.add(SIZE_PER_FILE * i - 1);
+      boundaryStartPositions.add(SIZE_PER_FILE * i);
+      boundaryStartPositions.add(SIZE_PER_FILE * i + 1);
+    }
+    for (int i = 1; i < FILE_PATHS.length; i++) {
+      boundaryStopPositions.add(SIZE_PER_FILE * i - 1);
+      boundaryStopPositions.add(SIZE_PER_FILE * i);
+      boundaryStopPositions.add(SIZE_PER_FILE * i + 1);
+    }
+    boundaryStopPositions.add(SIZE_PER_FILE * FILE_PATHS.length - 1);
+  }
+
+  @Before
+  public void setUp() throws IOException {
+    fileIOEngine = new FileIOEngine(TOTAL_CAPACITY, false, FILE_PATHS);
+  }
+
+  @After
+  public void cleanUp() {
+    fileIOEngine.shutdown();
+    for (String filePath : FILE_PATHS) {
+      File file = new File(filePath);
+      if (file.exists()) {
+        file.delete();
+      }
+    }
+  }
+
   @Test
   public void testFileIOEngine() throws IOException {
-    long totalCapacity = 6 * 1024 * 1024; // 6 MB
-    String[] filePaths = { "testFileIOEngine1", "testFileIOEngine2",
-        "testFileIOEngine3" };
-    long sizePerFile = totalCapacity / filePaths.length; // 2 MB per File
-    List<Long> boundaryStartPositions = new ArrayList<Long>();
-    boundaryStartPositions.add(0L);
-    for (int i = 1; i < filePaths.length; i++) {
-      boundaryStartPositions.add(sizePerFile * i - 1);
-      boundaryStartPositions.add(sizePerFile * i);
-      boundaryStartPositions.add(sizePerFile * i + 1);
-    }
-    List<Long> boundaryStopPositions = new ArrayList<Long>();
-    for (int i = 1; i < filePaths.length; i++) {
-      boundaryStopPositions.add(sizePerFile * i - 1);
-      boundaryStopPositions.add(sizePerFile * i);
-      boundaryStopPositions.add(sizePerFile * i + 1);
-    }
-    boundaryStopPositions.add(sizePerFile * filePaths.length - 1);
-    FileIOEngine fileIOEngine = new FileIOEngine(totalCapacity, false, filePaths);
-    try {
-      for (int i = 0; i < 500; i++) {
-        int len = (int) Math.floor(Math.random() * 100);
-        long offset = (long) Math.floor(Math.random() * totalCapacity % (totalCapacity - len));
-        if (i < boundaryStartPositions.size()) {
-          // make the boundary start positon
-          offset = boundaryStartPositions.get(i);
-        } else if ((i - boundaryStartPositions.size()) < boundaryStopPositions.size()) {
-          // make the boundary stop positon
-          offset = boundaryStopPositions.get(i - boundaryStartPositions.size()) - len + 1;
-        } else if (i % 2 == 0) {
-          // make the cross-files block writing/reading
-          offset = Math.max(1, i % filePaths.length) * sizePerFile - len / 2;
-        }
-        byte[] data1 = new byte[len];
-        for (int j = 0; j < data1.length; ++j) {
-          data1[j] = (byte) (Math.random() * 255);
-        }
-        fileIOEngine.write(ByteBuffer.wrap(data1), offset);
-        BufferGrabbingDeserializer deserializer = new BufferGrabbingDeserializer();
-        fileIOEngine.read(offset, len, deserializer);
-        ByteBuff data2 = deserializer.getDeserializedByteBuff();
-        for (int j = 0; j < data1.length; ++j) {
-          assertTrue(data1[j] == data2.get(j));
-        }
-      }
-    } finally {
-      fileIOEngine.shutdown();
-      for (String filePath : filePaths) {
-        File file = new File(filePath);
-        if (file.exists()) {
-          file.delete();
-        }
-      }
-    }
+    for (int i = 0; i < 500; i++) {
+      int len = (int) Math.floor(Math.random() * 100) + 1;
+      long offset = (long) Math.floor(Math.random() * TOTAL_CAPACITY % (TOTAL_CAPACITY - len));
+      if (i < boundaryStartPositions.size()) {
+        // make the boundary start positon
+        offset = boundaryStartPositions.get(i);
+      } else if ((i - boundaryStartPositions.size()) < boundaryStopPositions.size()) {
+        // make the boundary stop positon
+        offset = boundaryStopPositions.get(i - boundaryStartPositions.size()) - len + 1;
+      } else if (i % 2 == 0) {
+        // make the cross-files block writing/reading
+        offset = Math.max(1, i % FILE_PATHS.length) * SIZE_PER_FILE - len / 2;
+      }
+      byte[] data1 = new byte[len];
+      for (int j = 0; j < data1.length; ++j) {
+        data1[j] = (byte) (Math.random() * 255);
+      }
+      fileIOEngine.write(ByteBuffer.wrap(data1), offset);
+      BufferGrabbingDeserializer deserializer = new BufferGrabbingDeserializer();
+      fileIOEngine.read(offset, len, deserializer);
+      ByteBuff data2 = deserializer.getDeserializedByteBuff();
+      assertArrayEquals(data1, data2.array());
+    }
   }
+
+  @Test
+  public void testFileIOEngineHandlesZeroLengthInput() throws IOException {
+    byte[] data1 = new byte[0];
+
+    fileIOEngine.write(ByteBuffer.wrap(data1), 0);
+    BufferGrabbingDeserializer deserializer = new BufferGrabbingDeserializer();
+    fileIOEngine.read(0, 0, deserializer);
+    ByteBuff data2 = deserializer.getDeserializedByteBuff();
+    assertArrayEquals(data1, data2.array());
+  }
 }