HBASE-16283 Batch Append/Increment will always fail if set ReturnResults to false (Allan Yang)

tedyu 2016-10-17 15:42:06 -07:00
parent e1a6c94033
commit 0c304a049b
5 changed files with 57 additions and 5 deletions
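For orientation (an aside, not part of the commit): this is the client-side scenario the patch repairs. A minimal sketch follows; the connection setup and the table name "t", family "f", and qualifier "q" are illustrative assumptions, and the committed tests further down exercise the same pattern against the test cluster.

// Minimal client-side sketch (illustrative only, not taken from the patch).
import java.util.ArrayList;
import java.util.List;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Append;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.util.Bytes;

public class BatchAppendWithoutResults {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    try (Connection conn = ConnectionFactory.createConnection(conf);
         Table table = conn.getTable(TableName.valueOf("t"))) {
      List<Append> appends = new ArrayList<>();
      for (String value : new String[] { "value1", "value2" }) {
        Append a = new Append(Bytes.toBytes("row1"));
        a.add(Bytes.toBytes("f"), Bytes.toBytes("q"), Bytes.toBytes(value));
        a.setReturnResults(false); // caller does not need the appended cells back
        appends.add(a);
      }
      Object[] results = new Object[appends.size()];
      // Before this fix the batch was reported as failed because each append
      // came back as a null Result; with the fix each slot is an empty Result.
      table.batch(appends, results);
      for (Object r : results) {
        System.out.println(((Result) r).isEmpty()); // prints "true" twice
      }
    }
  }
}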

@@ -7256,7 +7256,7 @@ public class HRegion implements HeapSize, PropagatingConfigurationObserver, Regi
         dropMemstoreContents();
       }
       // If results is null, then client asked that we not return the calculated results.
-      return results != null && returnResults? Result.create(results): null;
+      return results != null && returnResults? Result.create(results): Result.EMPTY_RESULT;
     } finally {
       // Call complete always, even on success. doDelta is doing a Get READ_UNCOMMITTED when it goes
       // to get current value under an exclusive lock so no need so no need to wait to return to
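A note on the one-line change above (an aside, not from the commit): Result.EMPTY_RESULT is a shared Result carrying no cells, so the batch response path can probe every per-operation slot as a real object instead of tripping over a null. A tiny sketch of that contract:

import org.apache.hadoop.hbase.client.Result;

public class EmptyResultContract {
  public static void main(String[] args) {
    Result r = Result.EMPTY_RESULT;   // what the region now returns when returnResults is false
    System.out.println(r.isEmpty());  // true - no cells
    System.out.println(r.size());     // 0
  }
}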

@@ -4448,6 +4448,30 @@ public class TestFromClientSide {
     }
   }
 
+  @Test
+  public void testBatchAppendWithReturnResultFalse() throws Exception {
+    LOG.info("Starting testBatchAppendWithReturnResultFalse");
+    final TableName TABLENAME = TableName.valueOf("testBatchAppend");
+    Table table = TEST_UTIL.createTable(TABLENAME, FAMILY);
+    Append append1 = new Append(Bytes.toBytes("row1"));
+    append1.setReturnResults(false);
+    append1.add(FAMILY, Bytes.toBytes("f1"), Bytes.toBytes("value1"));
+    Append append2 = new Append(Bytes.toBytes("row1"));
+    append2.setReturnResults(false);
+    append2.add(FAMILY, Bytes.toBytes("f1"), Bytes.toBytes("value2"));
+    List<Append> appends = new ArrayList<>();
+    appends.add(append1);
+    appends.add(append2);
+    Object[] results = new Object[2];
+    table.batch(appends, results);
+    assertTrue(results.length == 2);
+    for(Object r : results) {
+      Result result = (Result)r;
+      assertTrue(result.isEmpty());
+    }
+    table.close();
+  }
+
   @Test
   public void testAppend() throws Exception {
     LOG.info("Starting testAppend");
@@ -4462,7 +4486,7 @@ public class TestFromClientSide {
     a.add(FAMILY, QUALIFIERS[0], v1);
     a.add(FAMILY, QUALIFIERS[1], v2);
     a.setReturnResults(false);
-    assertNullResult(t.append(a));
+    assertEmptyResult(t.append(a));
     a = new Append(ROW);
     a.add(FAMILY, QUALIFIERS[0], v2);

@@ -19,10 +19,13 @@
 package org.apache.hadoop.hbase.client;
 
 import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertTrue;
 import static org.junit.Assert.fail;
 
 import java.io.IOException;
+import java.util.ArrayList;
 import java.util.HashMap;
+import java.util.List;
 import java.util.Map;
 
 import org.apache.commons.logging.Log;
@@ -176,6 +179,30 @@ public class TestIncrementsFromClientSide {
     }
   }
 
+  @Test
+  public void testBatchIncrementsWithReturnResultFalse() throws Exception {
+    LOG.info("Starting testBatchIncrementsWithReturnResultFalse");
+    final TableName TABLENAME = TableName.valueOf("testBatchAppend");
+    Table table = TEST_UTIL.createTable(TABLENAME, FAMILY);
+    Increment inc1 = new Increment(Bytes.toBytes("row2"));
+    inc1.setReturnResults(false);
+    inc1.addColumn(FAMILY, Bytes.toBytes("f1"), 1);
+    Increment inc2 = new Increment(Bytes.toBytes("row2"));
+    inc2.setReturnResults(false);
+    inc2.addColumn(FAMILY, Bytes.toBytes("f1"), 1);
+    List<Increment> incs = new ArrayList<>();
+    incs.add(inc1);
+    incs.add(inc2);
+    Object[] results = new Object[2];
+    table.batch(incs, results);
+    assertTrue(results.length == 2);
+    for(Object r : results) {
+      Result result = (Result)r;
+      assertTrue(result.isEmpty());
+    }
+    table.close();
+  }
+
   @Test
   public void testIncrementInvalidArguments() throws Exception {
     LOG.info("Starting " + this.name.getMethodName());

@@ -132,7 +132,7 @@ public class TestAtomicOperation {
     a.setReturnResults(false);
     a.add(fam1, qual1, Bytes.toBytes(v1));
     a.add(fam1, qual2, Bytes.toBytes(v2));
-    assertNull(region.append(a, HConstants.NO_NONCE, HConstants.NO_NONCE));
+    assertTrue(region.append(a, HConstants.NO_NONCE, HConstants.NO_NONCE).isEmpty());
     a = new Append(row);
     a.add(fam1, qual1, Bytes.toBytes(v2));
     a.add(fam1, qual2, Bytes.toBytes(v1));

@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.regionserver.wal;
 
 import static org.junit.Assert.assertEquals;
 import static org.junit.Assert.assertNull;
+import static org.junit.Assert.assertTrue;
 
 import java.io.IOException;
 import java.util.Arrays;
@@ -238,7 +239,7 @@ public class TestDurability {
     inc1.setReturnResults(false);
     inc1.addColumn(FAMILY, col1, 1);
     Result res = region.increment(inc1);
-    assertNull(res);
+    assertTrue(res.isEmpty());
   }
 
   private Put newPut(Durability durability) {