diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
index cd8bafa3104..58755cdfb00 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/HRegion.java
@@ -4827,7 +4827,13 @@ public class HRegion implements HeapSize { // , Writable{
           if (idx < results.size() &&
               results.get(idx).matchingQualifier(column.getKey())) {
             KeyValue kv = results.get(idx);
-            amount += Bytes.toLong(kv.getBuffer(), kv.getValueOffset(), kv.getValueLength());
+            if(kv.getValueLength() == Bytes.SIZEOF_LONG) {
+              amount += Bytes.toLong(kv.getBuffer(), kv.getValueOffset(), Bytes.SIZEOF_LONG);
+            } else {
+              // throw DoNotRetryIOException instead of IllegalArgumentException
+              throw new DoNotRetryIOException(
+                  "Attempted to increment field that isn't 64 bits wide");
+            }
             idx++;
           }
 
@@ -4876,11 +4882,10 @@ public class HRegion implements HeapSize { // , Writable{
       }
     } finally {
       closeRegionOperation();
+      long after = EnvironmentEdgeManager.currentTimeMillis();
+      this.opMetrics.updateIncrementMetrics(increment.getFamilyMap().keySet(), after - before);
     }
-    long after = EnvironmentEdgeManager.currentTimeMillis();
-    this.opMetrics.updateIncrementMetrics(increment.getFamilyMap().keySet(), after - before);
-
     if (flush) {
       // Request a cache flush.  Do it outside update lock.
       requestFlush();
     }
@@ -4930,7 +4935,7 @@ public class HRegion implements HeapSize { // , Writable{
 
         if (!results.isEmpty()) {
           KeyValue kv = results.get(0);
-          if(kv.getValueLength() == 8){
+          if(kv.getValueLength() == Bytes.SIZEOF_LONG){
            byte [] buffer = kv.getBuffer();
            int valueOffset = kv.getValueOffset();
            result += Bytes.toLong(buffer, valueOffset, Bytes.SIZEOF_LONG);
@@ -4986,7 +4991,7 @@ public class HRegion implements HeapSize { // , Writable{
       requestFlush();
     }
     if (wrongLength) {
-      throw new IOException(
+      throw new DoNotRetryIOException(
           "Attempted to increment field that isn't 64 bits wide");
     }
     return result;
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
index b8e1a48e820..e7faac64212 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestFromClientSide.java
@@ -4230,7 +4230,7 @@ public class TestFromClientSide {
 
   @Test
   public void testIncrementWithDeletes() throws Exception {
-    LOG.info("Starting testIncrement");
+    LOG.info("Starting testIncrementWithDeletes");
     final byte [] TABLENAME = Bytes.toBytes("testIncrementWithDeletes");
     HTable ht = TEST_UTIL.createTable(TABLENAME, FAMILY);
     final byte[] COLUMN = Bytes.toBytes("column");
@@ -4249,6 +4249,32 @@ public class TestFromClientSide {
     assertEquals(5, Bytes.toLong(r.getValue(FAMILY, COLUMN)));
   }
 
+  @Test
+  public void testIncrementingInvalidValue() throws Exception {
+    LOG.info("Starting testIncrementingInvalidValue");
+    final byte [] TABLENAME = Bytes.toBytes("testIncrementingInvalidValue");
+    HTable ht = TEST_UTIL.createTable(TABLENAME, FAMILY);
+    final byte[] COLUMN = Bytes.toBytes("column");
+    Put p = new Put(ROW);
+    // write an integer here (not a Long)
+    p.add(FAMILY, COLUMN, Bytes.toBytes(5));
+    ht.put(p);
+    try {
+      ht.incrementColumnValue(ROW, FAMILY, COLUMN, 5);
+      fail("Should have thrown DoNotRetryIOException");
+    } catch (DoNotRetryIOException iox) {
+      // success
+    }
+    Increment inc = new Increment(ROW);
+    inc.addColumn(FAMILY, COLUMN, 5);
+    try {
+      ht.increment(inc);
+      fail("Should have thrown DoNotRetryIOException");
+    } catch (DoNotRetryIOException iox) {
+      // success
+    }
+  }
+
   @Test
   public void testIncrement() throws Exception {
     LOG.info("Starting testIncrement");
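
For context, a minimal client-side sketch of what the stricter check means for callers. This is an illustration only, not part of the patch: the cluster configuration, table name "t1", and the row/column names are assumptions, written against the same 0.94-era client API the test above uses.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.DoNotRetryIOException;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;

public class IncrementWidthCheck {
  public static void main(String[] args) throws Exception {
    Configuration conf = HBaseConfiguration.create();
    HTable table = new HTable(conf, "t1"); // hypothetical table
    byte[] row = Bytes.toBytes("r1");
    byte[] family = Bytes.toBytes("f");
    byte[] qualifier = Bytes.toBytes("q");

    // Store a 4-byte int; HBase counters must be 8-byte longs.
    Put p = new Put(row);
    p.add(family, qualifier, Bytes.toBytes(5)); // Bytes.toBytes(int) -> 4 bytes
    table.put(p);

    try {
      table.incrementColumnValue(row, family, qualifier, 1);
    } catch (DoNotRetryIOException e) {
      // With this patch the malformed counter fails fast on both the
      // incrementColumnValue and Increment paths.
      System.out.println("Fast failure as expected: " + e.getMessage());
    } finally {
      table.close();
    }
  }
}

The visible effect is the retry behavior: DoNotRetryIOException tells the HBase client not to re-submit the operation, so a wrongly sized counter value surfaces immediately instead of cycling through the configured retries the way a plain IOException would.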