From 34f05ef4ae72ae99a877805b3a01d9230f1cf643 Mon Sep 17 00:00:00 2001
From: Michael Stack
Date: Mon, 14 Jul 2008 21:38:45 +0000
Subject: [PATCH] HBASE-742 Column length limit is not enforced

git-svn-id: https://svn.apache.org/repos/asf/hadoop/hbase/trunk@676748 13f79535-47bb-0310-9956-ffa450edef68
---
 CHANGES.txt                                   |  1 +
 .../hbase/regionserver/HRegionServer.java     | 26 +++++++++++++
 .../hadoop/hbase/client/TestBatchUpdate.java  | 39 +++++++++++++++++++
 3 files changed, 66 insertions(+)

diff --git a/CHANGES.txt b/CHANGES.txt
index 2de8c733653..925f574151c 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -184,6 +184,7 @@ Trunk (unreleased changes)
    HBASE-739 HBaseAdmin.createTable() using old HTableDescription doesn't work
               (Izaak Rubin via Stack)
    HBASE-744 BloomFilter serialization/deserialization broken
+   HBASE-742 Column length limit is not enforced (Jean-Daniel Cryans via Stack)
 
   IMPROVEMENTS
    HBASE-559 MR example job to count table rows
diff --git a/src/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java b/src/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
index e0c6af5fbe2..8a6a7506730 100644
--- a/src/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
+++ b/src/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java
@@ -71,6 +71,7 @@ import org.apache.hadoop.hbase.RemoteExceptionHandler;
 import org.apache.hadoop.hbase.UnknownScannerException;
 import org.apache.hadoop.hbase.Leases.LeaseStillHeldException;
 import org.apache.hadoop.hbase.filter.RowFilterInterface;
+import org.apache.hadoop.hbase.io.BatchOperation;
 import org.apache.hadoop.hbase.io.BatchUpdate;
 import org.apache.hadoop.hbase.io.Cell;
 import org.apache.hadoop.hbase.io.HbaseMapWritable;
@@ -1146,6 +1147,7 @@ public class HRegionServer implements HConstants, HRegionInterface, Runnable {
     checkOpen();
     this.requestCount.incrementAndGet();
     HRegion region = getRegion(regionName);
+    validateValuesLength(b, region);
     try {
       cacheFlusher.reclaimMemcacheMemory();
       region.batchUpdate(b);
@@ -1158,6 +1160,30 @@ public class HRegionServer implements HConstants, HRegionInterface, Runnable {
     }
   }
 
+  /**
+   * Utility method to verify that no value in a batch update exceeds the
+   * maximum value length configured for its column family.
+   * @param batchUpdate The update to verify
+   * @throws IOException Thrown if a value is too long
+   */
+  private void validateValuesLength(BatchUpdate batchUpdate,
+      HRegion region) throws IOException {
+    HTableDescriptor desc = region.getTableDesc();
+    for (Iterator<BatchOperation> iter =
+        batchUpdate.iterator(); iter.hasNext();) {
+      BatchOperation operation = iter.next();
+      int maxLength =
+        desc.getFamily(HStoreKey.getFamily(operation.getColumn())).
+          getMaxValueLength();
+      if (operation.getValue() != null &&
+          operation.getValue().length > maxLength) {
+        throw new IOException("Value in column " +
+          Bytes.toString(operation.getColumn()) + " is too long. " +
+          operation.getValue().length + " instead of " + maxLength);
+      }
+    }
+  }
+
   //
   // remote scanner interface
   //
diff --git a/src/test/org/apache/hadoop/hbase/client/TestBatchUpdate.java b/src/test/org/apache/hadoop/hbase/client/TestBatchUpdate.java
index ba6748daf7a..7925527d57d 100644
--- a/src/test/org/apache/hadoop/hbase/client/TestBatchUpdate.java
+++ b/src/test/org/apache/hadoop/hbase/client/TestBatchUpdate.java
@@ -38,7 +38,11 @@ import org.apache.hadoop.hbase.util.Bytes;
 public class TestBatchUpdate extends HBaseClusterTestCase {
   private static final String CONTENTS_STR = "contents:";
   private static final byte [] CONTENTS = Bytes.toBytes(CONTENTS_STR);
+  private static final String SMALLFAM_STR = "smallfam:";
+  private static final byte [] SMALLFAM = Bytes.toBytes(SMALLFAM_STR);
+  private static final int SMALL_LENGTH = 1;
   private byte[] value;
+  private byte[] smallValue;
   private HTableDescriptor desc = null;
   private HTable table = null;
 
@@ -49,6 +53,7 @@ public class TestBatchUpdate extends HBaseClusterTestCase {
   public TestBatchUpdate() throws UnsupportedEncodingException {
     super();
     value = "abcd".getBytes(HConstants.UTF8_ENCODING);
+    smallValue = "a".getBytes(HConstants.UTF8_ENCODING);
   }
 
   /**
@@ -59,6 +64,12 @@ public class TestBatchUpdate extends HBaseClusterTestCase {
     super.setUp();
     this.desc = new HTableDescriptor("test");
     desc.addFamily(new HColumnDescriptor(CONTENTS_STR));
+    desc.addFamily(new HColumnDescriptor(SMALLFAM,
+        HColumnDescriptor.DEFAULT_VERSIONS,
+        HColumnDescriptor.DEFAULT_COMPRESSION,
+        HColumnDescriptor.DEFAULT_IN_MEMORY,
+        HColumnDescriptor.DEFAULT_BLOCKCACHE, SMALL_LENGTH,
+        HColumnDescriptor.DEFAULT_TTL, HColumnDescriptor.DEFAULT_BLOOMFILTER));
     HBaseAdmin admin = new HBaseAdmin(conf);
     admin.createTable(desc);
     table = new HTable(conf, desc.getName());
@@ -86,4 +97,32 @@ public class TestBatchUpdate extends HBaseClusterTestCase {
       }
     }
   }
+
+  public void testBatchUpdateMaxLength() {
+    // Try to insert a value that is longer than the column family allows
+    BatchUpdate batchUpdate = new BatchUpdate("row1");
+    batchUpdate.put(SMALLFAM, value);
+    try {
+      table.commit(batchUpdate);
+      fail("Value is too long, should throw exception");
+    } catch (IOException e) {
+      // This is expected
+    }
+    // Verify that the rejected value was not inserted
+    try {
+      Cell cell = table.get("row1", SMALLFAM_STR);
+      assertNull(cell);
+    } catch (IOException e) {
+      e.printStackTrace();
+      fail("This is unexpected");
+    }
+    // Try to put a value that fits within the limit
+    batchUpdate = new BatchUpdate("row1");
+    batchUpdate.put(SMALLFAM, smallValue);
+    try {
+      table.commit(batchUpdate);
+    } catch (IOException e) {
+      fail("Value is within the limit, should not throw exception");
+    }
+  }
 }