HBASE-3514 more Integer.MAX_VALUE for blocksize

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1076464 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Ryan Rawson 2011-03-03 00:09:17 +00:00
parent 7e90ed10b8
commit 5255748de2
4 changed files with 13 additions and 13 deletions

View File

@@ -43,16 +43,16 @@ public class ReadWriteConsistencyControl {
/**
* Get this thread's read point. Used primarily by the memstore scanner to
* know which values to skip (ie: have not been completed/committed to
* know which values to skip (ie: have not been completed/committed to
* memstore).
*/
public static long getThreadReadPoint() {
return perThreadReadPoint.get();
}
/**
/**
* Set the thread read point to the given value. The thread RWCC
* is used by the Memstore scanner so it knows which values to skip.
* is used by the Memstore scanner so it knows which values to skip.
* Give it a value of 0 if you want everything.
*/
public static void setThreadReadPoint(long readPoint) {
@@ -67,7 +67,7 @@ public class ReadWriteConsistencyControl {
perThreadReadPoint.set(rwcc.memstoreReadPoint());
return getThreadReadPoint();
}
/**
* Set the thread RWCC read point to 0 (include everything).
*/

View File

@@ -195,17 +195,17 @@ public abstract class HBaseTestCase extends TestCase {
HTableDescriptor htd = new HTableDescriptor(name);
htd.addFamily(new HColumnDescriptor(fam1, versions,
HColumnDescriptor.DEFAULT_COMPRESSION, false, false,
Integer.MAX_VALUE, HConstants.FOREVER,
HColumnDescriptor.DEFAULT_BLOCKSIZE, HConstants.FOREVER,
HColumnDescriptor.DEFAULT_BLOOMFILTER,
HConstants.REPLICATION_SCOPE_LOCAL));
htd.addFamily(new HColumnDescriptor(fam2, versions,
HColumnDescriptor.DEFAULT_COMPRESSION, false, false,
Integer.MAX_VALUE, HConstants.FOREVER,
HColumnDescriptor.DEFAULT_BLOCKSIZE, HConstants.FOREVER,
HColumnDescriptor.DEFAULT_BLOOMFILTER,
HConstants.REPLICATION_SCOPE_LOCAL));
htd.addFamily(new HColumnDescriptor(fam3, versions,
HColumnDescriptor.DEFAULT_COMPRESSION, false, false,
Integer.MAX_VALUE, HConstants.FOREVER,
HColumnDescriptor.DEFAULT_BLOCKSIZE, HConstants.FOREVER,
HColumnDescriptor.DEFAULT_BLOOMFILTER,
HConstants.REPLICATION_SCOPE_LOCAL));
return htd;

View File

@@ -288,7 +288,7 @@ public class TestSerialization {
@Test public void testScan() throws Exception {
byte[] startRow = "startRow".getBytes();
byte[] stopRow = "stopRow".getBytes();
byte[] fam = "fam".getBytes();
@@ -569,17 +569,17 @@ public class TestSerialization {
HTableDescriptor htd = new HTableDescriptor(name);
htd.addFamily(new HColumnDescriptor(fam1, versions,
HColumnDescriptor.DEFAULT_COMPRESSION, false, false,
Integer.MAX_VALUE, HConstants.FOREVER,
HColumnDescriptor.DEFAULT_BLOCKSIZE, HConstants.FOREVER,
HColumnDescriptor.DEFAULT_BLOOMFILTER,
HConstants.REPLICATION_SCOPE_LOCAL));
htd.addFamily(new HColumnDescriptor(fam2, versions,
HColumnDescriptor.DEFAULT_COMPRESSION, false, false,
Integer.MAX_VALUE, HConstants.FOREVER,
HColumnDescriptor.DEFAULT_BLOCKSIZE, HConstants.FOREVER,
HColumnDescriptor.DEFAULT_BLOOMFILTER,
HConstants.REPLICATION_SCOPE_LOCAL));
htd.addFamily(new HColumnDescriptor(fam3, versions,
HColumnDescriptor.DEFAULT_COMPRESSION, false, false,
Integer.MAX_VALUE, HConstants.FOREVER,
HColumnDescriptor.DEFAULT_BLOCKSIZE, HConstants.FOREVER,
HColumnDescriptor.DEFAULT_BLOOMFILTER,
HConstants.REPLICATION_SCOPE_LOCAL));
return htd;

View File

@@ -499,7 +499,7 @@ public class TestHRegion extends HBaseTestCase {
res = region.checkAndMutate(row1, fam1, qf1, null, put, lockId, true);
assertTrue(res);
}
public void testCheckAndMutate_WithWrongValue() throws IOException{
@@ -2783,7 +2783,7 @@ public class TestHRegion extends HBaseTestCase {
HColumnDescriptor.DEFAULT_COMPRESSION,
HColumnDescriptor.DEFAULT_IN_MEMORY,
HColumnDescriptor.DEFAULT_BLOCKCACHE,
Integer.MAX_VALUE, HColumnDescriptor.DEFAULT_TTL,
HColumnDescriptor.DEFAULT_BLOCKSIZE, HColumnDescriptor.DEFAULT_TTL,
"rowcol",
HColumnDescriptor.DEFAULT_REPLICATION_SCOPE);
HTableDescriptor htd = new HTableDescriptor(TABLE);