HBASE-2662 TestScannerResource.testScannerResource broke in trunk

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@951192 13f79535-47bb-0310-9956-ffa450edef68
Author: Michael Stack
Date:   2010-06-03 22:52:42 +00:00
parent c6cfd1b9a6
commit e91448f040
3 changed files with 10 additions and 12 deletions

CHANGES.txt

@@ -371,6 +371,7 @@ Release 0.21.0 - Unreleased
    HBASE-2560  Fix IllegalArgumentException when manually splitting table
                from web UI
    HBASE-2657  TestTableResource is broken in trunk
+   HBASE-2662  TestScannerResource.testScannerResource broke in trunk
 
   IMPROVEMENTS
    HBASE-1760  Cleanup TODOs in HTable

org/apache/hadoop/hbase/regionserver/wal/HLog.java

@@ -19,6 +19,8 @@
  */
 package org.apache.hadoop.hbase.regionserver.wal;
 
+import static org.apache.hadoop.hbase.util.FSUtils.recoverFileLease;
+
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.EOFException;
@@ -31,7 +33,6 @@ import java.net.URLEncoder;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashMap;
-import java.util.HashSet;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.ListIterator;
@@ -56,7 +57,6 @@ import java.util.regex.Pattern;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -73,14 +73,9 @@ import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ClassSize;
 import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.util.Threads;
-import org.apache.hadoop.hdfs.DistributedFileSystem;
-import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;
-import org.apache.hadoop.hdfs.protocol.FSConstants;
-import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.io.Writable;
 
-import com.google.common.util.concurrent.NamingThreadFactory;
-import static org.apache.hadoop.hbase.util.FSUtils.recoverFileLease;
+import com.google.common.util.concurrent.NamingThreadFactory;
 
 /**
  * HLog stores all the edits to the HStore. Its the hbase write-ahead-log
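
The import churn above is the whole HLog side of this commit: the hand-rolled lease-recovery plumbing (DistributedFileSystem, AlreadyBeingCreatedException, FSConstants, SequenceFile) drops out, and HLog instead leans on the statically imported FSUtils.recoverFileLease, now hoisted to the top of the import block. A minimal sketch of what that static import enables at a log-splitting call site, assuming the recoverFileLease(FileSystem, Path, Configuration) signature of this era; the class and path handling below are illustrative, not code from the commit:

import static org.apache.hadoop.hbase.util.FSUtils.recoverFileLease;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

// Hypothetical driver: recover the HDFS lease on a write-ahead-log file
// before replaying it, which is what HLog's log-splitting path now
// delegates to FSUtils instead of doing inline.
public class LeaseRecoverySketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);
    recoverFileLease(fs, new Path(args[0]), conf);
  }
}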

TestScannerResource.java

@@ -49,8 +49,10 @@ import org.apache.hadoop.hbase.util.Bytes;
 public class TestScannerResource extends HBaseRESTClusterTestBase {
   static final String TABLE = "TestScannerResource";
-  static final String COLUMN_1 = "a:";
-  static final String COLUMN_2 = "b:";
+  static final String CFA = "a";
+  static final String CFB = "b";
+  static final String COLUMN_1 = CFA + ":1";
+  static final String COLUMN_2 = CFB + ":2";
   static int expectedRows1;
   static int expectedRows2;
@@ -103,8 +105,8 @@ public class TestScannerResource extends HBaseRESTClusterTestBase {
       return;
     }
     HTableDescriptor htd = new HTableDescriptor(TABLE);
-    htd.addFamily(new HColumnDescriptor(COLUMN_1));
-    htd.addFamily(new HColumnDescriptor(COLUMN_2));
+    htd.addFamily(new HColumnDescriptor(CFA));
+    htd.addFamily(new HColumnDescriptor(CFB));
     admin.createTable(htd);
     expectedRows1 = insertData(TABLE, COLUMN_1, 1.0);
     expectedRows2 = insertData(TABLE, COLUMN_2, 0.5);
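
Read together, the two hunks show the actual bug: the test was feeding the qualified column strings "a:" and "b:" straight into HColumnDescriptor, which expects a bare column family name, while the family:qualifier form belongs in reads and writes. The fix splits the constants so CFA/CFB name the families for schema creation and COLUMN_1/COLUMN_2 name fully qualified columns for inserts. A minimal sketch of that distinction, with an illustrative class name that is not part of the test:

import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;

// Hypothetical illustration, not test code from the commit.
public class FamilyVsColumnSketch {
  public static void main(String[] args) {
    // Schema definitions take bare family names: no ':' separator.
    HTableDescriptor htd = new HTableDescriptor("TestScannerResource");
    htd.addFamily(new HColumnDescriptor("a"));
    // Reads and writes address a cell as family + ":" + qualifier.
    String column = "a" + ":" + "1";
    System.out.println(htd.getNameAsString() + " column=" + column);
  }
}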