diff --git a/CHANGES.txt b/CHANGES.txt
index d058d9e5fa2..7e6bd7d60cc 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -371,6 +371,7 @@ Release 0.21.0 - Unreleased
    HBASE-2560  Fix IllegalArgumentException when manually splitting table
                from web UI
    HBASE-2657  TestTableResource is broken in trunk
+   HBASE-2662  TestScannerResource.testScannerResource broke in trunk
 
   IMPROVEMENTS
    HBASE-1760  Cleanup TODOs in HTable
diff --git a/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLog.java b/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLog.java
index ecc7374119e..9e2d75e9a80 100644
--- a/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLog.java
+++ b/src/main/java/org/apache/hadoop/hbase/regionserver/wal/HLog.java
@@ -19,6 +19,8 @@
  */
 package org.apache.hadoop.hbase.regionserver.wal;
 
+import static org.apache.hadoop.hbase.util.FSUtils.recoverFileLease;
+
 import java.io.DataInput;
 import java.io.DataOutput;
 import java.io.EOFException;
@@ -31,7 +33,6 @@ import java.net.URLEncoder;
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.HashMap;
-import java.util.HashSet;
 import java.util.LinkedList;
 import java.util.List;
 import java.util.ListIterator;
@@ -56,7 +57,6 @@ import java.util.regex.Pattern;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -73,14 +73,9 @@ import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.ClassSize;
 import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.util.Threads;
-import org.apache.hadoop.hdfs.DistributedFileSystem;
-import org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException;
-import org.apache.hadoop.hdfs.protocol.FSConstants;
-import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.io.Writable;
 
-import com.google.common.util.concurrent.NamingThreadFactory;
-import static org.apache.hadoop.hbase.util.FSUtils.recoverFileLease;
+import com.google.common.util.concurrent.NamingThreadFactory;
 
 /**
  * HLog stores all the edits to the HStore. Its the hbase write-ahead-log
diff --git a/src/test/java/org/apache/hadoop/hbase/rest/TestScannerResource.java b/src/test/java/org/apache/hadoop/hbase/rest/TestScannerResource.java
index 69b5321711f..bc9fb8b7d06 100644
--- a/src/test/java/org/apache/hadoop/hbase/rest/TestScannerResource.java
+++ b/src/test/java/org/apache/hadoop/hbase/rest/TestScannerResource.java
@@ -49,8 +49,10 @@ import org.apache.hadoop.hbase.util.Bytes;
 
 public class TestScannerResource extends HBaseRESTClusterTestBase {
   static final String TABLE = "TestScannerResource";
-  static final String COLUMN_1 = "a:";
-  static final String COLUMN_2 = "b:";
+  static final String CFA = "a";
+  static final String CFB = "b";
+  static final String COLUMN_1 = CFA + ":1";
+  static final String COLUMN_2 = CFB + ":2";
   static int expectedRows1;
   static int expectedRows2;
 
@@ -103,8 +105,8 @@ public class TestScannerResource extends HBaseRESTClusterTestBase {
       return;
     }
     HTableDescriptor htd = new HTableDescriptor(TABLE);
-    htd.addFamily(new HColumnDescriptor(COLUMN_1));
-    htd.addFamily(new HColumnDescriptor(COLUMN_2));
+    htd.addFamily(new HColumnDescriptor(CFA));
+    htd.addFamily(new HColumnDescriptor(CFB));
     admin.createTable(htd);
     expectedRows1 = insertData(TABLE, COLUMN_1, 1.0);
     expectedRows2 = insertData(TABLE, COLUMN_2, 0.5);