From d280d4d0ecc0c4281b0c0a25cee03bc3d51540c2 Mon Sep 17 00:00:00 2001 From: Jim Kellerman Date: Sun, 24 Feb 2008 09:22:10 +0000 Subject: [PATCH] HBASE-468 Move HStoreKey back to o.a.h.h git-svn-id: https://svn.apache.org/repos/asf/hadoop/hbase/trunk@630605 13f79535-47bb-0310-9956-ffa450edef68 --- CHANGES.txt | 1 + src/java/org/apache/hadoop/hbase/HMerge.java | 2 - .../hadoop/hbase/HScannerInterface.java | 1 - .../hbase/{regionserver => }/HStoreKey.java | 6 +-- .../hadoop/hbase/client/HBaseAdmin.java | 5 +-- .../hbase/client/HConnectionManager.java | 9 +++-- .../apache/hadoop/hbase/client/HTable.java | 8 +++- .../hadoop/hbase/hql/SelectCommand.java | 2 +- .../hadoop/hbase/io/HbaseMapWritable.java | 4 +- .../hadoop/hbase/mapred/GroupingTableMap.java | 4 +- .../hadoop/hbase/mapred/IdentityTableMap.java | 4 +- .../hadoop/hbase/mapred/TableInputFormat.java | 6 ++- .../apache/hadoop/hbase/mapred/TableMap.java | 4 +- .../apache/hadoop/hbase/master/HMaster.java | 2 +- .../apache/hadoop/hbase/master/RowMap.java | 2 +- .../hbase/regionserver/HAbstractScanner.java | 3 +- .../hadoop/hbase/regionserver/HLog.java | 9 +++-- .../hadoop/hbase/regionserver/HRegion.java | 12 +++++- .../hbase/regionserver/HRegionServer.java | 4 +- .../hadoop/hbase/regionserver/HStore.java | 40 +++++++++++-------- .../hadoop/hbase/regionserver/HStoreFile.java | 29 +++++++++++++- .../hadoop/hbase/rest/ScannerHandler.java | 2 +- .../hadoop/hbase/thrift/ThriftServer.java | 2 +- .../org/apache/hadoop/hbase/util/Migrate.java | 2 +- .../hadoop/hbase/DisabledTestScanner2.java | 1 - .../apache/hadoop/hbase/HBaseTestCase.java | 1 + .../apache/hadoop/hbase/MultiRegionTable.java | 1 - .../hadoop/hbase/PerformanceEvaluation.java | 16 ++++---- .../org/apache/hadoop/hbase/TestCompare.java | 1 - .../apache/hadoop/hbase/TestHBaseCluster.java | 10 ++--- .../apache/hadoop/hbase/TestScannerAPI.java | 1 - .../hadoop/hbase/client/TestBatchUpdate.java | 2 +- .../hadoop/hbase/client/TestHTable.java | 2 +- .../hadoop/hbase/mapred/TestTableIndex.java | 2 +- .../hbase/mapred/TestTableMapReduce.java | 2 +- .../hbase/regionserver/TestCompaction.java | 1 + .../hadoop/hbase/regionserver/TestGet2.java | 8 ++-- .../hadoop/hbase/regionserver/TestHLog.java | 1 + .../hbase/regionserver/TestHMemcache.java | 1 + .../hbase/regionserver/TestHRegion.java | 2 +- .../hbase/regionserver/TestHStoreFile.java | 1 + .../regionserver/TestRegionServerExit.java | 1 + .../hbase/regionserver/TestScanner.java | 1 + .../hadoop/hbase/regionserver/TestSplit.java | 1 + .../hbase/regionserver/TestTimestamp.java | 1 + 45 files changed, 143 insertions(+), 77 deletions(-) rename src/java/org/apache/hadoop/hbase/{regionserver => }/HStoreKey.java (98%) diff --git a/CHANGES.txt b/CHANGES.txt index 88b70d59cab..7b5d7147fca 100644 --- a/CHANGES.txt +++ b/CHANGES.txt @@ -64,6 +64,7 @@ Hbase Change Log HBASE-419 Move RegionServer and related classes into regionserver package HBASE-457 Factor Master into Master, RegionManager, and ServerManager HBASE-464 HBASE-419 introduced javadoc errors + HBASE-468 Move HStoreKey back to o.a.h.h Branch 0.1 diff --git a/src/java/org/apache/hadoop/hbase/HMerge.java b/src/java/org/apache/hadoop/hbase/HMerge.java index d8889326d13..6e699a3aa1d 100644 --- a/src/java/org/apache/hadoop/hbase/HMerge.java +++ b/src/java/org/apache/hadoop/hbase/HMerge.java @@ -40,7 +40,6 @@ import org.apache.hadoop.hbase.client.HConnectionManager; import org.apache.hadoop.hbase.regionserver.HLog; import org.apache.hadoop.hbase.regionserver.HRegion; -import 
org.apache.hadoop.hbase.regionserver.HStoreKey; /** * A non-instantiable class that has a static method capable of compacting @@ -355,7 +354,6 @@ class HMerge implements HConstants { oldRegion2 }; for(int r = 0; r < regionsToDelete.length; r++) { - long lockid = Math.abs(rand.nextLong()); BatchUpdate b = new BatchUpdate(regionsToDelete[r]); b.delete(COL_REGIONINFO); b.delete(COL_SERVER); diff --git a/src/java/org/apache/hadoop/hbase/HScannerInterface.java b/src/java/org/apache/hadoop/hbase/HScannerInterface.java index e5302d4132e..fe147294500 100644 --- a/src/java/org/apache/hadoop/hbase/HScannerInterface.java +++ b/src/java/org/apache/hadoop/hbase/HScannerInterface.java @@ -26,7 +26,6 @@ import java.util.Map; import java.util.SortedMap; import org.apache.hadoop.io.Text; -import org.apache.hadoop.hbase.regionserver.HStoreKey; /** * HScannerInterface iterates through a set of rows. It's implemented by diff --git a/src/java/org/apache/hadoop/hbase/regionserver/HStoreKey.java b/src/java/org/apache/hadoop/hbase/HStoreKey.java similarity index 98% rename from src/java/org/apache/hadoop/hbase/regionserver/HStoreKey.java rename to src/java/org/apache/hadoop/hbase/HStoreKey.java index 8847765cf62..dbc740dcafa 100644 --- a/src/java/org/apache/hadoop/hbase/regionserver/HStoreKey.java +++ b/src/java/org/apache/hadoop/hbase/HStoreKey.java @@ -17,12 +17,10 @@ * See the License for the specific language governing permissions and * limitations under the License. */ -package org.apache.hadoop.hbase.regionserver; +package org.apache.hadoop.hbase; import org.apache.hadoop.hbase.io.TextSequence; -import org.apache.hadoop.hbase.InvalidColumnNameException; import org.apache.hadoop.io.*; -import org.apache.hadoop.hbase.HConstants; import java.io.*; import java.nio.ByteBuffer; @@ -227,6 +225,7 @@ public class HStoreKey implements WritableComparable { // Comparable + /** {@inheritDoc} */ public int compareTo(Object o) { HStoreKey other = (HStoreKey)o; int result = this.row.compareTo(other.row); @@ -286,6 +285,7 @@ public class HStoreKey implements WritableComparable { * Extracts the column family name from a column * For example, returns 'info' if the specified column was 'info:server' * @param col name of column + * @param withColon set to true if colon separator should be returned * @return column family as a TextSequence based on the passed * col. If col is reused, make a new Text of * the result by calling {@link TextSequence#toText()}.
diff --git a/src/java/org/apache/hadoop/hbase/client/HBaseAdmin.java b/src/java/org/apache/hadoop/hbase/client/HBaseAdmin.java index 6d4e58e87e3..ffa6b8ca797 100644 --- a/src/java/org/apache/hadoop/hbase/client/HBaseAdmin.java +++ b/src/java/org/apache/hadoop/hbase/client/HBaseAdmin.java @@ -34,6 +34,7 @@ import org.apache.hadoop.ipc.RemoteException; import org.apache.hadoop.hbase.master.HMasterInterface; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HBaseConfiguration; +import org.apache.hadoop.hbase.HStoreKey; import org.apache.hadoop.hbase.MasterNotRunningException; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.HColumnDescriptor; @@ -42,10 +43,8 @@ import org.apache.hadoop.hbase.TableNotFoundException; import org.apache.hadoop.hbase.TableExistsException; import org.apache.hadoop.hbase.RemoteExceptionHandler; import org.apache.hadoop.hbase.HRegionInfo; -import org.apache.hadoop.hbase.HRegionLocation; import org.apache.hadoop.hbase.regionserver.HRegionInterface; -import org.apache.hadoop.hbase.regionserver.HStoreKey; /** * Provides administrative functions for HBase @@ -118,7 +117,6 @@ public class HBaseAdmin implements HConstants { * * @throws IllegalArgumentException if the table name is reserved * @throws MasterNotRunningException if master is not running - * @throws NoServerForRegionException if root region is not being served * @throws TableExistsException if table already exists (If concurrent * threads, the table may have been created between test-for-existence * and attempt-at-creation). @@ -155,7 +153,6 @@ public class HBaseAdmin implements HConstants { * * @throws IllegalArgumentException if the table name is reserved * @throws MasterNotRunningException if master is not running - * @throws NoServerForRegionException if root region is not being served * @throws TableExistsException if table already exists (If concurrent * threads, the table may have been created between test-for-existence * and attempt-at-creation). 
diff --git a/src/java/org/apache/hadoop/hbase/client/HConnectionManager.java b/src/java/org/apache/hadoop/hbase/client/HConnectionManager.java index 76d42a2a8b2..17c35ccea32 100644 --- a/src/java/org/apache/hadoop/hbase/client/HConnectionManager.java +++ b/src/java/org/apache/hadoop/hbase/client/HConnectionManager.java @@ -39,6 +39,7 @@ import org.apache.hadoop.io.Writable; import org.apache.hadoop.ipc.RemoteException; import org.apache.hadoop.hbase.master.HMasterInterface; import org.apache.hadoop.hbase.util.SoftSortedMap; +import org.apache.hadoop.hbase.HStoreKey; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HBaseConfiguration; @@ -46,14 +47,12 @@ import org.apache.hadoop.hbase.MasterNotRunningException; import org.apache.hadoop.hbase.HRegionLocation; import org.apache.hadoop.hbase.HServerAddress; import org.apache.hadoop.hbase.LocalHBaseCluster; -import org.apache.hadoop.hbase.HRegionLocation; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.NoServerForRegionException; import org.apache.hadoop.hbase.TableNotFoundException; import org.apache.hadoop.hbase.RemoteExceptionHandler; import org.apache.hadoop.hbase.regionserver.HRegionInterface; -import org.apache.hadoop.hbase.regionserver.HStoreKey; /** * A non-instantiable class that manages connections to multiple tables in @@ -130,7 +129,7 @@ public class HConnectionManager implements HConstants { private Map> cachedRegionLocations = new ConcurrentHashMap>();; + SoftSortedMap>(); /** * constructor @@ -300,7 +299,7 @@ public class HConnectionManager implements HConstants { continue; } finally { - if (scannerId != -1L) { + if (scannerId != -1L && server != null) { server.close(scannerId); } } @@ -309,11 +308,13 @@ public class HConnectionManager implements HConstants { return uniqueTables.toArray(new HTableDescriptor[uniqueTables.size()]); } + /** {@inheritDoc} */ public HRegionLocation locateRegion(Text tableName, Text row) throws IOException{ return locateRegion(tableName, row, true); } + /** {@inheritDoc} */ public HRegionLocation relocateRegion(Text tableName, Text row) throws IOException{ return locateRegion(tableName, row, false); diff --git a/src/java/org/apache/hadoop/hbase/client/HTable.java b/src/java/org/apache/hadoop/hbase/client/HTable.java index 8eac663b112..b41779936cf 100644 --- a/src/java/org/apache/hadoop/hbase/client/HTable.java +++ b/src/java/org/apache/hadoop/hbase/client/HTable.java @@ -47,12 +47,12 @@ import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HRegionLocation; import org.apache.hadoop.hbase.HScannerInterface; +import org.apache.hadoop.hbase.HStoreKey; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.RemoteExceptionHandler; import org.apache.hadoop.hbase.NotServingRegionException; -import org.apache.hadoop.hbase.regionserver.HStoreKey; import org.apache.hadoop.hbase.regionserver.HRegionInterface; /** @@ -93,6 +93,7 @@ public class HTable implements HConstants { * Find region location hosting passed row using cached info * @param row Row to find. * @return Location of row. + * @throws IOException */ public HRegionLocation getRegionLocation(Text row) throws IOException { return this.connection.locateRegion(this.tableName, row); @@ -744,6 +745,8 @@ public class HTable implements HConstants { /** * Commit a BatchUpdate to the table. 
+ * @param batchUpdate + * @throws IOException */ public synchronized void commit(final BatchUpdate batchUpdate) throws IOException { @@ -1024,6 +1027,9 @@ public class HTable implements HConstants { return null; } + /** + * Does nothing anymore + */ @Deprecated public void close() { // do nothing... diff --git a/src/java/org/apache/hadoop/hbase/hql/SelectCommand.java b/src/java/org/apache/hadoop/hbase/hql/SelectCommand.java index da4ecbdb0ec..a4926febcd3 100644 --- a/src/java/org/apache/hadoop/hbase/hql/SelectCommand.java +++ b/src/java/org/apache/hadoop/hbase/hql/SelectCommand.java @@ -31,7 +31,7 @@ import java.util.TreeMap; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HScannerInterface; -import org.apache.hadoop.hbase.regionserver.HStoreKey; +import org.apache.hadoop.hbase.HStoreKey; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.Shell; import org.apache.hadoop.hbase.filter.RowFilterInterface; diff --git a/src/java/org/apache/hadoop/hbase/io/HbaseMapWritable.java b/src/java/org/apache/hadoop/hbase/io/HbaseMapWritable.java index 246785da623..67f5f3d7bdf 100644 --- a/src/java/org/apache/hadoop/hbase/io/HbaseMapWritable.java +++ b/src/java/org/apache/hadoop/hbase/io/HbaseMapWritable.java @@ -34,7 +34,7 @@ import org.apache.hadoop.io.Text; import org.apache.hadoop.io.Writable; import org.apache.hadoop.util.ReflectionUtils; -import org.apache.hadoop.hbase.regionserver.HStoreKey; +import org.apache.hadoop.hbase.HStoreKey; /** * A Writable Map. @@ -167,6 +167,7 @@ public class HbaseMapWritable implements Map, Writable, return b; } + /** {@inheritDoc} */ public void write(DataOutput out) throws IOException { // Write out the number of entries in the map out.writeInt(instance.size()); @@ -180,6 +181,7 @@ public class HbaseMapWritable implements Map, Writable, } } + /** {@inheritDoc} */ public void readFields(DataInput in) throws IOException { // First clear the map. Otherwise we will just accumulate // entries every time this method is called. diff --git a/src/java/org/apache/hadoop/hbase/mapred/GroupingTableMap.java b/src/java/org/apache/hadoop/hbase/mapred/GroupingTableMap.java index 88b9f100cb4..2816ffbc9f2 100644 --- a/src/java/org/apache/hadoop/hbase/mapred/GroupingTableMap.java +++ b/src/java/org/apache/hadoop/hbase/mapred/GroupingTableMap.java @@ -25,7 +25,7 @@ import java.util.ArrayList; import java.util.Map; import org.apache.hadoop.hbase.HConstants; -import org.apache.hadoop.hbase.regionserver.HStoreKey; +import org.apache.hadoop.hbase.HStoreKey; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; import org.apache.hadoop.io.MapWritable; import org.apache.hadoop.io.Text; @@ -85,7 +85,7 @@ public class GroupingTableMap extends TableMap { * Pass the new key and value to reduce. * If any of the grouping columns are not found in the value, the record is skipped. 
* - * @see org.apache.hadoop.hbase.mapred.TableMap#map(org.apache.hadoop.hbase.regionserver.HStoreKey, org.apache.hadoop.io.MapWritable, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) + * @see org.apache.hadoop.hbase.mapred.TableMap#map(org.apache.hadoop.hbase.HStoreKey, org.apache.hadoop.io.MapWritable, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) */ @Override public void map(@SuppressWarnings("unused") HStoreKey key, diff --git a/src/java/org/apache/hadoop/hbase/mapred/IdentityTableMap.java b/src/java/org/apache/hadoop/hbase/mapred/IdentityTableMap.java index 91a0548811f..1bb255f1a33 100644 --- a/src/java/org/apache/hadoop/hbase/mapred/IdentityTableMap.java +++ b/src/java/org/apache/hadoop/hbase/mapred/IdentityTableMap.java @@ -21,7 +21,7 @@ package org.apache.hadoop.hbase.mapred; import java.io.IOException; -import org.apache.hadoop.hbase.regionserver.HStoreKey; +import org.apache.hadoop.hbase.HStoreKey; import org.apache.hadoop.io.MapWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.mapred.OutputCollector; @@ -41,7 +41,7 @@ public class IdentityTableMap extends TableMap { /** * Pass the key, value to reduce * - * @see org.apache.hadoop.hbase.mapred.TableMap#map(org.apache.hadoop.hbase.regionserver.HStoreKey, org.apache.hadoop.io.MapWritable, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) + * @see org.apache.hadoop.hbase.mapred.TableMap#map(org.apache.hadoop.hbase.HStoreKey, org.apache.hadoop.io.MapWritable, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) */ @Override public void map(HStoreKey key, MapWritable value, diff --git a/src/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java b/src/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java index 6066c8865fb..2c691cff888 100644 --- a/src/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java +++ b/src/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java @@ -38,7 +38,7 @@ import org.apache.hadoop.mapred.Reporter; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HScannerInterface; -import org.apache.hadoop.hbase.regionserver.HStoreKey; +import org.apache.hadoop.hbase.HStoreKey; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; import org.apache.hadoop.hbase.client.HTable; @@ -84,6 +84,7 @@ implements InputFormat, JobConfigurable { } } + /** {@inheritDoc} */ public void close() throws IOException { this.m_scanner.close(); } @@ -147,6 +148,7 @@ implements InputFormat, JobConfigurable { } + /** {@inheritDoc} */ public RecordReader getRecordReader( InputSplit split, @SuppressWarnings("unused") JobConf job, @@ -178,6 +180,7 @@ implements InputFormat, JobConfigurable { return splits; } + /** {@inheritDoc} */ public void configure(JobConf job) { Path[] tableNames = job.getInputPaths(); m_tableName = new Text(tableNames[0].getName()); @@ -194,6 +197,7 @@ implements InputFormat, JobConfigurable { } } + /** {@inheritDoc} */ public void validateInput(JobConf job) throws IOException { // expecting exactly one path Path[] tableNames = job.getInputPaths(); diff --git a/src/java/org/apache/hadoop/hbase/mapred/TableMap.java b/src/java/org/apache/hadoop/hbase/mapred/TableMap.java index 66f91224df6..f9d12eb51d8 100644 --- a/src/java/org/apache/hadoop/hbase/mapred/TableMap.java +++ b/src/java/org/apache/hadoop/hbase/mapred/TableMap.java @@ -22,7 +22,7 @@ package org.apache.hadoop.hbase.mapred; import java.io.IOException; import org.apache.hadoop.fs.Path; 
-import org.apache.hadoop.hbase.regionserver.HStoreKey; +import org.apache.hadoop.hbase.HStoreKey; import org.apache.hadoop.io.MapWritable; import org.apache.hadoop.io.Text; import org.apache.hadoop.io.Writable; @@ -37,6 +37,8 @@ import org.apache.hadoop.mapred.Reporter; * Scan an HBase table to sort by a specified sort column. * If the column does not exist, the record is not passed to Reduce. * + * @param WritableComparable key class + * @param Writable value class */ @SuppressWarnings("unchecked") public abstract class TableMap diff --git a/src/java/org/apache/hadoop/hbase/master/HMaster.java b/src/java/org/apache/hadoop/hbase/master/HMaster.java index 015d5bea8f0..f1a80a03ec8 100644 --- a/src/java/org/apache/hadoop/hbase/master/HMaster.java +++ b/src/java/org/apache/hadoop/hbase/master/HMaster.java @@ -60,6 +60,7 @@ import org.apache.hadoop.ipc.Server; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HBaseConfiguration; +import org.apache.hadoop.hbase.HStoreKey; import org.apache.hadoop.hbase.Leases; import org.apache.hadoop.hbase.HServerAddress; import org.apache.hadoop.hbase.HColumnDescriptor; @@ -80,7 +81,6 @@ import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.regionserver.HRegionInterface; import org.apache.hadoop.hbase.regionserver.HRegion; -import org.apache.hadoop.hbase.regionserver.HStoreKey; /** * HMaster is the "master server" for a HBase. diff --git a/src/java/org/apache/hadoop/hbase/master/RowMap.java b/src/java/org/apache/hadoop/hbase/master/RowMap.java index 83bc3630045..e0877ba9f58 100644 --- a/src/java/org/apache/hadoop/hbase/master/RowMap.java +++ b/src/java/org/apache/hadoop/hbase/master/RowMap.java @@ -27,9 +27,9 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.io.Text; +import org.apache.hadoop.hbase.HStoreKey; import org.apache.hadoop.hbase.io.HbaseMapWritable; import org.apache.hadoop.io.Writable; -import org.apache.hadoop.hbase.regionserver.HStoreKey; import org.apache.hadoop.hbase.io.ImmutableBytesWritable; /* diff --git a/src/java/org/apache/hadoop/hbase/regionserver/HAbstractScanner.java b/src/java/org/apache/hadoop/hbase/regionserver/HAbstractScanner.java index edd4e06acc5..a2155b7b6bc 100644 --- a/src/java/org/apache/hadoop/hbase/regionserver/HAbstractScanner.java +++ b/src/java/org/apache/hadoop/hbase/regionserver/HAbstractScanner.java @@ -29,6 +29,7 @@ import java.util.regex.Pattern; import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; +import org.apache.hadoop.hbase.HStoreKey; import org.apache.hadoop.io.Text; /** @@ -200,7 +201,7 @@ public abstract class HAbstractScanner implements HInternalScannerInterface { * @return true if a match was found * @throws IOException * - * @see org.apache.hadoop.hbase.HScannerInterface#next(org.apache.hadoop.hbase.regionserver.HStoreKey, java.util.SortedMap) + * @see org.apache.hadoop.hbase.HScannerInterface#next(org.apache.hadoop.hbase.HStoreKey, java.util.SortedMap) */ public boolean next(HStoreKey key, SortedMap results) throws IOException { diff --git a/src/java/org/apache/hadoop/hbase/regionserver/HLog.java b/src/java/org/apache/hadoop/hbase/regionserver/HLog.java index c53e8b5c097..b39d7465c98 100644 --- a/src/java/org/apache/hadoop/hbase/regionserver/HLog.java +++ b/src/java/org/apache/hadoop/hbase/regionserver/HLog.java @@ -35,6 +35,7 @@ import java.util.concurrent.locks.ReentrantLock; import org.apache.commons.logging.Log; import 
org.apache.commons.logging.LogFactory; import org.apache.hadoop.conf.Configuration; +import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.fs.Path; @@ -44,6 +45,7 @@ import org.apache.hadoop.io.SequenceFile.CompressionType; import org.apache.hadoop.io.SequenceFile.Reader; import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.HStoreKey; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HBaseConfiguration; @@ -501,7 +503,7 @@ public class HLog implements HConstants { */ public static void splitLog(Path rootDir, Path srcDir, FileSystem fs, Configuration conf) throws IOException { - Path logfiles[] = fs.listPaths(new Path[] { srcDir }); + FileStatus logfiles[] = fs.listStatus(srcDir); LOG.info("splitting " + logfiles.length + " log(s) in " + srcDir.toString()); Map logWriters = @@ -513,14 +515,15 @@ public class HLog implements HConstants { logfiles[i]); } // Check for empty file. - if (fs.getFileStatus(logfiles[i]).getLen() <= 0) { + if (logfiles[i].getLen() <= 0) { LOG.info("Skipping " + logfiles[i].toString() + " because zero length"); continue; } HLogKey key = new HLogKey(); HLogEdit val = new HLogEdit(); - SequenceFile.Reader in = new SequenceFile.Reader(fs, logfiles[i], conf); + SequenceFile.Reader in = + new SequenceFile.Reader(fs, logfiles[i].getPath(), conf); try { int count = 0; for (; in.next(key, val); count++) { diff --git a/src/java/org/apache/hadoop/hbase/regionserver/HRegion.java b/src/java/org/apache/hadoop/hbase/regionserver/HRegion.java index 593b9381ab2..b781ba79b2c 100644 --- a/src/java/org/apache/hadoop/hbase/regionserver/HRegion.java +++ b/src/java/org/apache/hadoop/hbase/regionserver/HRegion.java @@ -50,6 +50,7 @@ import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HRegionInfo; +import org.apache.hadoop.hbase.HStoreKey; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HScannerInterface; @@ -104,6 +105,11 @@ public class HRegion implements HConstants { * Merge two HRegions. They must be available on the current * HRegionServer. Returns a brand-new active HRegion, also * running on the current HRegionServer. + * + * @param srcA + * @param srcB + * @return new merged HRegion + * @throws IOException */ public static HRegion closeAndMerge(final HRegion srcA, final HRegion srcB) throws IOException { @@ -538,6 +544,7 @@ public class HRegion implements HConstants { ////////////////////////////////////////////////////////////////////////////// /** + * @param midkey * @return returns size of largest HStore. Also returns whether store is * splitable or not (Its not splitable if region has a store that has a * reference store file). @@ -769,6 +776,8 @@ public class HRegion implements HConstants { * Note that no locking is necessary at this level because compaction only * conflicts with a region split, and that cannot happen because the region * server does them sequentially and not in parallel. 
+ * + * @throws IOException */ public boolean compactStores() throws IOException { if (this.closed.get()) { @@ -1611,7 +1620,8 @@ public class HRegion implements HConstants { public String toString() { return regionInfo.getRegionName().toString(); } - + + /** @return Path of region base directory */ public Path getBaseDir() { return this.basedir; } diff --git a/src/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java b/src/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java index 01dc11f62ce..bbce87905a5 100644 --- a/src/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java +++ b/src/java/org/apache/hadoop/hbase/regionserver/HRegionServer.java @@ -70,6 +70,7 @@ import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HServerInfo; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HMsg; +import org.apache.hadoop.hbase.HStoreKey; import org.apache.hadoop.hbase.Leases; import org.apache.hadoop.hbase.HServerAddress; import org.apache.hadoop.hbase.RegionServerRunningException; @@ -1589,7 +1590,8 @@ public class HRegionServer implements HConstants, HRegionInterface, Runnable { public HServerInfo getServerInfo() { return this.serverInfo; } - + + /** @return the info server */ public InfoServer getInfoServer() { return infoServer; } diff --git a/src/java/org/apache/hadoop/hbase/regionserver/HStore.java b/src/java/org/apache/hadoop/hbase/regionserver/HStore.java index 1dc76d11c69..0c1067842a4 100644 --- a/src/java/org/apache/hadoop/hbase/regionserver/HStore.java +++ b/src/java/org/apache/hadoop/hbase/regionserver/HStore.java @@ -39,6 +39,7 @@ import org.apache.commons.logging.Log; import org.apache.commons.logging.LogFactory; import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataOutputStream; +import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.filter.RowFilterInterface; @@ -60,6 +61,7 @@ import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HColumnDescriptor; +import org.apache.hadoop.hbase.HStoreKey; import org.apache.hadoop.hbase.RemoteExceptionHandler; @@ -221,11 +223,12 @@ public class HStore implements HConstants { } /** - * Find the key that matches row exactly, or the one that immediately - * preceeds it. + * @param row + * @param timestamp + * @return the key that matches row exactly, or the one that + * immediately precedes it. */ - public Text getRowKeyAtOrBefore(final Text row, long timestamp) - throws IOException{ + public Text getRowKeyAtOrBefore(final Text row, long timestamp) { this.lock.readLock().lock(); Text key_memcache = null; @@ -246,17 +249,17 @@ public class HStore implements HConstants { return key_snapshot; } else if (key_memcache != null && key_snapshot == null) { return key_memcache; - } else { - // if either is a precise match, return the original row. - if ( (key_memcache != null && key_memcache.equals(row)) + } else if ( (key_memcache != null && key_memcache.equals(row)) || (key_snapshot != null && key_snapshot.equals(row)) ) { - return row; - } + // if either is a precise match, return the original row. + return row; + } else if (key_memcache != null) { // no precise matches, so return the one that is closer to the search // key (greatest) return key_memcache.compareTo(key_snapshot) > 0 ?
- key_memcache : key_snapshot; + key_memcache : key_snapshot; } + return null; } finally { this.lock.readLock().unlock(); } @@ -869,10 +872,11 @@ public class HStore implements HConstants { } // Look first at info files. If a reference, these contain info we need // to create the HStoreFile. - Path infofiles[] = fs.listPaths(new Path[] {infodir}); + FileStatus infofiles[] = fs.listStatus(infodir); ArrayList results = new ArrayList(infofiles.length); ArrayList mapfiles = new ArrayList(infofiles.length); - for (Path p: infofiles) { + for (int i = 0; i < infofiles.length; i++) { + Path p = infofiles[i].getPath(); Matcher m = REF_NAME_PARSER.matcher(p.getName()); /* * * * * * N O T E * * * * * @@ -912,11 +916,12 @@ public class HStore implements HConstants { // List paths by experience returns fully qualified names -- at least when // running on a mini hdfs cluster. - Path datfiles[] = fs.listPaths(new Path[] {mapdir}); + FileStatus datfiles[] = fs.listStatus(mapdir); for (int i = 0; i < datfiles.length; i++) { + Path p = datfiles[i].getPath(); // If does not have sympathetic info file, delete. - if (!mapfiles.contains(fs.makeQualified(datfiles[i]))) { - fs.delete(datfiles[i]); + if (!mapfiles.contains(fs.makeQualified(p))) { + fs.delete(p); } } return results; @@ -1817,8 +1822,11 @@ public class HStore implements HConstants { } /** - * Find the key that matches row exactly, or the one that immediately + * @return the key that matches row exactly, or the one that immediately * preceeds it. + * @param row + * @param timestamp + * @throws IOException */ public Text getRowKeyAtOrBefore(final Text row, final long timestamp) throws IOException{ diff --git a/src/java/org/apache/hadoop/hbase/regionserver/HStoreFile.java b/src/java/org/apache/hadoop/hbase/regionserver/HStoreFile.java index 164e5d05633..a979e4028f8 100644 --- a/src/java/org/apache/hadoop/hbase/regionserver/HStoreFile.java +++ b/src/java/org/apache/hadoop/hbase/regionserver/HStoreFile.java @@ -48,6 +48,7 @@ import org.onelab.filter.Key; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HBaseConfiguration; +import org.apache.hadoop.hbase.HStoreKey; /** * A HStore data file. HStores usually have one or more of these files. They @@ -523,18 +524,34 @@ public class HStoreFile implements HConstants { ((encodedRegionName != null) ? "." 
+ encodedRegionName : ""); } + /** + * @param dir + * @param encodedRegionName + * @param colFamily + * @return path for map file directory + */ public static Path getMapDir(Path dir, String encodedRegionName, Text colFamily) { return new Path(dir, new Path(encodedRegionName, new Path(colFamily.toString(), HSTORE_DATFILE_DIR))); } - /** @return the info directory path */ + /** + * @param dir + * @param encodedRegionName + * @param colFamily + * @return the info directory path + */ public static Path getInfoDir(Path dir, String encodedRegionName, Text colFamily) { return new Path(dir, new Path(encodedRegionName, new Path(colFamily.toString(), HSTORE_INFO_DIR))); } - /** @return the bloom filter directory path */ + /** + * @param dir + * @param encodedRegionName + * @param colFamily + * @return the bloom filter directory path + */ public static Path getFilterDir(Path dir, String encodedRegionName, Text colFamily) { return new Path(dir, new Path(encodedRegionName, new Path(colFamily.toString(), HSTORE_FILTER_DIR))); @@ -723,6 +740,14 @@ public class HStoreFile implements HConstants { bloomFilter = filter; } + /** + * @param fs + * @param dirName + * @param conf + * @param filter + * @param blockCacheEnabled + * @throws IOException + */ public Reader(FileSystem fs, String dirName, Configuration conf, final Filter filter, final boolean blockCacheEnabled) throws IOException { diff --git a/src/java/org/apache/hadoop/hbase/rest/ScannerHandler.java b/src/java/org/apache/hadoop/hbase/rest/ScannerHandler.java index 80837ef64a1..0aba262dc9c 100644 --- a/src/java/org/apache/hadoop/hbase/rest/ScannerHandler.java +++ b/src/java/org/apache/hadoop/hbase/rest/ScannerHandler.java @@ -35,7 +35,7 @@ import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HScannerInterface; -import org.apache.hadoop.hbase.regionserver.HStoreKey; +import org.apache.hadoop.hbase.HStoreKey; import org.apache.hadoop.hbase.util.JenkinsHash; import org.apache.hadoop.io.Text; import org.mortbay.servlet.MultiPartResponse; diff --git a/src/java/org/apache/hadoop/hbase/thrift/ThriftServer.java b/src/java/org/apache/hadoop/hbase/thrift/ThriftServer.java index fcf84eb30f1..49ec15c0a3b 100644 --- a/src/java/org/apache/hadoop/hbase/thrift/ThriftServer.java +++ b/src/java/org/apache/hadoop/hbase/thrift/ThriftServer.java @@ -35,7 +35,7 @@ import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HScannerInterface; -import org.apache.hadoop.hbase.regionserver.HStoreKey; +import org.apache.hadoop.hbase.HStoreKey; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.MasterNotRunningException; import org.apache.hadoop.hbase.thrift.generated.AlreadyExists; diff --git a/src/java/org/apache/hadoop/hbase/util/Migrate.java b/src/java/org/apache/hadoop/hbase/util/Migrate.java index 809f453ac11..2c84c9a96d4 100644 --- a/src/java/org/apache/hadoop/hbase/util/Migrate.java +++ b/src/java/org/apache/hadoop/hbase/util/Migrate.java @@ -58,12 +58,12 @@ import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HScannerInterface; +import org.apache.hadoop.hbase.HStoreKey; import org.apache.hadoop.hbase.MasterNotRunningException; import org.apache.hadoop.hbase.regionserver.HLog; import 
org.apache.hadoop.hbase.regionserver.HRegion; import org.apache.hadoop.hbase.regionserver.HStore; -import org.apache.hadoop.hbase.regionserver.HStoreKey; /** * Perform a file system upgrade to convert older file layouts to that diff --git a/src/test/org/apache/hadoop/hbase/DisabledTestScanner2.java b/src/test/org/apache/hadoop/hbase/DisabledTestScanner2.java index 3894e1ec32e..86baa132449 100644 --- a/src/test/org/apache/hadoop/hbase/DisabledTestScanner2.java +++ b/src/test/org/apache/hadoop/hbase/DisabledTestScanner2.java @@ -47,7 +47,6 @@ import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.regionserver.HRegion; -import org.apache.hadoop.hbase.regionserver.HStoreKey; import org.apache.hadoop.hbase.regionserver.HRegionInterface; /** diff --git a/src/test/org/apache/hadoop/hbase/HBaseTestCase.java b/src/test/org/apache/hadoop/hbase/HBaseTestCase.java index bc16b4f8129..03dd0a68a1f 100644 --- a/src/test/org/apache/hadoop/hbase/HBaseTestCase.java +++ b/src/test/org/apache/hadoop/hbase/HBaseTestCase.java @@ -330,6 +330,7 @@ public abstract class HBaseTestCase extends TestCase { public static interface Incommon { /** * @param row + * @return update id * @throws IOException */ public long startUpdate(Text row) throws IOException; diff --git a/src/test/org/apache/hadoop/hbase/MultiRegionTable.java b/src/test/org/apache/hadoop/hbase/MultiRegionTable.java index b85733b7f17..ab83dd12fcf 100644 --- a/src/test/org/apache/hadoop/hbase/MultiRegionTable.java +++ b/src/test/org/apache/hadoop/hbase/MultiRegionTable.java @@ -35,7 +35,6 @@ import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.regionserver.HRegion; import org.apache.hadoop.hbase.regionserver.HRegionServer; -import org.apache.hadoop.hbase.regionserver.HStoreKey; /** * Utility class to build a table of multiple regions. diff --git a/src/test/org/apache/hadoop/hbase/PerformanceEvaluation.java b/src/test/org/apache/hadoop/hbase/PerformanceEvaluation.java index 82fcfe9a6b4..69cba3c1716 100644 --- a/src/test/org/apache/hadoop/hbase/PerformanceEvaluation.java +++ b/src/test/org/apache/hadoop/hbase/PerformanceEvaluation.java @@ -48,8 +48,8 @@ import org.apache.hadoop.mapred.TextOutputFormat; import org.apache.log4j.Logger; import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.client.HBaseAdmin; +import org.apache.hadoop.hbase.io.BatchUpdate; -import org.apache.hadoop.hbase.regionserver.HStoreKey; /** * Script used evaluating HBase performance and scalability. 
Runs a HBase @@ -386,9 +386,9 @@ public class PerformanceEvaluation implements HConstants { @Override void testRow(@SuppressWarnings("unused") final int i) throws IOException { Text row = getRandomRow(this.rand, this.totalRows); - long lockid = table.startUpdate(row); - table.put(lockid, COLUMN_NAME, generateValue(this.rand)); - table.commit(lockid); + BatchUpdate b = new BatchUpdate(row); + b.put(COLUMN_NAME, generateValue(this.rand)); + table.commit(b); } @Override @@ -460,9 +460,9 @@ public class PerformanceEvaluation implements HConstants { @Override void testRow(final int i) throws IOException { - long lockid = table.startUpdate(format(i)); - table.put(lockid, COLUMN_NAME, generateValue(this.rand)); - table.commit(lockid); + BatchUpdate b = new BatchUpdate(format(i)); + b.put(COLUMN_NAME, generateValue(this.rand)); + table.commit(b); } @Override @@ -535,7 +535,7 @@ public class PerformanceEvaluation implements HConstants { return totalElapsedTime; } - private void runNIsOne(final String cmd) throws IOException { + private void runNIsOne(final String cmd) { Status status = new Status() { @SuppressWarnings("unused") public void setStatus(String msg) throws IOException { diff --git a/src/test/org/apache/hadoop/hbase/TestCompare.java b/src/test/org/apache/hadoop/hbase/TestCompare.java index c2436d605c4..3b0e8d22e15 100644 --- a/src/test/org/apache/hadoop/hbase/TestCompare.java +++ b/src/test/org/apache/hadoop/hbase/TestCompare.java @@ -21,7 +21,6 @@ package org.apache.hadoop.hbase; import org.apache.hadoop.io.Text; import junit.framework.TestCase; -import org.apache.hadoop.hbase.regionserver.HStoreKey; /** * Test comparing HBase objects. diff --git a/src/test/org/apache/hadoop/hbase/TestHBaseCluster.java b/src/test/org/apache/hadoop/hbase/TestHBaseCluster.java index 0d17fe6030f..cbc03371a3c 100644 --- a/src/test/org/apache/hadoop/hbase/TestHBaseCluster.java +++ b/src/test/org/apache/hadoop/hbase/TestHBaseCluster.java @@ -27,7 +27,7 @@ import java.util.TreeMap; import org.apache.hadoop.io.Text; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; -import org.apache.hadoop.hbase.regionserver.HStoreKey; +import org.apache.hadoop.hbase.io.BatchUpdate; /** * Test HBase Master and Region servers, client API @@ -97,12 +97,12 @@ public class TestHBaseCluster extends HBaseClusterTestCase { // Write out a bunch of values for (int k = FIRST_ROW; k <= NUM_VALS; k++) { - long writeid = table.startUpdate(new Text("row_" + k)); - table.put(writeid, CONTENTS_BASIC, + BatchUpdate b = new BatchUpdate(new Text("row_" + k)); + b.put(CONTENTS_BASIC, (CONTENTSTR + k).getBytes(HConstants.UTF8_ENCODING)); - table.put(writeid, new Text(ANCHORNUM + k), + b.put(new Text(ANCHORNUM + k), (ANCHORSTR + k).getBytes(HConstants.UTF8_ENCODING)); - table.commit(writeid); + table.commit(b); } System.out.println("Write " + NUM_VALS + " rows. 
Elapsed time: " + ((System.currentTimeMillis() - startTime) / 1000.0)); diff --git a/src/test/org/apache/hadoop/hbase/TestScannerAPI.java b/src/test/org/apache/hadoop/hbase/TestScannerAPI.java index b8fe26d0a9b..cad8fffc602 100644 --- a/src/test/org/apache/hadoop/hbase/TestScannerAPI.java +++ b/src/test/org/apache/hadoop/hbase/TestScannerAPI.java @@ -31,7 +31,6 @@ import org.apache.hadoop.io.Text; import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.regionserver.HRegion; -import org.apache.hadoop.hbase.regionserver.HStoreKey; import org.apache.hadoop.hbase.io.BatchUpdate; /** test the scanner API at all levels */ diff --git a/src/test/org/apache/hadoop/hbase/client/TestBatchUpdate.java b/src/test/org/apache/hadoop/hbase/client/TestBatchUpdate.java index 2e2e0bbac8a..a53c871d68f 100644 --- a/src/test/org/apache/hadoop/hbase/client/TestBatchUpdate.java +++ b/src/test/org/apache/hadoop/hbase/client/TestBatchUpdate.java @@ -25,10 +25,10 @@ import java.util.Map; import java.util.TreeMap; import org.apache.hadoop.io.Text; import org.apache.hadoop.hbase.HBaseClusterTestCase; +import org.apache.hadoop.hbase.HStoreKey; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HScannerInterface; -import org.apache.hadoop.hbase.regionserver.HStoreKey; import org.apache.hadoop.hbase.HColumnDescriptor; /** diff --git a/src/test/org/apache/hadoop/hbase/client/TestHTable.java b/src/test/org/apache/hadoop/hbase/client/TestHTable.java index 19877f67cbd..a54f908dfe8 100644 --- a/src/test/org/apache/hadoop/hbase/client/TestHTable.java +++ b/src/test/org/apache/hadoop/hbase/client/TestHTable.java @@ -30,10 +30,10 @@ import org.apache.commons.logging.LogFactory; import org.apache.hadoop.hbase.HBaseClusterTestCase; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HColumnDescriptor; +import org.apache.hadoop.hbase.HStoreKey; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.TableNotFoundException; import org.apache.hadoop.hbase.HScannerInterface; -import org.apache.hadoop.hbase.regionserver.HStoreKey; /** * Tests HTable diff --git a/src/test/org/apache/hadoop/hbase/mapred/TestTableIndex.java b/src/test/org/apache/hadoop/hbase/mapred/TestTableIndex.java index f7a0b15f68e..1e4cff2aef0 100644 --- a/src/test/org/apache/hadoop/hbase/mapred/TestTableIndex.java +++ b/src/test/org/apache/hadoop/hbase/mapred/TestTableIndex.java @@ -39,8 +39,8 @@ import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.regionserver.HRegion; import org.apache.hadoop.hbase.HScannerInterface; -import org.apache.hadoop.hbase.regionserver.HStoreKey; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.HStoreKey; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.MiniHBaseCluster; import org.apache.hadoop.hbase.MultiRegionTable; diff --git a/src/test/org/apache/hadoop/hbase/mapred/TestTableMapReduce.java b/src/test/org/apache/hadoop/hbase/mapred/TestTableMapReduce.java index a22b1969a0f..703a104a1c7 100644 --- a/src/test/org/apache/hadoop/hbase/mapred/TestTableMapReduce.java +++ b/src/test/org/apache/hadoop/hbase/mapred/TestTableMapReduce.java @@ -32,8 +32,8 @@ import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HConstants; import 
org.apache.hadoop.hbase.HScannerInterface; -import org.apache.hadoop.hbase.regionserver.HStoreKey; import org.apache.hadoop.hbase.client.HTable; +import org.apache.hadoop.hbase.HStoreKey; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.MiniHBaseCluster; import org.apache.hadoop.hbase.MultiRegionTable; diff --git a/src/test/org/apache/hadoop/hbase/regionserver/TestCompaction.java b/src/test/org/apache/hadoop/hbase/regionserver/TestCompaction.java index 56737a2c7e5..b99342680da 100644 --- a/src/test/org/apache/hadoop/hbase/regionserver/TestCompaction.java +++ b/src/test/org/apache/hadoop/hbase/regionserver/TestCompaction.java @@ -31,6 +31,7 @@ import org.apache.hadoop.io.MapFile; import org.apache.hadoop.io.Text; import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.HStoreKey; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.StaticTestEnvironment; diff --git a/src/test/org/apache/hadoop/hbase/regionserver/TestGet2.java b/src/test/org/apache/hadoop/hbase/regionserver/TestGet2.java index 37c1c4bf1c2..9587ebc2b0d 100644 --- a/src/test/org/apache/hadoop/hbase/regionserver/TestGet2.java +++ b/src/test/org/apache/hadoop/hbase/regionserver/TestGet2.java @@ -30,8 +30,8 @@ import org.apache.hadoop.io.Text; import org.apache.hadoop.hbase.HBaseTestCase; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HScannerInterface; +import org.apache.hadoop.hbase.HStoreKey; import org.apache.hadoop.hbase.HTableDescriptor; -import org.apache.hadoop.hbase.HRegionInfo; /** @@ -148,7 +148,10 @@ public class TestGet2 extends HBaseTestCase { } } - /** For HADOOP-2443 */ + /** + * For HADOOP-2443 + * @throws IOException + */ public void testGetClosestRowBefore() throws IOException{ HRegion region = null; @@ -156,7 +159,6 @@ public class TestGet2 extends HBaseTestCase { try { HTableDescriptor htd = createTableDescriptor(getName()); - HRegionInfo hri = new HRegionInfo(htd, null, null); region = createNewHRegion(htd, null, null); region_incommon = new HRegionIncommon(region); diff --git a/src/test/org/apache/hadoop/hbase/regionserver/TestHLog.java b/src/test/org/apache/hadoop/hbase/regionserver/TestHLog.java index e8e1cc27fb5..9490dd9c088 100644 --- a/src/test/org/apache/hadoop/hbase/regionserver/TestHLog.java +++ b/src/test/org/apache/hadoop/hbase/regionserver/TestHLog.java @@ -30,6 +30,7 @@ import org.apache.hadoop.io.SequenceFile.Reader; import org.apache.hadoop.hbase.HBaseTestCase; import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.HStoreKey; import org.apache.hadoop.hbase.StaticTestEnvironment; /** JUnit test case for HLog */ diff --git a/src/test/org/apache/hadoop/hbase/regionserver/TestHMemcache.java b/src/test/org/apache/hadoop/hbase/regionserver/TestHMemcache.java index 677874a1e06..51db674558b 100644 --- a/src/test/org/apache/hadoop/hbase/regionserver/TestHMemcache.java +++ b/src/test/org/apache/hadoop/hbase/regionserver/TestHMemcache.java @@ -29,6 +29,7 @@ import junit.framework.TestCase; import org.apache.hadoop.io.Text; import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.HStoreKey; /** memcache test case */ public class TestHMemcache extends TestCase { diff --git a/src/test/org/apache/hadoop/hbase/regionserver/TestHRegion.java b/src/test/org/apache/hadoop/hbase/regionserver/TestHRegion.java index 2f0f79c9795..36f75a1c9a9 100644 --- a/src/test/org/apache/hadoop/hbase/regionserver/TestHRegion.java +++ 
b/src/test/org/apache/hadoop/hbase/regionserver/TestHRegion.java @@ -32,9 +32,9 @@ import org.apache.hadoop.io.Text; import org.apache.hadoop.hbase.HBaseTestCase; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.log4j.Logger; +import org.apache.hadoop.hbase.HStoreKey; import org.apache.hadoop.hbase.StaticTestEnvironment; import org.apache.hadoop.hbase.HConstants; -import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HScannerInterface; diff --git a/src/test/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java b/src/test/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java index 68704415b9d..53c2f51020c 100644 --- a/src/test/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java +++ b/src/test/org/apache/hadoop/hbase/regionserver/TestHStoreFile.java @@ -33,6 +33,7 @@ import org.apache.hadoop.io.WritableComparable; import org.apache.hadoop.hbase.HBaseTestCase; import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.HStoreKey; import org.apache.hadoop.hbase.StaticTestEnvironment; /** * Test HStoreFile diff --git a/src/test/org/apache/hadoop/hbase/regionserver/TestRegionServerExit.java b/src/test/org/apache/hadoop/hbase/regionserver/TestRegionServerExit.java index d2a94eb1af3..4b625451330 100644 --- a/src/test/org/apache/hadoop/hbase/regionserver/TestRegionServerExit.java +++ b/src/test/org/apache/hadoop/hbase/regionserver/TestRegionServerExit.java @@ -34,6 +34,7 @@ import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.HBaseClusterTestCase; import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.HStoreKey; import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.LocalHBaseCluster; diff --git a/src/test/org/apache/hadoop/hbase/regionserver/TestScanner.java b/src/test/org/apache/hadoop/hbase/regionserver/TestScanner.java index f0de0a4fb24..6f839647076 100644 --- a/src/test/org/apache/hadoop/hbase/regionserver/TestScanner.java +++ b/src/test/org/apache/hadoop/hbase/regionserver/TestScanner.java @@ -33,6 +33,7 @@ import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HScannerInterface; import org.apache.hadoop.hbase.HServerAddress; +import org.apache.hadoop.hbase.HStoreKey; import org.apache.hadoop.hbase.StaticTestEnvironment; /** diff --git a/src/test/org/apache/hadoop/hbase/regionserver/TestSplit.java b/src/test/org/apache/hadoop/hbase/regionserver/TestSplit.java index 91062e8bcac..9e8108bb42f 100644 --- a/src/test/org/apache/hadoop/hbase/regionserver/TestSplit.java +++ b/src/test/org/apache/hadoop/hbase/regionserver/TestSplit.java @@ -28,6 +28,7 @@ import org.apache.hadoop.dfs.MiniDFSCluster; import org.apache.hadoop.io.Text; import org.apache.log4j.Level; import org.apache.log4j.Logger; +import org.apache.hadoop.hbase.HStoreKey; import org.apache.hadoop.hbase.MultiRegionTable; import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HTableDescriptor; diff --git a/src/test/org/apache/hadoop/hbase/regionserver/TestTimestamp.java b/src/test/org/apache/hadoop/hbase/regionserver/TestTimestamp.java index 164d435bfa7..f685f52ca1b 100644 --- a/src/test/org/apache/hadoop/hbase/regionserver/TestTimestamp.java +++ b/src/test/org/apache/hadoop/hbase/regionserver/TestTimestamp.java @@ -30,6 +30,7 @@ import org.apache.hadoop.hbase.client.HTable; import 
org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.HBaseTestCase; import org.apache.hadoop.hbase.HConstants; +import org.apache.hadoop.hbase.HStoreKey; import org.apache.hadoop.hbase.StaticTestEnvironment; import org.apache.hadoop.hbase.MiniHBaseCluster; import org.apache.hadoop.hbase.HScannerInterface;