HBASE-468 Move HStoreKey back to o.a.h.h

git-svn-id: https://svn.apache.org/repos/asf/hadoop/hbase/trunk@630605 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Jim Kellerman 2008-02-24 09:22:10 +00:00
parent 4c9a4b4cb0
commit d280d4d0ec
45 changed files with 143 additions and 77 deletions

View File

@@ -64,6 +64,7 @@ Hbase Change Log
HBASE-419 Move RegionServer and related classes into regionserver package HBASE-419 Move RegionServer and related classes into regionserver package
HBASE-457 Factor Master into Master, RegionManager, and ServerManager HBASE-457 Factor Master into Master, RegionManager, and ServerManager
HBASE-464 HBASE-419 introduced javadoc errors HBASE-464 HBASE-419 introduced javadoc errors
HBASE-468 Move HStoreKey back to o.a.h.h
Branch 0.1 Branch 0.1

View File

@@ -40,7 +40,6 @@ import org.apache.hadoop.hbase.client.HConnectionManager;
import org.apache.hadoop.hbase.regionserver.HLog; import org.apache.hadoop.hbase.regionserver.HLog;
import org.apache.hadoop.hbase.regionserver.HRegion; import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.HStoreKey;
/** /**
* A non-instantiable class that has a static method capable of compacting * A non-instantiable class that has a static method capable of compacting
@@ -355,7 +354,6 @@ class HMerge implements HConstants {
oldRegion2 oldRegion2
}; };
for(int r = 0; r < regionsToDelete.length; r++) { for(int r = 0; r < regionsToDelete.length; r++) {
long lockid = Math.abs(rand.nextLong());
BatchUpdate b = new BatchUpdate(regionsToDelete[r]); BatchUpdate b = new BatchUpdate(regionsToDelete[r]);
b.delete(COL_REGIONINFO); b.delete(COL_REGIONINFO);
b.delete(COL_SERVER); b.delete(COL_SERVER);

View File

@@ -26,7 +26,6 @@ import java.util.Map;
import java.util.SortedMap; import java.util.SortedMap;
import org.apache.hadoop.io.Text; import org.apache.hadoop.io.Text;
import org.apache.hadoop.hbase.regionserver.HStoreKey;
/** /**
* HScannerInterface iterates through a set of rows. It's implemented by * HScannerInterface iterates through a set of rows. It's implemented by

View File

@@ -17,12 +17,10 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
package org.apache.hadoop.hbase.regionserver; package org.apache.hadoop.hbase;
import org.apache.hadoop.hbase.io.TextSequence; import org.apache.hadoop.hbase.io.TextSequence;
import org.apache.hadoop.hbase.InvalidColumnNameException;
import org.apache.hadoop.io.*; import org.apache.hadoop.io.*;
import org.apache.hadoop.hbase.HConstants;
import java.io.*; import java.io.*;
import java.nio.ByteBuffer; import java.nio.ByteBuffer;
@@ -227,6 +225,7 @@ public class HStoreKey implements WritableComparable {
// Comparable // Comparable
/** {@inheritDoc} */
public int compareTo(Object o) { public int compareTo(Object o) {
HStoreKey other = (HStoreKey)o; HStoreKey other = (HStoreKey)o;
int result = this.row.compareTo(other.row); int result = this.row.compareTo(other.row);
@@ -286,6 +285,7 @@ public class HStoreKey implements WritableComparable {
* Extracts the column family name from a column * Extracts the column family name from a column
* For example, returns 'info' if the specified column was 'info:server' * For example, returns 'info' if the specified column was 'info:server'
* @param col name of column * @param col name of column
* @param withColon set to true if colon separator should be returned
* @return column famile as a TextSequence based on the passed * @return column famile as a TextSequence based on the passed
* <code>col</code>. If <code>col</code> is reused, make a new Text of * <code>col</code>. If <code>col</code> is reused, make a new Text of
* the result by calling {@link TextSequence#toText()}. * the result by calling {@link TextSequence#toText()}.

View File

@@ -34,6 +34,7 @@ import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.hbase.master.HMasterInterface; import org.apache.hadoop.hbase.master.HMasterInterface;
import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HStoreKey;
import org.apache.hadoop.hbase.MasterNotRunningException; import org.apache.hadoop.hbase.MasterNotRunningException;
import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HColumnDescriptor;
@@ -42,10 +43,8 @@ import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.hadoop.hbase.TableExistsException; import org.apache.hadoop.hbase.TableExistsException;
import org.apache.hadoop.hbase.RemoteExceptionHandler; import org.apache.hadoop.hbase.RemoteExceptionHandler;
import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.regionserver.HRegionInterface; import org.apache.hadoop.hbase.regionserver.HRegionInterface;
import org.apache.hadoop.hbase.regionserver.HStoreKey;
/** /**
* Provides administrative functions for HBase * Provides administrative functions for HBase
@@ -118,7 +117,6 @@ public class HBaseAdmin implements HConstants {
* *
* @throws IllegalArgumentException if the table name is reserved * @throws IllegalArgumentException if the table name is reserved
* @throws MasterNotRunningException if master is not running * @throws MasterNotRunningException if master is not running
* @throws NoServerForRegionException if root region is not being served
* @throws TableExistsException if table already exists (If concurrent * @throws TableExistsException if table already exists (If concurrent
* threads, the table may have been created between test-for-existence * threads, the table may have been created between test-for-existence
* and attempt-at-creation). * and attempt-at-creation).
@@ -155,7 +153,6 @@ public class HBaseAdmin implements HConstants {
* *
* @throws IllegalArgumentException if the table name is reserved * @throws IllegalArgumentException if the table name is reserved
* @throws MasterNotRunningException if master is not running * @throws MasterNotRunningException if master is not running
* @throws NoServerForRegionException if root region is not being served
* @throws TableExistsException if table already exists (If concurrent * @throws TableExistsException if table already exists (If concurrent
* threads, the table may have been created between test-for-existence * threads, the table may have been created between test-for-existence
* and attempt-at-creation). * and attempt-at-creation).

View File

@@ -39,6 +39,7 @@ import org.apache.hadoop.io.Writable;
import org.apache.hadoop.ipc.RemoteException; import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.hbase.master.HMasterInterface; import org.apache.hadoop.hbase.master.HMasterInterface;
import org.apache.hadoop.hbase.util.SoftSortedMap; import org.apache.hadoop.hbase.util.SoftSortedMap;
import org.apache.hadoop.hbase.HStoreKey;
import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -46,14 +47,12 @@ import org.apache.hadoop.hbase.MasterNotRunningException;
import org.apache.hadoop.hbase.HRegionLocation; import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.HServerAddress; import org.apache.hadoop.hbase.HServerAddress;
import org.apache.hadoop.hbase.LocalHBaseCluster; import org.apache.hadoop.hbase.LocalHBaseCluster;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.NoServerForRegionException; import org.apache.hadoop.hbase.NoServerForRegionException;
import org.apache.hadoop.hbase.TableNotFoundException; import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.hadoop.hbase.RemoteExceptionHandler; import org.apache.hadoop.hbase.RemoteExceptionHandler;
import org.apache.hadoop.hbase.regionserver.HRegionInterface; import org.apache.hadoop.hbase.regionserver.HRegionInterface;
import org.apache.hadoop.hbase.regionserver.HStoreKey;
/** /**
* A non-instantiable class that manages connections to multiple tables in * A non-instantiable class that manages connections to multiple tables in
@@ -130,7 +129,7 @@ public class HConnectionManager implements HConstants {
private Map<Text, SoftSortedMap<Text, HRegionLocation>> private Map<Text, SoftSortedMap<Text, HRegionLocation>>
cachedRegionLocations = new ConcurrentHashMap<Text, cachedRegionLocations = new ConcurrentHashMap<Text,
SoftSortedMap<Text, HRegionLocation>>();; SoftSortedMap<Text, HRegionLocation>>();
/** /**
* constructor * constructor
@@ -300,7 +299,7 @@ public class HConnectionManager implements HConstants {
continue; continue;
} }
finally { finally {
if (scannerId != -1L) { if (scannerId != -1L && server != null) {
server.close(scannerId); server.close(scannerId);
} }
} }
@@ -309,11 +308,13 @@ public class HConnectionManager implements HConstants {
return uniqueTables.toArray(new HTableDescriptor[uniqueTables.size()]); return uniqueTables.toArray(new HTableDescriptor[uniqueTables.size()]);
} }
/** {@inheritDoc} */
public HRegionLocation locateRegion(Text tableName, Text row) public HRegionLocation locateRegion(Text tableName, Text row)
throws IOException{ throws IOException{
return locateRegion(tableName, row, true); return locateRegion(tableName, row, true);
} }
/** {@inheritDoc} */
public HRegionLocation relocateRegion(Text tableName, Text row) public HRegionLocation relocateRegion(Text tableName, Text row)
throws IOException{ throws IOException{
return locateRegion(tableName, row, false); return locateRegion(tableName, row, false);

View File

@@ -47,12 +47,12 @@ import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HRegionLocation; import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.HScannerInterface; import org.apache.hadoop.hbase.HScannerInterface;
import org.apache.hadoop.hbase.HStoreKey;
import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.RemoteExceptionHandler; import org.apache.hadoop.hbase.RemoteExceptionHandler;
import org.apache.hadoop.hbase.NotServingRegionException; import org.apache.hadoop.hbase.NotServingRegionException;
import org.apache.hadoop.hbase.regionserver.HStoreKey;
import org.apache.hadoop.hbase.regionserver.HRegionInterface; import org.apache.hadoop.hbase.regionserver.HRegionInterface;
/** /**
@@ -93,6 +93,7 @@ public class HTable implements HConstants {
* Find region location hosting passed row using cached info * Find region location hosting passed row using cached info
* @param row Row to find. * @param row Row to find.
* @return Location of row. * @return Location of row.
* @throws IOException
*/ */
public HRegionLocation getRegionLocation(Text row) throws IOException { public HRegionLocation getRegionLocation(Text row) throws IOException {
return this.connection.locateRegion(this.tableName, row); return this.connection.locateRegion(this.tableName, row);
@@ -744,6 +745,8 @@
/** /**
* Commit a BatchUpdate to the table. * Commit a BatchUpdate to the table.
* @param batchUpdate
* @throws IOException
*/ */
public synchronized void commit(final BatchUpdate batchUpdate) public synchronized void commit(final BatchUpdate batchUpdate)
throws IOException { throws IOException {
@@ -1024,6 +1027,9 @@
return null; return null;
} }
/**
* Does nothing anymore
*/
@Deprecated @Deprecated
public void close() { public void close() {
// do nothing... // do nothing...

View File

@@ -31,7 +31,7 @@ import java.util.TreeMap;
import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HScannerInterface; import org.apache.hadoop.hbase.HScannerInterface;
import org.apache.hadoop.hbase.regionserver.HStoreKey; import org.apache.hadoop.hbase.HStoreKey;
import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.Shell; import org.apache.hadoop.hbase.Shell;
import org.apache.hadoop.hbase.filter.RowFilterInterface; import org.apache.hadoop.hbase.filter.RowFilterInterface;

View File

@@ -34,7 +34,7 @@ import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable; import org.apache.hadoop.io.Writable;
import org.apache.hadoop.util.ReflectionUtils; import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.hbase.regionserver.HStoreKey; import org.apache.hadoop.hbase.HStoreKey;
/** /**
* A Writable Map. * A Writable Map.
@@ -167,6 +167,7 @@ public class HbaseMapWritable implements Map<Writable, Writable>, Writable,
return b; return b;
} }
/** {@inheritDoc} */
public void write(DataOutput out) throws IOException { public void write(DataOutput out) throws IOException {
// Write out the number of entries in the map // Write out the number of entries in the map
out.writeInt(instance.size()); out.writeInt(instance.size());
@@ -180,6 +181,7 @@
} }
} }
/** {@inheritDoc} */
public void readFields(DataInput in) throws IOException { public void readFields(DataInput in) throws IOException {
// First clear the map. Otherwise we will just accumulate // First clear the map. Otherwise we will just accumulate
// entries every time this method is called. // entries every time this method is called.

View File

@@ -25,7 +25,7 @@ import java.util.ArrayList;
import java.util.Map; import java.util.Map;
import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.regionserver.HStoreKey; import org.apache.hadoop.hbase.HStoreKey;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable; import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.io.MapWritable; import org.apache.hadoop.io.MapWritable;
import org.apache.hadoop.io.Text; import org.apache.hadoop.io.Text;
@@ -85,7 +85,7 @@ public class GroupingTableMap extends TableMap<Text,MapWritable> {
* Pass the new key and value to reduce. * Pass the new key and value to reduce.
* If any of the grouping columns are not found in the value, the record is skipped. * If any of the grouping columns are not found in the value, the record is skipped.
* *
* @see org.apache.hadoop.hbase.mapred.TableMap#map(org.apache.hadoop.hbase.regionserver.HStoreKey, org.apache.hadoop.io.MapWritable, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) * @see org.apache.hadoop.hbase.mapred.TableMap#map(org.apache.hadoop.hbase.HStoreKey, org.apache.hadoop.io.MapWritable, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter)
*/ */
@Override @Override
public void map(@SuppressWarnings("unused") HStoreKey key, public void map(@SuppressWarnings("unused") HStoreKey key,

View File

@@ -21,7 +21,7 @@ package org.apache.hadoop.hbase.mapred;
import java.io.IOException; import java.io.IOException;
import org.apache.hadoop.hbase.regionserver.HStoreKey; import org.apache.hadoop.hbase.HStoreKey;
import org.apache.hadoop.io.MapWritable; import org.apache.hadoop.io.MapWritable;
import org.apache.hadoop.io.Text; import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.OutputCollector; import org.apache.hadoop.mapred.OutputCollector;
@@ -41,7 +41,7 @@ public class IdentityTableMap extends TableMap<Text, MapWritable> {
/** /**
* Pass the key, value to reduce * Pass the key, value to reduce
* *
* @see org.apache.hadoop.hbase.mapred.TableMap#map(org.apache.hadoop.hbase.regionserver.HStoreKey, org.apache.hadoop.io.MapWritable, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter) * @see org.apache.hadoop.hbase.mapred.TableMap#map(org.apache.hadoop.hbase.HStoreKey, org.apache.hadoop.io.MapWritable, org.apache.hadoop.mapred.OutputCollector, org.apache.hadoop.mapred.Reporter)
*/ */
@Override @Override
public void map(HStoreKey key, MapWritable value, public void map(HStoreKey key, MapWritable value,

View File

@@ -38,7 +38,7 @@ import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HScannerInterface; import org.apache.hadoop.hbase.HScannerInterface;
import org.apache.hadoop.hbase.regionserver.HStoreKey; import org.apache.hadoop.hbase.HStoreKey;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable; import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.client.HTable;
@@ -84,6 +84,7 @@ implements InputFormat<HStoreKey, MapWritable>, JobConfigurable {
} }
} }
/** {@inheritDoc} */
public void close() throws IOException { public void close() throws IOException {
this.m_scanner.close(); this.m_scanner.close();
} }
@@ -147,6 +148,7 @@ implements InputFormat<HStoreKey, MapWritable>, JobConfigurable {
} }
/** {@inheritDoc} */
public RecordReader<HStoreKey, MapWritable> getRecordReader( public RecordReader<HStoreKey, MapWritable> getRecordReader(
InputSplit split, InputSplit split,
@SuppressWarnings("unused") JobConf job, @SuppressWarnings("unused") JobConf job,
@@ -178,6 +180,7 @@ implements InputFormat<HStoreKey, MapWritable>, JobConfigurable {
return splits; return splits;
} }
/** {@inheritDoc} */
public void configure(JobConf job) { public void configure(JobConf job) {
Path[] tableNames = job.getInputPaths(); Path[] tableNames = job.getInputPaths();
m_tableName = new Text(tableNames[0].getName()); m_tableName = new Text(tableNames[0].getName());
@@ -194,6 +197,7 @@ implements InputFormat<HStoreKey, MapWritable>, JobConfigurable {
} }
} }
/** {@inheritDoc} */
public void validateInput(JobConf job) throws IOException { public void validateInput(JobConf job) throws IOException {
// expecting exactly one path // expecting exactly one path
Path[] tableNames = job.getInputPaths(); Path[] tableNames = job.getInputPaths();

View File

@@ -22,7 +22,7 @@ package org.apache.hadoop.hbase.mapred;
import java.io.IOException; import java.io.IOException;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.regionserver.HStoreKey; import org.apache.hadoop.hbase.HStoreKey;
import org.apache.hadoop.io.MapWritable; import org.apache.hadoop.io.MapWritable;
import org.apache.hadoop.io.Text; import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable; import org.apache.hadoop.io.Writable;
@@ -37,6 +37,8 @@ import org.apache.hadoop.mapred.Reporter;
* Scan an HBase table to sort by a specified sort column. * Scan an HBase table to sort by a specified sort column.
* If the column does not exist, the record is not passed to Reduce. * If the column does not exist, the record is not passed to Reduce.
* *
* @param <K> WritableComparable key class
* @param <V> Writable value class
*/ */
@SuppressWarnings("unchecked") @SuppressWarnings("unchecked")
public abstract class TableMap<K extends WritableComparable, V extends Writable> public abstract class TableMap<K extends WritableComparable, V extends Writable>

View File

@@ -60,6 +60,7 @@ import org.apache.hadoop.ipc.Server;
import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HStoreKey;
import org.apache.hadoop.hbase.Leases; import org.apache.hadoop.hbase.Leases;
import org.apache.hadoop.hbase.HServerAddress; import org.apache.hadoop.hbase.HServerAddress;
import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HColumnDescriptor;
@@ -80,7 +81,6 @@ import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.regionserver.HRegionInterface; import org.apache.hadoop.hbase.regionserver.HRegionInterface;
import org.apache.hadoop.hbase.regionserver.HRegion; import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.HStoreKey;
/** /**
* HMaster is the "master server" for a HBase. * HMaster is the "master server" for a HBase.

View File

@@ -27,9 +27,9 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.io.Text; import org.apache.hadoop.io.Text;
import org.apache.hadoop.hbase.HStoreKey;
import org.apache.hadoop.hbase.io.HbaseMapWritable; import org.apache.hadoop.hbase.io.HbaseMapWritable;
import org.apache.hadoop.io.Writable; import org.apache.hadoop.io.Writable;
import org.apache.hadoop.hbase.regionserver.HStoreKey;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable; import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
/* /*

View File

@@ -29,6 +29,7 @@ import java.util.regex.Pattern;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HStoreKey;
import org.apache.hadoop.io.Text; import org.apache.hadoop.io.Text;
/** /**
@@ -200,7 +201,7 @@ public abstract class HAbstractScanner implements HInternalScannerInterface {
* @return true if a match was found * @return true if a match was found
* @throws IOException * @throws IOException
* *
* @see org.apache.hadoop.hbase.HScannerInterface#next(org.apache.hadoop.hbase.regionserver.HStoreKey, java.util.SortedMap) * @see org.apache.hadoop.hbase.HScannerInterface#next(org.apache.hadoop.hbase.HStoreKey, java.util.SortedMap)
*/ */
public boolean next(HStoreKey key, SortedMap<Text, byte []> results) public boolean next(HStoreKey key, SortedMap<Text, byte []> results)
throws IOException { throws IOException {

View File

@@ -35,6 +35,7 @@ import java.util.concurrent.locks.ReentrantLock;
import org.apache.commons.logging.Log; import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil; import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
@@ -44,6 +45,7 @@ import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.io.SequenceFile.Reader; import org.apache.hadoop.io.SequenceFile.Reader;
import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HStoreKey;
import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseConfiguration;
@@ -501,7 +503,7 @@
*/ */
public static void splitLog(Path rootDir, Path srcDir, FileSystem fs, public static void splitLog(Path rootDir, Path srcDir, FileSystem fs,
Configuration conf) throws IOException { Configuration conf) throws IOException {
Path logfiles[] = fs.listPaths(new Path[] { srcDir }); FileStatus logfiles[] = fs.listStatus(srcDir);
LOG.info("splitting " + logfiles.length + " log(s) in " + LOG.info("splitting " + logfiles.length + " log(s) in " +
srcDir.toString()); srcDir.toString());
Map<Text, SequenceFile.Writer> logWriters = Map<Text, SequenceFile.Writer> logWriters =
@@ -513,14 +515,15 @@
logfiles[i]); logfiles[i]);
} }
// Check for empty file. // Check for empty file.
if (fs.getFileStatus(logfiles[i]).getLen() <= 0) { if (logfiles[i].getLen() <= 0) {
LOG.info("Skipping " + logfiles[i].toString() + LOG.info("Skipping " + logfiles[i].toString() +
" because zero length"); " because zero length");
continue; continue;
} }
HLogKey key = new HLogKey(); HLogKey key = new HLogKey();
HLogEdit val = new HLogEdit(); HLogEdit val = new HLogEdit();
SequenceFile.Reader in = new SequenceFile.Reader(fs, logfiles[i], conf); SequenceFile.Reader in =
new SequenceFile.Reader(fs, logfiles[i].getPath(), conf);
try { try {
int count = 0; int count = 0;
for (; in.next(key, val); count++) { for (; in.next(key, val); count++) {

View File

@@ -50,6 +50,7 @@ import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HStoreKey;
import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HScannerInterface; import org.apache.hadoop.hbase.HScannerInterface;
@@ -104,6 +105,11 @@ public class HRegion implements HConstants {
* Merge two HRegions. They must be available on the current * Merge two HRegions. They must be available on the current
* HRegionServer. Returns a brand-new active HRegion, also * HRegionServer. Returns a brand-new active HRegion, also
* running on the current HRegionServer. * running on the current HRegionServer.
*
* @param srcA
* @param srcB
* @return new merged HRegion
* @throws IOException
*/ */
public static HRegion closeAndMerge(final HRegion srcA, final HRegion srcB) public static HRegion closeAndMerge(final HRegion srcA, final HRegion srcB)
throws IOException { throws IOException {
@@ -538,6 +544,7 @@
////////////////////////////////////////////////////////////////////////////// //////////////////////////////////////////////////////////////////////////////
/** /**
* @param midkey
* @return returns size of largest HStore. Also returns whether store is * @return returns size of largest HStore. Also returns whether store is
* splitable or not (Its not splitable if region has a store that has a * splitable or not (Its not splitable if region has a store that has a
* reference store file). * reference store file).
@@ -769,6 +776,8 @@
* Note that no locking is necessary at this level because compaction only * Note that no locking is necessary at this level because compaction only
* conflicts with a region split, and that cannot happen because the region * conflicts with a region split, and that cannot happen because the region
* server does them sequentially and not in parallel. * server does them sequentially and not in parallel.
*
* @throws IOException
*/ */
public boolean compactStores() throws IOException { public boolean compactStores() throws IOException {
if (this.closed.get()) { if (this.closed.get()) {
@@ -1612,6 +1621,7 @@
return regionInfo.getRegionName().toString(); return regionInfo.getRegionName().toString();
} }
/** @return Path of region base directory */
public Path getBaseDir() { public Path getBaseDir() {
return this.basedir; return this.basedir;
} }

View File

@@ -70,6 +70,7 @@ import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HServerInfo; import org.apache.hadoop.hbase.HServerInfo;
import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HMsg; import org.apache.hadoop.hbase.HMsg;
import org.apache.hadoop.hbase.HStoreKey;
import org.apache.hadoop.hbase.Leases; import org.apache.hadoop.hbase.Leases;
import org.apache.hadoop.hbase.HServerAddress; import org.apache.hadoop.hbase.HServerAddress;
import org.apache.hadoop.hbase.RegionServerRunningException; import org.apache.hadoop.hbase.RegionServerRunningException;
@@ -1590,6 +1591,7 @@ public class HRegionServer implements HConstants, HRegionInterface, Runnable {
return this.serverInfo; return this.serverInfo;
} }
/** @return the info server */
public InfoServer getInfoServer() { public InfoServer getInfoServer() {
return infoServer; return infoServer;
} }

View File

@@ -39,6 +39,7 @@ import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory; import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FSDataInputStream; import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream; import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.filter.RowFilterInterface; import org.apache.hadoop.hbase.filter.RowFilterInterface;
@@ -60,6 +61,7 @@ import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HStoreKey;
import org.apache.hadoop.hbase.RemoteExceptionHandler; import org.apache.hadoop.hbase.RemoteExceptionHandler;
@@ -221,11 +223,12 @@
} }
/** /**
* Find the key that matches <i>row</i> exactly, or the one that immediately * @param row
* preceeds it. * @param timestamp
* @return the key that matches <i>row</i> exactly, or the one that
* immediately preceeds it.
*/ */
public Text getRowKeyAtOrBefore(final Text row, long timestamp) public Text getRowKeyAtOrBefore(final Text row, long timestamp) {
throws IOException{
this.lock.readLock().lock(); this.lock.readLock().lock();
Text key_memcache = null; Text key_memcache = null;
@@ -246,17 +249,17 @@
return key_snapshot; return key_snapshot;
} else if (key_memcache != null && key_snapshot == null) { } else if (key_memcache != null && key_snapshot == null) {
return key_memcache; return key_memcache;
} else { } else if ( (key_memcache != null && key_memcache.equals(row))
// if either is a precise match, return the original row.
if ( (key_memcache != null && key_memcache.equals(row))
|| (key_snapshot != null && key_snapshot.equals(row)) ) { || (key_snapshot != null && key_snapshot.equals(row)) ) {
// if either is a precise match, return the original row.
return row; return row;
} } else if (key_memcache != null) {
// no precise matches, so return the one that is closer to the search // no precise matches, so return the one that is closer to the search
// key (greatest) // key (greatest)
return key_memcache.compareTo(key_snapshot) > 0 ? return key_memcache.compareTo(key_snapshot) > 0 ?
key_memcache : key_snapshot; key_memcache : key_snapshot;
} }
return null;
} finally { } finally {
this.lock.readLock().unlock(); this.lock.readLock().unlock();
} }
@@ -869,10 +872,11 @@
} }
// Look first at info files. If a reference, these contain info we need // Look first at info files. If a reference, these contain info we need
// to create the HStoreFile. // to create the HStoreFile.
Path infofiles[] = fs.listPaths(new Path[] {infodir}); FileStatus infofiles[] = fs.listStatus(infodir);
ArrayList<HStoreFile> results = new ArrayList<HStoreFile>(infofiles.length); ArrayList<HStoreFile> results = new ArrayList<HStoreFile>(infofiles.length);
ArrayList<Path> mapfiles = new ArrayList<Path>(infofiles.length); ArrayList<Path> mapfiles = new ArrayList<Path>(infofiles.length);
for (Path p: infofiles) { for (int i = 0; i < infofiles.length; i++) {
Path p = infofiles[i].getPath();
Matcher m = REF_NAME_PARSER.matcher(p.getName()); Matcher m = REF_NAME_PARSER.matcher(p.getName());
/* /*
* * * * * N O T E * * * * * * * * * * N O T E * * * * *
@ -912,11 +916,12 @@ public class HStore implements HConstants {
// List paths by experience returns fully qualified names -- at least when // List paths by experience returns fully qualified names -- at least when
// running on a mini hdfs cluster. // running on a mini hdfs cluster.
Path datfiles[] = fs.listPaths(new Path[] {mapdir}); FileStatus datfiles[] = fs.listStatus(mapdir);
for (int i = 0; i < datfiles.length; i++) { for (int i = 0; i < datfiles.length; i++) {
Path p = datfiles[i].getPath();
// If does not have sympathetic info file, delete. // If does not have sympathetic info file, delete.
if (!mapfiles.contains(fs.makeQualified(datfiles[i]))) { if (!mapfiles.contains(fs.makeQualified(p))) {
fs.delete(datfiles[i]); fs.delete(p);
} }
} }
return results; return results;
@ -1817,8 +1822,11 @@ public class HStore implements HConstants {
} }
/** /**
* Find the key that matches <i>row</i> exactly, or the one that immediately * @return the key that matches <i>row</i> exactly, or the one that immediately
* preceeds it. * preceeds it.
* @param row
* @param timestamp
* @throws IOException
*/ */
public Text getRowKeyAtOrBefore(final Text row, final long timestamp) public Text getRowKeyAtOrBefore(final Text row, final long timestamp)
throws IOException{ throws IOException{

View File

@ -48,6 +48,7 @@ import org.onelab.filter.Key;
import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HStoreKey;
/** /**
* A HStore data file. HStores usually have one or more of these files. They * A HStore data file. HStores usually have one or more of these files. They
@ -523,18 +524,34 @@ public class HStoreFile implements HConstants {
((encodedRegionName != null) ? "." + encodedRegionName : ""); ((encodedRegionName != null) ? "." + encodedRegionName : "");
} }
/**
* @param dir
* @param encodedRegionName
* @param colFamily
* @return path for map file directory
*/
public static Path getMapDir(Path dir, String encodedRegionName, Text colFamily) { public static Path getMapDir(Path dir, String encodedRegionName, Text colFamily) {
return new Path(dir, new Path(encodedRegionName, return new Path(dir, new Path(encodedRegionName,
new Path(colFamily.toString(), HSTORE_DATFILE_DIR))); new Path(colFamily.toString(), HSTORE_DATFILE_DIR)));
} }
/** @return the info directory path */ /**
* @param dir
* @param encodedRegionName
* @param colFamily
* @return the info directory path
*/
public static Path getInfoDir(Path dir, String encodedRegionName, Text colFamily) { public static Path getInfoDir(Path dir, String encodedRegionName, Text colFamily) {
return new Path(dir, new Path(encodedRegionName, return new Path(dir, new Path(encodedRegionName,
new Path(colFamily.toString(), HSTORE_INFO_DIR))); new Path(colFamily.toString(), HSTORE_INFO_DIR)));
} }
/** @return the bloom filter directory path */ /**
* @param dir
* @param encodedRegionName
* @param colFamily
* @return the bloom filter directory path
*/
public static Path getFilterDir(Path dir, String encodedRegionName, Text colFamily) { public static Path getFilterDir(Path dir, String encodedRegionName, Text colFamily) {
return new Path(dir, new Path(encodedRegionName, return new Path(dir, new Path(encodedRegionName,
new Path(colFamily.toString(), HSTORE_FILTER_DIR))); new Path(colFamily.toString(), HSTORE_FILTER_DIR)));
@ -723,6 +740,14 @@ public class HStoreFile implements HConstants {
bloomFilter = filter; bloomFilter = filter;
} }
/**
* @param fs
* @param dirName
* @param conf
* @param filter
* @param blockCacheEnabled
* @throws IOException
*/
public Reader(FileSystem fs, String dirName, Configuration conf, public Reader(FileSystem fs, String dirName, Configuration conf,
final Filter filter, final boolean blockCacheEnabled) final Filter filter, final boolean blockCacheEnabled)
throws IOException { throws IOException {

View File

@ -35,7 +35,7 @@ import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.HBaseConfiguration; import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HScannerInterface; import org.apache.hadoop.hbase.HScannerInterface;
import org.apache.hadoop.hbase.regionserver.HStoreKey; import org.apache.hadoop.hbase.HStoreKey;
import org.apache.hadoop.hbase.util.JenkinsHash; import org.apache.hadoop.hbase.util.JenkinsHash;
import org.apache.hadoop.io.Text; import org.apache.hadoop.io.Text;
import org.mortbay.servlet.MultiPartResponse; import org.mortbay.servlet.MultiPartResponse;

View File

@ -35,7 +35,7 @@ import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HScannerInterface; import org.apache.hadoop.hbase.HScannerInterface;
import org.apache.hadoop.hbase.regionserver.HStoreKey; import org.apache.hadoop.hbase.HStoreKey;
import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.MasterNotRunningException; import org.apache.hadoop.hbase.MasterNotRunningException;
import org.apache.hadoop.hbase.thrift.generated.AlreadyExists; import org.apache.hadoop.hbase.thrift.generated.AlreadyExists;

View File

@ -58,12 +58,12 @@ import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionInfo; import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HScannerInterface; import org.apache.hadoop.hbase.HScannerInterface;
import org.apache.hadoop.hbase.HStoreKey;
import org.apache.hadoop.hbase.MasterNotRunningException; import org.apache.hadoop.hbase.MasterNotRunningException;
import org.apache.hadoop.hbase.regionserver.HLog; import org.apache.hadoop.hbase.regionserver.HLog;
import org.apache.hadoop.hbase.regionserver.HRegion; import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.HStore; import org.apache.hadoop.hbase.regionserver.HStore;
import org.apache.hadoop.hbase.regionserver.HStoreKey;
/** /**
* Perform a file system upgrade to convert older file layouts to that * Perform a file system upgrade to convert older file layouts to that

View File

@ -47,7 +47,6 @@ import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.regionserver.HRegion; import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.HStoreKey;
import org.apache.hadoop.hbase.regionserver.HRegionInterface; import org.apache.hadoop.hbase.regionserver.HRegionInterface;
/** /**

View File

@ -330,6 +330,7 @@ public abstract class HBaseTestCase extends TestCase {
public static interface Incommon { public static interface Incommon {
/** /**
* @param row * @param row
* @return update id
* @throws IOException * @throws IOException
*/ */
public long startUpdate(Text row) throws IOException; public long startUpdate(Text row) throws IOException;

View File

@ -35,7 +35,6 @@ import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.regionserver.HRegion; import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.HRegionServer; import org.apache.hadoop.hbase.regionserver.HRegionServer;
import org.apache.hadoop.hbase.regionserver.HStoreKey;
/** /**
* Utility class to build a table of multiple regions. * Utility class to build a table of multiple regions.

View File

@ -48,8 +48,8 @@ import org.apache.hadoop.mapred.TextOutputFormat;
import org.apache.log4j.Logger; import org.apache.log4j.Logger;
import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.io.BatchUpdate;
import org.apache.hadoop.hbase.regionserver.HStoreKey;
/** /**
* Script used evaluating HBase performance and scalability. Runs a HBase * Script used evaluating HBase performance and scalability. Runs a HBase
@ -386,9 +386,9 @@ public class PerformanceEvaluation implements HConstants {
@Override @Override
void testRow(@SuppressWarnings("unused") final int i) throws IOException { void testRow(@SuppressWarnings("unused") final int i) throws IOException {
Text row = getRandomRow(this.rand, this.totalRows); Text row = getRandomRow(this.rand, this.totalRows);
long lockid = table.startUpdate(row); BatchUpdate b = new BatchUpdate(row);
table.put(lockid, COLUMN_NAME, generateValue(this.rand)); b.put(COLUMN_NAME, generateValue(this.rand));
table.commit(lockid); table.commit(b);
} }
@Override @Override
@ -460,9 +460,9 @@ public class PerformanceEvaluation implements HConstants {
@Override @Override
void testRow(final int i) throws IOException { void testRow(final int i) throws IOException {
long lockid = table.startUpdate(format(i)); BatchUpdate b = new BatchUpdate(format(i));
table.put(lockid, COLUMN_NAME, generateValue(this.rand)); b.put(COLUMN_NAME, generateValue(this.rand));
table.commit(lockid); table.commit(b);
} }
@Override @Override
@ -535,7 +535,7 @@ public class PerformanceEvaluation implements HConstants {
return totalElapsedTime; return totalElapsedTime;
} }
private void runNIsOne(final String cmd) throws IOException { private void runNIsOne(final String cmd) {
Status status = new Status() { Status status = new Status() {
@SuppressWarnings("unused") @SuppressWarnings("unused")
public void setStatus(String msg) throws IOException { public void setStatus(String msg) throws IOException {

View File

@ -21,7 +21,6 @@ package org.apache.hadoop.hbase;
import org.apache.hadoop.io.Text; import org.apache.hadoop.io.Text;
import junit.framework.TestCase; import junit.framework.TestCase;
import org.apache.hadoop.hbase.regionserver.HStoreKey;
/** /**
* Test comparing HBase objects. * Test comparing HBase objects.

View File

@ -27,7 +27,7 @@ import java.util.TreeMap;
import org.apache.hadoop.io.Text; import org.apache.hadoop.io.Text;
import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.regionserver.HStoreKey; import org.apache.hadoop.hbase.io.BatchUpdate;
/** /**
* Test HBase Master and Region servers, client API * Test HBase Master and Region servers, client API
@ -97,12 +97,12 @@ public class TestHBaseCluster extends HBaseClusterTestCase {
// Write out a bunch of values // Write out a bunch of values
for (int k = FIRST_ROW; k <= NUM_VALS; k++) { for (int k = FIRST_ROW; k <= NUM_VALS; k++) {
long writeid = table.startUpdate(new Text("row_" + k)); BatchUpdate b = new BatchUpdate(new Text("row_" + k));
table.put(writeid, CONTENTS_BASIC, b.put(CONTENTS_BASIC,
(CONTENTSTR + k).getBytes(HConstants.UTF8_ENCODING)); (CONTENTSTR + k).getBytes(HConstants.UTF8_ENCODING));
table.put(writeid, new Text(ANCHORNUM + k), b.put(new Text(ANCHORNUM + k),
(ANCHORSTR + k).getBytes(HConstants.UTF8_ENCODING)); (ANCHORSTR + k).getBytes(HConstants.UTF8_ENCODING));
table.commit(writeid); table.commit(b);
} }
System.out.println("Write " + NUM_VALS + " rows. Elapsed time: " System.out.println("Write " + NUM_VALS + " rows. Elapsed time: "
+ ((System.currentTimeMillis() - startTime) / 1000.0)); + ((System.currentTimeMillis() - startTime) / 1000.0));

View File

@ -31,7 +31,6 @@ import org.apache.hadoop.io.Text;
import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.regionserver.HRegion; import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.HStoreKey;
import org.apache.hadoop.hbase.io.BatchUpdate; import org.apache.hadoop.hbase.io.BatchUpdate;
/** test the scanner API at all levels */ /** test the scanner API at all levels */

View File

@ -25,10 +25,10 @@ import java.util.Map;
import java.util.TreeMap; import java.util.TreeMap;
import org.apache.hadoop.io.Text; import org.apache.hadoop.io.Text;
import org.apache.hadoop.hbase.HBaseClusterTestCase; import org.apache.hadoop.hbase.HBaseClusterTestCase;
import org.apache.hadoop.hbase.HStoreKey;
import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HScannerInterface; import org.apache.hadoop.hbase.HScannerInterface;
import org.apache.hadoop.hbase.regionserver.HStoreKey;
import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HColumnDescriptor;
/** /**

View File

@ -30,10 +30,10 @@ import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hbase.HBaseClusterTestCase; import org.apache.hadoop.hbase.HBaseClusterTestCase;
import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HStoreKey;
import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableNotFoundException; import org.apache.hadoop.hbase.TableNotFoundException;
import org.apache.hadoop.hbase.HScannerInterface; import org.apache.hadoop.hbase.HScannerInterface;
import org.apache.hadoop.hbase.regionserver.HStoreKey;
/** /**
* Tests HTable * Tests HTable

View File

@ -39,8 +39,8 @@ import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.regionserver.HRegion; import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.HScannerInterface; import org.apache.hadoop.hbase.HScannerInterface;
import org.apache.hadoop.hbase.regionserver.HStoreKey;
import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.HStoreKey;
import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.MiniHBaseCluster; import org.apache.hadoop.hbase.MiniHBaseCluster;
import org.apache.hadoop.hbase.MultiRegionTable; import org.apache.hadoop.hbase.MultiRegionTable;

View File

@ -32,8 +32,8 @@ import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HScannerInterface; import org.apache.hadoop.hbase.HScannerInterface;
import org.apache.hadoop.hbase.regionserver.HStoreKey;
import org.apache.hadoop.hbase.client.HTable; import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.HStoreKey;
import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.MiniHBaseCluster; import org.apache.hadoop.hbase.MiniHBaseCluster;
import org.apache.hadoop.hbase.MultiRegionTable; import org.apache.hadoop.hbase.MultiRegionTable;

View File

@ -31,6 +31,7 @@ import org.apache.hadoop.io.MapFile;
import org.apache.hadoop.io.Text; import org.apache.hadoop.io.Text;
import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HStoreKey;
import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.StaticTestEnvironment; import org.apache.hadoop.hbase.StaticTestEnvironment;

View File

@ -30,8 +30,8 @@ import org.apache.hadoop.io.Text;
import org.apache.hadoop.hbase.HBaseTestCase; import org.apache.hadoop.hbase.HBaseTestCase;
import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HScannerInterface; import org.apache.hadoop.hbase.HScannerInterface;
import org.apache.hadoop.hbase.HStoreKey;
import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.HRegionInfo;
/** /**
@ -148,7 +148,10 @@ public class TestGet2 extends HBaseTestCase {
} }
} }
/** For HADOOP-2443 */ /**
* For HADOOP-2443
* @throws IOException
*/
public void testGetClosestRowBefore() throws IOException{ public void testGetClosestRowBefore() throws IOException{
HRegion region = null; HRegion region = null;
@ -156,7 +159,6 @@ public class TestGet2 extends HBaseTestCase {
try { try {
HTableDescriptor htd = createTableDescriptor(getName()); HTableDescriptor htd = createTableDescriptor(getName());
HRegionInfo hri = new HRegionInfo(htd, null, null);
region = createNewHRegion(htd, null, null); region = createNewHRegion(htd, null, null);
region_incommon = new HRegionIncommon(region); region_incommon = new HRegionIncommon(region);

View File

@ -30,6 +30,7 @@ import org.apache.hadoop.io.SequenceFile.Reader;
import org.apache.hadoop.hbase.HBaseTestCase; import org.apache.hadoop.hbase.HBaseTestCase;
import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HStoreKey;
import org.apache.hadoop.hbase.StaticTestEnvironment; import org.apache.hadoop.hbase.StaticTestEnvironment;
/** JUnit test case for HLog */ /** JUnit test case for HLog */

View File

@ -29,6 +29,7 @@ import junit.framework.TestCase;
import org.apache.hadoop.io.Text; import org.apache.hadoop.io.Text;
import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HStoreKey;
/** memcache test case */ /** memcache test case */
public class TestHMemcache extends TestCase { public class TestHMemcache extends TestCase {

View File

@ -32,9 +32,9 @@ import org.apache.hadoop.io.Text;
import org.apache.hadoop.hbase.HBaseTestCase; import org.apache.hadoop.hbase.HBaseTestCase;
import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.log4j.Logger; import org.apache.log4j.Logger;
import org.apache.hadoop.hbase.HStoreKey;
import org.apache.hadoop.hbase.StaticTestEnvironment; import org.apache.hadoop.hbase.StaticTestEnvironment;
import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HScannerInterface; import org.apache.hadoop.hbase.HScannerInterface;

View File

@ -33,6 +33,7 @@ import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.hbase.HBaseTestCase; import org.apache.hadoop.hbase.HBaseTestCase;
import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HStoreKey;
import org.apache.hadoop.hbase.StaticTestEnvironment; import org.apache.hadoop.hbase.StaticTestEnvironment;
/** /**
* Test HStoreFile * Test HStoreFile

View File

@ -34,6 +34,7 @@ import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.HBaseClusterTestCase; import org.apache.hadoop.hbase.HBaseClusterTestCase;
import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HStoreKey;
import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.HColumnDescriptor; import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.LocalHBaseCluster; import org.apache.hadoop.hbase.LocalHBaseCluster;

View File

@ -33,6 +33,7 @@ import org.apache.hadoop.hbase.HRegionInfo;
import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HScannerInterface; import org.apache.hadoop.hbase.HScannerInterface;
import org.apache.hadoop.hbase.HServerAddress; import org.apache.hadoop.hbase.HServerAddress;
import org.apache.hadoop.hbase.HStoreKey;
import org.apache.hadoop.hbase.StaticTestEnvironment; import org.apache.hadoop.hbase.StaticTestEnvironment;
/** /**

View File

@ -28,6 +28,7 @@ import org.apache.hadoop.dfs.MiniDFSCluster;
import org.apache.hadoop.io.Text; import org.apache.hadoop.io.Text;
import org.apache.log4j.Level; import org.apache.log4j.Level;
import org.apache.log4j.Logger; import org.apache.log4j.Logger;
import org.apache.hadoop.hbase.HStoreKey;
import org.apache.hadoop.hbase.MultiRegionTable; import org.apache.hadoop.hbase.MultiRegionTable;
import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HTableDescriptor; import org.apache.hadoop.hbase.HTableDescriptor;

View File

@ -30,6 +30,7 @@ import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.HBaseAdmin; import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.HBaseTestCase; import org.apache.hadoop.hbase.HBaseTestCase;
import org.apache.hadoop.hbase.HConstants; import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HStoreKey;
import org.apache.hadoop.hbase.StaticTestEnvironment; import org.apache.hadoop.hbase.StaticTestEnvironment;
import org.apache.hadoop.hbase.MiniHBaseCluster; import org.apache.hadoop.hbase.MiniHBaseCluster;
import org.apache.hadoop.hbase.HScannerInterface; import org.apache.hadoop.hbase.HScannerInterface;