419 Move RegionServer and related classes into regionserver package
git-svn-id: https://svn.apache.org/repos/asf/hadoop/hbase/trunk@630550 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in: parent 1635b75918, commit dcbc0773fc
@@ -162,7 +162,7 @@ if [ "$COMMAND" = "shell" ] ; then
 elif [ "$COMMAND" = "master" ] ; then
   CLASS='org.apache.hadoop.hbase.master.HMaster'
 elif [ "$COMMAND" = "regionserver" ] ; then
-  CLASS='org.apache.hadoop.hbase.HRegionServer'
+  CLASS='org.apache.hadoop.hbase.regionserver.HRegionServer'
 elif [ "$COMMAND" = "rest" ] ; then
   CLASS='org.apache.hadoop.hbase.rest.Dispatcher'
 elif [ "$COMMAND" = "thrift" ] ; then
@@ -72,7 +72,7 @@
 </property>
 <property>
   <name>hbase.regionserver.class</name>
-  <value>org.apache.hadoop.hbase.HRegionInterface</value>
+  <value>org.apache.hadoop.hbase.regionserver.HRegionInterface</value>
   <description>An interface that is assignable to HRegionInterface. Used in HClient for
   opening proxy to remote region server.
   </description>
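The configured interface is resolved by class name on the client side (the description above says it is used when opening the proxy to a region server), so any site file that overrides hbase.regionserver.class needs the same rename. A minimal sketch of that lookup, assuming the stock hbase-default.xml is on the classpath; the fallback literal is this patch's new default, not a value read from anyone's site file:

import org.apache.hadoop.conf.Configuration;

public class RegionServerClassCheck {
  public static void main(String[] args) throws ClassNotFoundException {
    Configuration conf = new Configuration();
    conf.addResource("hbase-default.xml");
    // After this change the configured interface lives in the regionserver package.
    String className = conf.get("hbase.regionserver.class",
        "org.apache.hadoop.hbase.regionserver.HRegionInterface");
    Class<?> serverInterface = Class.forName(className);
    System.out.println("Resolved " + serverInterface.getName());
  }
}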
@@ -20,6 +20,7 @@
 package org.apache.hadoop.hbase;

 import org.apache.hadoop.io.Text;
+import org.apache.hadoop.hbase.regionserver.HRegionInterface;

 /**
  * HConstants holds a bunch of HBase-related constants
@@ -37,6 +37,11 @@ import org.apache.hadoop.hbase.io.BatchUpdate;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.HConnection;
 import org.apache.hadoop.hbase.client.HConnectionManager;
+
+import org.apache.hadoop.hbase.regionserver.HLog;
+import org.apache.hadoop.hbase.regionserver.HRegion;
+import org.apache.hadoop.hbase.regionserver.HStoreKey;

 /**
  * A non-instantiable class that has a static method capable of compacting
  * a table by merging adjacent regions that have grown too small.
@@ -26,6 +26,7 @@ import java.util.Map;
 import java.util.SortedMap;

 import org.apache.hadoop.io.Text;
+import org.apache.hadoop.hbase.regionserver.HStoreKey;

 /**
  * HScannerInterface iterates through a set of rows. It's implemented by
@@ -32,6 +32,8 @@ import org.apache.hadoop.util.ReflectionUtils;
 import org.apache.hadoop.hbase.master.HMaster;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
+
+import org.apache.hadoop.hbase.regionserver.HRegionServer;

 /**
  * This class creates a single process HBase cluster. One thread is created for
  * a master and one per region server.
@@ -159,7 +161,7 @@ public class LocalHBaseCluster implements HConstants {
     while (regionServerThread.isAlive()) {
       try {
         LOG.info("Waiting on " +
-          regionServerThread.getRegionServer().serverInfo.toString());
+          regionServerThread.getRegionServer().getServerInfo().toString());
         regionServerThread.join();
       } catch (InterruptedException e) {
         e.printStackTrace();
@ -41,10 +41,11 @@ import org.apache.hadoop.hbase.HRegionLocation;
|
|||
import org.apache.hadoop.hbase.TableNotFoundException;
|
||||
import org.apache.hadoop.hbase.TableExistsException;
|
||||
import org.apache.hadoop.hbase.RemoteExceptionHandler;
|
||||
import org.apache.hadoop.hbase.HRegionInterface;
|
||||
import org.apache.hadoop.hbase.HRegionInfo;
|
||||
import org.apache.hadoop.hbase.HRegionLocation;
|
||||
import org.apache.hadoop.hbase.HStoreKey;
|
||||
|
||||
import org.apache.hadoop.hbase.regionserver.HRegionInterface;
|
||||
import org.apache.hadoop.hbase.regionserver.HStoreKey;
|
||||
|
||||
/**
|
||||
* Provides administrative functions for HBase
|
||||
|
|
|
@ -28,7 +28,8 @@ import org.apache.hadoop.hbase.MasterNotRunningException;
|
|||
import org.apache.hadoop.hbase.HTableDescriptor;
|
||||
import org.apache.hadoop.hbase.HRegionLocation;
|
||||
import org.apache.hadoop.hbase.HServerAddress;
|
||||
import org.apache.hadoop.hbase.HRegionInterface;
|
||||
import org.apache.hadoop.hbase.regionserver.HRegionInterface;
|
||||
|
||||
/**
|
||||
*
|
||||
*/
|
||||
|
|
|
@ -42,20 +42,19 @@ import org.apache.hadoop.hbase.util.SoftSortedMap;
|
|||
import org.apache.hadoop.hbase.HTableDescriptor;
|
||||
import org.apache.hadoop.hbase.HConstants;
|
||||
import org.apache.hadoop.hbase.HBaseConfiguration;
|
||||
import org.apache.hadoop.hbase.HRegionInterface;
|
||||
import org.apache.hadoop.hbase.MasterNotRunningException;
|
||||
import org.apache.hadoop.hbase.HRegionLocation;
|
||||
import org.apache.hadoop.hbase.HServerAddress;
|
||||
import org.apache.hadoop.hbase.LocalHBaseCluster;
|
||||
import org.apache.hadoop.hbase.HStoreKey;
|
||||
import org.apache.hadoop.hbase.HRegionInterface;
|
||||
import org.apache.hadoop.hbase.HRegionLocation;
|
||||
import org.apache.hadoop.hbase.HRegionInfo;
|
||||
import org.apache.hadoop.hbase.HStoreKey;
|
||||
import org.apache.hadoop.hbase.NoServerForRegionException;
|
||||
import org.apache.hadoop.hbase.TableNotFoundException;
|
||||
import org.apache.hadoop.hbase.RemoteExceptionHandler;
|
||||
|
||||
import org.apache.hadoop.hbase.regionserver.HRegionInterface;
|
||||
import org.apache.hadoop.hbase.regionserver.HStoreKey;
|
||||
|
||||
/**
|
||||
* A non-instantiable class that manages connections to multiple tables in
|
||||
* multiple HBase instances
|
||||
|
|
|
@ -47,14 +47,13 @@ import org.apache.hadoop.hbase.HConstants;
|
|||
import org.apache.hadoop.hbase.HBaseConfiguration;
|
||||
import org.apache.hadoop.hbase.HRegionLocation;
|
||||
import org.apache.hadoop.hbase.HScannerInterface;
|
||||
import org.apache.hadoop.hbase.HRegionInterface;
|
||||
import org.apache.hadoop.hbase.HStoreKey;
|
||||
import org.apache.hadoop.hbase.HTableDescriptor;
|
||||
import org.apache.hadoop.hbase.HRegionInfo;
|
||||
import org.apache.hadoop.hbase.RemoteExceptionHandler;
|
||||
import org.apache.hadoop.hbase.NotServingRegionException;
|
||||
import org.apache.hadoop.hbase.HRegionInterface;
|
||||
import org.apache.hadoop.hbase.HStoreKey;
|
||||
|
||||
import org.apache.hadoop.hbase.regionserver.HStoreKey;
|
||||
import org.apache.hadoop.hbase.regionserver.HRegionInterface;
|
||||
|
||||
/**
|
||||
* Used to communicate with a single HBase table
|
||||
|
|
|
@ -35,7 +35,7 @@ import org.apache.commons.logging.Log;
|
|||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.hadoop.io.Text;
|
||||
|
||||
import org.apache.hadoop.hbase.HLogEdit;
|
||||
import org.apache.hadoop.hbase.regionserver.HLogEdit;
|
||||
|
||||
/**
|
||||
* Implementation of RowFilterInterface that can filter by rowkey regular
|
||||
|
|
|
@ -6,8 +6,8 @@ import javax.servlet.jsp.*;
|
|||
import java.util.*;
|
||||
import org.apache.hadoop.io.Text;
|
||||
import org.apache.hadoop.util.VersionInfo;
|
||||
import org.apache.hadoop.hbase.HRegionServer;
|
||||
import org.apache.hadoop.hbase.HRegion;
|
||||
import org.apache.hadoop.hbase.regionserver.HRegionServer;
|
||||
import org.apache.hadoop.hbase.regionserver.HRegion;
|
||||
import org.apache.hadoop.hbase.HConstants;
|
||||
import org.apache.hadoop.hbase.HServerInfo;
|
||||
import org.apache.hadoop.hbase.HRegionInfo;
|
||||
|
|
|
@ -31,7 +31,7 @@ import java.util.TreeMap;
|
|||
import org.apache.hadoop.hbase.HBaseConfiguration;
|
||||
import org.apache.hadoop.hbase.HConstants;
|
||||
import org.apache.hadoop.hbase.HScannerInterface;
|
||||
import org.apache.hadoop.hbase.HStoreKey;
|
||||
import org.apache.hadoop.hbase.regionserver.HStoreKey;
|
||||
import org.apache.hadoop.hbase.HTableDescriptor;
|
||||
import org.apache.hadoop.hbase.Shell;
|
||||
import org.apache.hadoop.hbase.filter.RowFilterInterface;
|
||||
|
|
|
@ -30,11 +30,12 @@ import java.util.concurrent.atomic.AtomicReference;
|
|||
|
||||
import org.apache.hadoop.conf.Configurable;
|
||||
import org.apache.hadoop.conf.Configuration;
|
||||
import org.apache.hadoop.hbase.HStoreKey;
|
||||
import org.apache.hadoop.io.Text;
|
||||
import org.apache.hadoop.io.Writable;
|
||||
import org.apache.hadoop.util.ReflectionUtils;
|
||||
|
||||
import org.apache.hadoop.hbase.regionserver.HStoreKey;
|
||||
|
||||
/**
|
||||
* A Writable Map.
|
||||
* Like {@link org.apache.hadoop.io.MapWritable} but dumb. It will fail
|
||||
|
|
|
@ -25,7 +25,7 @@ import java.util.ArrayList;
|
|||
import java.util.Map;
|
||||
|
||||
import org.apache.hadoop.hbase.HConstants;
|
||||
import org.apache.hadoop.hbase.HStoreKey;
|
||||
import org.apache.hadoop.hbase.regionserver.HStoreKey;
|
||||
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
|
||||
import org.apache.hadoop.io.MapWritable;
|
||||
import org.apache.hadoop.io.Text;
|
||||
|
|
|
@ -21,7 +21,7 @@ package org.apache.hadoop.hbase.mapred;
|
|||
|
||||
import java.io.IOException;
|
||||
|
||||
import org.apache.hadoop.hbase.HStoreKey;
|
||||
import org.apache.hadoop.hbase.regionserver.HStoreKey;
|
||||
import org.apache.hadoop.io.MapWritable;
|
||||
import org.apache.hadoop.io.Text;
|
||||
import org.apache.hadoop.mapred.OutputCollector;
|
||||
|
|
|
@ -38,7 +38,7 @@ import org.apache.hadoop.mapred.Reporter;
|
|||
|
||||
import org.apache.hadoop.hbase.HBaseConfiguration;
|
||||
import org.apache.hadoop.hbase.HScannerInterface;
|
||||
import org.apache.hadoop.hbase.HStoreKey;
|
||||
import org.apache.hadoop.hbase.regionserver.HStoreKey;
|
||||
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
|
||||
import org.apache.hadoop.hbase.client.HTable;
|
||||
|
||||
|
|
|
@ -22,7 +22,7 @@ package org.apache.hadoop.hbase.mapred;
|
|||
import java.io.IOException;
|
||||
|
||||
import org.apache.hadoop.fs.Path;
|
||||
import org.apache.hadoop.hbase.HStoreKey;
|
||||
import org.apache.hadoop.hbase.regionserver.HStoreKey;
|
||||
import org.apache.hadoop.io.MapWritable;
|
||||
import org.apache.hadoop.io.Text;
|
||||
import org.apache.hadoop.io.Writable;
|
||||
|
|
|
@ -23,7 +23,7 @@ import java.io.IOException;
|
|||
|
||||
import org.apache.hadoop.hbase.HColumnDescriptor;
|
||||
import org.apache.hadoop.hbase.HRegionInfo;
|
||||
import org.apache.hadoop.hbase.HRegionInterface;
|
||||
import org.apache.hadoop.hbase.regionserver.HRegionInterface;
|
||||
import org.apache.hadoop.io.Text;
|
||||
|
||||
/** Instantiated to add a column family to a table */
|
||||
|
|
|
@ -32,8 +32,6 @@ import org.apache.hadoop.ipc.RemoteException;
|
|||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.hadoop.hbase.Chore;
|
||||
import org.apache.hadoop.hbase.HRegion;
|
||||
import org.apache.hadoop.hbase.HRegionInterface;
|
||||
import org.apache.hadoop.hbase.HRegionInfo;
|
||||
import org.apache.hadoop.hbase.HServerInfo;
|
||||
import org.apache.hadoop.io.Text;
|
||||
|
@ -44,11 +42,15 @@ import org.apache.hadoop.hbase.RemoteExceptionHandler;
|
|||
import org.apache.hadoop.hbase.UnknownScannerException;
|
||||
import org.apache.hadoop.hbase.HTableDescriptor;
|
||||
import org.apache.hadoop.hbase.HColumnDescriptor;
|
||||
import org.apache.hadoop.hbase.HStoreFile;
|
||||
import org.apache.hadoop.hbase.HStore;
|
||||
import org.apache.hadoop.hbase.HLog;
|
||||
import org.apache.hadoop.hbase.io.BatchUpdate;
|
||||
|
||||
import org.apache.hadoop.hbase.regionserver.HRegion;
|
||||
import org.apache.hadoop.hbase.regionserver.HStoreFile;
|
||||
import org.apache.hadoop.hbase.regionserver.HStore;
|
||||
import org.apache.hadoop.hbase.regionserver.HLog;
|
||||
import org.apache.hadoop.hbase.regionserver.HRegionInterface;
|
||||
|
||||
|
||||
/**
|
||||
* Base HRegion scanner class. Holds utilty common to <code>ROOT</code> and
|
||||
* <code>META</code> HRegion scanners.
|
||||
|
|
|
@ -25,7 +25,7 @@ import java.util.HashSet;
|
|||
import java.util.Map;
|
||||
|
||||
import org.apache.hadoop.hbase.HRegionInfo;
|
||||
import org.apache.hadoop.hbase.HRegionInterface;
|
||||
import org.apache.hadoop.hbase.regionserver.HRegionInterface;
|
||||
import org.apache.hadoop.hbase.io.BatchUpdate;
|
||||
import org.apache.hadoop.hbase.util.Writables;
|
||||
import org.apache.hadoop.io.Text;
|
||||
|
|
|
@ -22,7 +22,7 @@ package org.apache.hadoop.hbase.master;
|
|||
import java.io.IOException;
|
||||
import org.apache.hadoop.io.Text;
|
||||
import org.apache.hadoop.hbase.HRegionInfo;
|
||||
import org.apache.hadoop.hbase.HRegionInterface;
|
||||
import org.apache.hadoop.hbase.regionserver.HRegionInterface;
|
||||
import org.apache.hadoop.hbase.io.BatchUpdate;
|
||||
import org.apache.hadoop.hbase.TableNotDisabledException;
|
||||
import org.apache.hadoop.hbase.util.Writables;
|
||||
|
|
|
@ -23,9 +23,9 @@ import java.io.IOException;
|
|||
|
||||
import org.apache.hadoop.io.Text;
|
||||
import org.apache.hadoop.hbase.HRegionInfo;
|
||||
import org.apache.hadoop.hbase.HRegionInterface;
|
||||
import org.apache.hadoop.fs.Path;
|
||||
import org.apache.hadoop.hbase.HStoreFile;
|
||||
import org.apache.hadoop.hbase.regionserver.HStoreFile;
|
||||
import org.apache.hadoop.hbase.regionserver.HRegionInterface;
|
||||
|
||||
/** Instantiated to remove a column family from a table */
|
||||
class DeleteColumn extends ColumnOperation {
|
||||
|
|
|
@ -62,24 +62,26 @@ import org.apache.hadoop.hbase.HConstants;
|
|||
import org.apache.hadoop.hbase.HBaseConfiguration;
|
||||
import org.apache.hadoop.hbase.Leases;
|
||||
import org.apache.hadoop.hbase.HServerAddress;
|
||||
import org.apache.hadoop.hbase.client.HConnection;
|
||||
import org.apache.hadoop.hbase.client.HConnectionManager;
|
||||
import org.apache.hadoop.hbase.HColumnDescriptor;
|
||||
import org.apache.hadoop.hbase.HTableDescriptor;
|
||||
import org.apache.hadoop.hbase.HRegionInterface;
|
||||
import org.apache.hadoop.hbase.HRegionInfo;
|
||||
import org.apache.hadoop.hbase.HServerLoad;
|
||||
import org.apache.hadoop.hbase.HRegion;
|
||||
import org.apache.hadoop.hbase.RemoteExceptionHandler;
|
||||
import org.apache.hadoop.hbase.HMsg;
|
||||
import org.apache.hadoop.hbase.client.HBaseAdmin;
|
||||
import org.apache.hadoop.hbase.LocalHBaseCluster;
|
||||
import org.apache.hadoop.hbase.HStoreKey;
|
||||
import org.apache.hadoop.hbase.HServerInfo;
|
||||
import org.apache.hadoop.hbase.TableExistsException;
|
||||
import org.apache.hadoop.hbase.MasterNotRunningException;
|
||||
import org.apache.hadoop.hbase.LeaseListener;
|
||||
|
||||
import org.apache.hadoop.hbase.client.HConnection;
|
||||
import org.apache.hadoop.hbase.client.HConnectionManager;
|
||||
import org.apache.hadoop.hbase.client.HBaseAdmin;
|
||||
|
||||
import org.apache.hadoop.hbase.regionserver.HRegionInterface;
|
||||
import org.apache.hadoop.hbase.regionserver.HRegion;
|
||||
import org.apache.hadoop.hbase.regionserver.HStoreKey;
|
||||
|
||||
/**
|
||||
* HMaster is the "master server" for a HBase.
|
||||
* There is only one HMaster for a single HBase deployment.
|
||||
|
|
|
@ -22,7 +22,7 @@ package org.apache.hadoop.hbase.master;
|
|||
import java.util.Map;
|
||||
import java.io.IOException;
|
||||
import org.apache.hadoop.hbase.HColumnDescriptor;
|
||||
import org.apache.hadoop.hbase.HRegionInterface;
|
||||
import org.apache.hadoop.hbase.regionserver.HRegionInterface;
|
||||
import org.apache.hadoop.hbase.HRegionInfo;
|
||||
import org.apache.hadoop.io.Text;
|
||||
|
||||
|
|
|
@ -21,7 +21,7 @@ package org.apache.hadoop.hbase.master;
|
|||
|
||||
import java.io.IOException;
|
||||
|
||||
import org.apache.hadoop.hbase.HRegion;
|
||||
import org.apache.hadoop.hbase.regionserver.HRegion;
|
||||
import org.apache.hadoop.hbase.HRegionInfo;
|
||||
import org.apache.hadoop.hbase.RemoteExceptionHandler;
|
||||
|
||||
|
|
|
@ -25,7 +25,7 @@ import org.apache.hadoop.hbase.HServerAddress;
|
|||
import org.apache.hadoop.hbase.HServerInfo;
|
||||
import org.apache.hadoop.hbase.HRegionInfo;
|
||||
import org.apache.hadoop.hbase.util.Writables;
|
||||
import org.apache.hadoop.hbase.HRegionInterface;
|
||||
import org.apache.hadoop.hbase.regionserver.HRegionInterface;
|
||||
import org.apache.hadoop.hbase.io.BatchUpdate;
|
||||
import org.apache.hadoop.hbase.RemoteExceptionHandler;
|
||||
|
||||
|
|
|
@ -22,7 +22,7 @@ package org.apache.hadoop.hbase.master;
|
|||
import java.io.IOException;
|
||||
|
||||
import org.apache.hadoop.hbase.HRegionInfo;
|
||||
import org.apache.hadoop.hbase.HRegionInterface;
|
||||
import org.apache.hadoop.hbase.regionserver.HRegionInterface;
|
||||
import org.apache.hadoop.hbase.HServerAddress;
|
||||
import org.apache.hadoop.io.Text;
|
||||
|
||||
|
|
|
@ -31,13 +31,14 @@ import org.apache.hadoop.hbase.HServerAddress;
|
|||
import org.apache.hadoop.fs.Path;
|
||||
import org.apache.hadoop.hbase.HServerInfo;
|
||||
import org.apache.hadoop.hbase.HRegionInfo;
|
||||
import org.apache.hadoop.hbase.HRegionInterface;
|
||||
import org.apache.hadoop.hbase.HRegion;
|
||||
import org.apache.hadoop.io.Text;
|
||||
import org.apache.hadoop.hbase.io.HbaseMapWritable;
|
||||
import org.apache.hadoop.hbase.RemoteExceptionHandler;
|
||||
import org.apache.hadoop.hbase.util.Writables;
|
||||
import org.apache.hadoop.hbase.HLog;
|
||||
|
||||
import org.apache.hadoop.hbase.regionserver.HRegionInterface;
|
||||
import org.apache.hadoop.hbase.regionserver.HRegion;
|
||||
import org.apache.hadoop.hbase.regionserver.HLog;
|
||||
|
||||
/**
|
||||
* Instantiated when a server's lease has expired, meaning it has crashed.
|
||||
|
|
|
@ -45,8 +45,8 @@ import org.apache.hadoop.hbase.HServerAddress;
|
|||
import org.apache.hadoop.hbase.HServerInfo;
|
||||
import org.apache.hadoop.hbase.HServerLoad;
|
||||
import org.apache.hadoop.hbase.HRegionInfo;
|
||||
import org.apache.hadoop.hbase.HRegion;
|
||||
import org.apache.hadoop.hbase.HRegionInterface;
|
||||
import org.apache.hadoop.hbase.regionserver.HRegion;
|
||||
import org.apache.hadoop.hbase.regionserver.HRegionInterface;
|
||||
import org.apache.hadoop.hbase.HMsg;
|
||||
import org.apache.hadoop.hbase.util.Threads;
|
||||
import org.apache.hadoop.hbase.io.BatchUpdate;
|
||||
|
|
|
@ -29,7 +29,7 @@ import org.apache.commons.logging.LogFactory;
|
|||
import org.apache.hadoop.io.Text;
|
||||
import org.apache.hadoop.hbase.io.HbaseMapWritable;
|
||||
import org.apache.hadoop.io.Writable;
|
||||
import org.apache.hadoop.hbase.HStoreKey;
|
||||
import org.apache.hadoop.hbase.regionserver.HStoreKey;
|
||||
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
|
||||
|
||||
/*
|
||||
|
|
|
@ -23,13 +23,14 @@ import java.io.IOException;
|
|||
import java.util.HashSet;
|
||||
|
||||
import org.apache.hadoop.fs.Path;
|
||||
import org.apache.hadoop.hbase.HRegion;
|
||||
import org.apache.hadoop.hbase.HRegionInfo;
|
||||
import org.apache.hadoop.hbase.HRegionInterface;
|
||||
import org.apache.hadoop.hbase.RemoteExceptionHandler;
|
||||
import org.apache.hadoop.hbase.io.BatchUpdate;
|
||||
import org.apache.hadoop.io.Text;
|
||||
|
||||
import org.apache.hadoop.hbase.regionserver.HRegion;
|
||||
import org.apache.hadoop.hbase.regionserver.HRegionInterface;
|
||||
|
||||
/**
|
||||
* Instantiated to delete a table
|
||||
* Note that it extends ChangeTableState, which takes care of disabling
|
||||
|
|
|
@ -28,7 +28,7 @@ import org.apache.commons.logging.Log;
|
|||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.hadoop.hbase.HConstants;
|
||||
import org.apache.hadoop.hbase.HRegionInfo;
|
||||
import org.apache.hadoop.hbase.HRegionInterface;
|
||||
import org.apache.hadoop.hbase.regionserver.HRegionInterface;
|
||||
import org.apache.hadoop.hbase.HServerInfo;
|
||||
import org.apache.hadoop.hbase.MasterNotRunningException;
|
||||
import org.apache.hadoop.hbase.RemoteExceptionHandler;
|
||||
|
|
|
@ -18,7 +18,7 @@
|
|||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.hbase;
|
||||
package org.apache.hadoop.hbase.regionserver;
|
||||
|
||||
/**
|
||||
* Implementors of this interface want to be notified when an HRegion
|
|
@ -17,7 +17,7 @@
|
|||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.hadoop.hbase;
|
||||
package org.apache.hadoop.hbase.regionserver;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Iterator;
|
|
@ -17,7 +17,9 @@
|
|||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.hadoop.hbase;
|
||||
package org.apache.hadoop.hbase.regionserver;
|
||||
|
||||
import org.apache.hadoop.hbase.HScannerInterface;
|
||||
|
||||
/**
|
||||
* Internally, we need to be able to determine if the scanner is doing wildcard
|
|
@ -17,7 +17,7 @@
|
|||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.hadoop.hbase;
|
||||
package org.apache.hadoop.hbase.regionserver;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.FileNotFoundException;
|
||||
|
@ -43,6 +43,11 @@ import org.apache.hadoop.io.Text;
|
|||
import org.apache.hadoop.io.SequenceFile.CompressionType;
|
||||
import org.apache.hadoop.io.SequenceFile.Reader;
|
||||
|
||||
import org.apache.hadoop.hbase.HConstants;
|
||||
import org.apache.hadoop.hbase.HTableDescriptor;
|
||||
import org.apache.hadoop.hbase.HRegionInfo;
|
||||
import org.apache.hadoop.hbase.HBaseConfiguration;
|
||||
|
||||
/**
|
||||
* HLog stores all the edits to the HStore.
|
||||
*
|
|
@ -17,13 +17,15 @@
|
|||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.hadoop.hbase;
|
||||
package org.apache.hadoop.hbase.regionserver;
|
||||
|
||||
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
|
||||
import org.apache.hadoop.io.*;
|
||||
|
||||
import java.io.*;
|
||||
|
||||
import org.apache.hadoop.hbase.HConstants;
|
||||
|
||||
/**
|
||||
* A log value.
|
||||
*
|
|
@ -17,7 +17,7 @@
|
|||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.hadoop.hbase;
|
||||
package org.apache.hadoop.hbase.regionserver;
|
||||
|
||||
import org.apache.hadoop.io.*;
|
||||
|
|
@ -17,7 +17,7 @@
|
|||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.hadoop.hbase;
|
||||
package org.apache.hadoop.hbase.regionserver;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
|
@ -47,6 +47,15 @@ import org.apache.hadoop.io.Text;
|
|||
import org.apache.hadoop.io.WritableUtils;
|
||||
import org.apache.hadoop.util.StringUtils;
|
||||
|
||||
import org.apache.hadoop.hbase.HConstants;
|
||||
import org.apache.hadoop.hbase.HBaseConfiguration;
|
||||
import org.apache.hadoop.hbase.HRegionInfo;
|
||||
import org.apache.hadoop.hbase.HTableDescriptor;
|
||||
import org.apache.hadoop.hbase.HColumnDescriptor;
|
||||
import org.apache.hadoop.hbase.HScannerInterface;
|
||||
import org.apache.hadoop.hbase.DroppedSnapshotException;
|
||||
import org.apache.hadoop.hbase.WrongRegionException;
|
||||
|
||||
/**
|
||||
* HRegion stores data for a certain region of a table. It stores all columns
|
||||
* for each row. A given table consists of one or more HRegions.
|
||||
|
@ -96,7 +105,7 @@ public class HRegion implements HConstants {
|
|||
* HRegionServer. Returns a brand-new active HRegion, also
|
||||
* running on the current HRegionServer.
|
||||
*/
|
||||
static HRegion closeAndMerge(final HRegion srcA, final HRegion srcB)
|
||||
public static HRegion closeAndMerge(final HRegion srcA, final HRegion srcB)
|
||||
throws IOException {
|
||||
|
||||
HRegion a = srcA;
|
||||
|
@ -482,7 +491,7 @@ public class HRegion implements HConstants {
|
|||
}
|
||||
|
||||
/** @return region id */
|
||||
long getRegionId() {
|
||||
public long getRegionId() {
|
||||
return this.regionInfo.getRegionId();
|
||||
}
|
||||
|
||||
|
@ -492,7 +501,7 @@ public class HRegion implements HConstants {
|
|||
}
|
||||
|
||||
/** @return HTableDescriptor for this region */
|
||||
HTableDescriptor getTableDesc() {
|
||||
public HTableDescriptor getTableDesc() {
|
||||
return this.regionInfo.getTableDesc();
|
||||
}
|
||||
|
||||
|
@ -502,17 +511,17 @@ public class HRegion implements HConstants {
|
|||
}
|
||||
|
||||
/** @return Configuration object */
|
||||
HBaseConfiguration getConf() {
|
||||
public HBaseConfiguration getConf() {
|
||||
return this.conf;
|
||||
}
|
||||
|
||||
/** @return region directory Path */
|
||||
Path getRegionDir() {
|
||||
public Path getRegionDir() {
|
||||
return this.regiondir;
|
||||
}
|
||||
|
||||
/** @return FileSystem being used by this region */
|
||||
FileSystem getFilesystem() {
|
||||
public FileSystem getFilesystem() {
|
||||
return this.fs;
|
||||
}
|
||||
|
||||
|
@ -533,11 +542,11 @@ public class HRegion implements HConstants {
|
|||
* splitable or not (Its not splitable if region has a store that has a
|
||||
* reference store file).
|
||||
*/
|
||||
HStore.HStoreSize largestHStore(Text midkey) {
|
||||
HStore.HStoreSize biggest = null;
|
||||
public HStoreSize largestHStore(Text midkey) {
|
||||
HStoreSize biggest = null;
|
||||
boolean splitable = true;
|
||||
for (HStore h: stores.values()) {
|
||||
HStore.HStoreSize size = h.size(midkey);
|
||||
HStoreSize size = h.size(midkey);
|
||||
// If we came across a reference down in the store, then propagate
|
||||
// fact that region is not splitable.
|
||||
if (splitable) {
|
||||
|
@ -670,7 +679,7 @@ public class HRegion implements HConstants {
|
|||
* Check it for a midKey value on return.
|
||||
*/
|
||||
boolean needsSplit(Text midKey) {
|
||||
HStore.HStoreSize biggest = largestHStore(midKey);
|
||||
HStoreSize biggest = largestHStore(midKey);
|
||||
if (biggest == null || midKey.getLength() == 0 ||
|
||||
(midKey.equals(getStartKey()) && midKey.equals(getEndKey())) ) {
|
||||
return false;
|
||||
|
@ -701,7 +710,7 @@ public class HRegion implements HConstants {
|
|||
* @return
|
||||
* @throws IOException
|
||||
*/
|
||||
boolean compactIfNeeded() throws IOException {
|
||||
public boolean compactIfNeeded() throws IOException {
|
||||
boolean needsCompaction = false;
|
||||
for (HStore store: stores.values()) {
|
||||
if (store.needsCompaction()) {
|
||||
|
@ -761,7 +770,7 @@ public class HRegion implements HConstants {
|
|||
* conflicts with a region split, and that cannot happen because the region
|
||||
* server does them sequentially and not in parallel.
|
||||
*/
|
||||
boolean compactStores() throws IOException {
|
||||
public boolean compactStores() throws IOException {
|
||||
if (this.closed.get()) {
|
||||
return false;
|
||||
}
|
||||
|
@ -821,7 +830,7 @@ public class HRegion implements HConstants {
|
|||
* @throws DroppedSnapshotException Thrown when replay of hlog is required
|
||||
* because a Snapshot was not properly persisted.
|
||||
*/
|
||||
boolean flushcache() throws IOException {
|
||||
public boolean flushcache() throws IOException {
|
||||
if (this.closed.get()) {
|
||||
return false;
|
||||
}
|
||||
|
@ -1603,7 +1612,7 @@ public class HRegion implements HConstants {
|
|||
return regionInfo.getRegionName().toString();
|
||||
}
|
||||
|
||||
private Path getBaseDir() {
|
||||
public Path getBaseDir() {
|
||||
return this.basedir;
|
||||
}
|
||||
|
||||
|
@ -1903,7 +1912,7 @@ public class HRegion implements HConstants {
|
|||
* @return Path of HRegion directory
|
||||
* @see HRegionInfo#encodeRegionName(Text)
|
||||
*/
|
||||
static Path getRegionDir(final Path tabledir, final String name) {
|
||||
public static Path getRegionDir(final Path tabledir, final String name) {
|
||||
return new Path(tabledir, name);
|
||||
}
|
||||
|
|
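The visibility changes above are the direct cost of the move: code left behind in org.apache.hadoop.hbase and org.apache.hadoop.hbase.master used to share HRegion's package and could call its package-private members, and after the move it no longer can. A hedged illustration of the kind of out-of-package caller these public modifiers allow; the helper class itself is hypothetical, only HRegion methods visible in this diff are used:

package org.apache.hadoop.hbase.master;

import java.io.IOException;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.regionserver.HRegion;

// Hypothetical helper: compiles only because compactIfNeeded(), flushcache()
// and getRegionDir() were widened to public when HRegion changed packages.
class RegionMaintenance {
  static Path maintain(final HRegion region) throws IOException {
    if (region.compactIfNeeded()) {
      region.flushcache();
    }
    return region.getRegionDir();
  }
}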
@ -17,7 +17,7 @@
|
|||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.hadoop.hbase;
|
||||
package org.apache.hadoop.hbase.regionserver;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
|
@ -27,6 +27,8 @@ import org.apache.hadoop.hbase.io.BatchUpdate;
|
|||
import org.apache.hadoop.hbase.io.HbaseMapWritable;
|
||||
import org.apache.hadoop.io.Text;
|
||||
import org.apache.hadoop.ipc.VersionedProtocol;
|
||||
import org.apache.hadoop.hbase.HRegionInfo;
|
||||
import org.apache.hadoop.hbase.NotServingRegionException;
|
||||
|
||||
/**
|
||||
* Clients interact with HRegionServers using a handle to the HRegionInterface.
|
|
@ -17,7 +17,7 @@
|
|||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.hadoop.hbase;
|
||||
package org.apache.hadoop.hbase.regionserver;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.lang.Thread.UncaughtExceptionHandler;
|
||||
|
@ -66,9 +66,28 @@ import org.apache.hadoop.io.Writable;
|
|||
import org.apache.hadoop.ipc.Server;
|
||||
import org.apache.hadoop.net.DNS;
|
||||
import org.apache.hadoop.util.StringUtils;
|
||||
import org.apache.hadoop.hbase.master.HMasterRegionInterface;
|
||||
import org.apache.hadoop.hbase.HConstants;
|
||||
import org.apache.hadoop.hbase.HServerInfo;
|
||||
import org.apache.hadoop.hbase.HBaseConfiguration;
|
||||
import org.apache.hadoop.hbase.HMsg;
|
||||
import org.apache.hadoop.hbase.Leases;
|
||||
import org.apache.hadoop.hbase.HServerAddress;
|
||||
import org.apache.hadoop.hbase.RegionServerRunningException;
|
||||
import org.apache.hadoop.hbase.HRegionInfo;
|
||||
import org.apache.hadoop.hbase.NotServingRegionException;
|
||||
import org.apache.hadoop.hbase.HScannerInterface;
|
||||
import org.apache.hadoop.hbase.LeaseListener;
|
||||
import org.apache.hadoop.hbase.RemoteExceptionHandler;
|
||||
import org.apache.hadoop.hbase.DroppedSnapshotException;
|
||||
import org.apache.hadoop.hbase.HServerLoad;
|
||||
import org.apache.hadoop.hbase.HTableDescriptor;
|
||||
import org.apache.hadoop.hbase.UnknownScannerException;
|
||||
import org.apache.hadoop.hbase.LocalHBaseCluster;
|
||||
|
||||
import org.apache.hadoop.hbase.client.HTable;
|
||||
|
||||
import org.apache.hadoop.hbase.master.HMasterRegionInterface;
|
||||
|
||||
/**
|
||||
* HRegionServer makes a set of HRegions available to clients. It checks in with
|
||||
* the HMaster. There are many HRegionServers in a single HBase deployment.
|
||||
|
@@ -971,7 +990,7 @@ public class HRegionServer implements HConstants, HRegionInterface, Runnable {
    * in an orderly fashion. Used by unit tests and called by {@link Flusher}
    * if it judges server needs to be restarted.
    */
-  synchronized void stop() {
+  public synchronized void stop() {
    this.stopRequested.set(true);
    notifyAll(); // Wakes run() if it is sleeping
   }
@@ -982,7 +1001,7 @@ public class HRegionServer implements HConstants, HRegionInterface, Runnable {
    * Used unit testing and on catastrophic events such as HDFS is yanked out
    * from under hbase or we OOME.
    */
-  synchronized void abort() {
+  public synchronized void abort() {
    this.abortRequested = true;
    stop();
   }
@@ -1570,6 +1589,10 @@ public class HRegionServer implements HConstants, HRegionInterface, Runnable {
   public HServerInfo getServerInfo() {
     return this.serverInfo;
   }
+
+  public InfoServer getInfoServer() {
+    return infoServer;
+  }

   /**
    * @return Immutable list of this servers regions.
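The region server gets the same treatment: stop() and abort() go public and a getInfoServer() accessor is added, because the tests and cluster utilities that drive it now sit in other packages. A rough sketch of such a caller; the class is hypothetical and uses only members visible in this diff:

package org.apache.hadoop.hbase;

import org.apache.hadoop.hbase.regionserver.HRegionServer;

// Hypothetical snippet: drives a region server through the now-public API
// rather than touching package-private members directly.
class RegionServerShutdownExample {
  static void shutDown(final HRegionServer server, final boolean clean) {
    System.out.println("Info server port: " + server.getInfoServer().getPort());
    if (clean) {
      server.stop();   // now public
    } else {
      server.abort();  // now public
    }
  }
}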
@ -17,7 +17,7 @@
|
|||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.hadoop.hbase;
|
||||
package org.apache.hadoop.hbase.regionserver;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.UnsupportedEncodingException;
|
||||
|
@ -50,11 +50,19 @@ import org.apache.hadoop.io.Text;
|
|||
import org.apache.hadoop.io.Writable;
|
||||
import org.apache.hadoop.io.WritableComparable;
|
||||
import org.apache.hadoop.util.StringUtils;
|
||||
import org.apache.hadoop.hbase.BloomFilterDescriptor;
|
||||
import org.onelab.filter.BloomFilter;
|
||||
import org.onelab.filter.CountingBloomFilter;
|
||||
import org.onelab.filter.Filter;
|
||||
import org.onelab.filter.RetouchedBloomFilter;
|
||||
|
||||
import org.apache.hadoop.hbase.HConstants;
|
||||
import org.apache.hadoop.hbase.HBaseConfiguration;
|
||||
import org.apache.hadoop.hbase.HRegionInfo;
|
||||
import org.apache.hadoop.hbase.HColumnDescriptor;
|
||||
import org.apache.hadoop.hbase.RemoteExceptionHandler;
|
||||
|
||||
|
||||
/**
|
||||
* HStore maintains a bunch of data files. It is responsible for maintaining
|
||||
* the memory/file hierarchy and for periodic flushes to disk and compacting
|
||||
|
@@ -932,7 +940,7 @@ public class HStore implements HConstants {
       }

       BloomFilterDescriptor.BloomFilterType type =
-        family.getBloomFilter().filterType;
+        family.getBloomFilter().getType();

       switch(type) {

@@ -964,25 +972,25 @@ public class HStore implements HConstants {
       }

       BloomFilterDescriptor.BloomFilterType type =
-        family.getBloomFilter().filterType;
+        family.getBloomFilter().getType();

       switch(type) {

       case BLOOMFILTER:
-        bloomFilter = new BloomFilter(family.getBloomFilter().vectorSize,
-            family.getBloomFilter().nbHash);
+        bloomFilter = new BloomFilter(family.getBloomFilter().getVectorSize(),
+            family.getBloomFilter().getNbHash());
         break;

       case COUNTING_BLOOMFILTER:
         bloomFilter =
-          new CountingBloomFilter(family.getBloomFilter().vectorSize,
-            family.getBloomFilter().nbHash);
+          new CountingBloomFilter(family.getBloomFilter().getVectorSize(),
+            family.getBloomFilter().getNbHash());
         break;

       case RETOUCHED_BLOOMFILTER:
         bloomFilter =
-          new RetouchedBloomFilter(family.getBloomFilter().vectorSize,
-            family.getBloomFilter().nbHash);
+          new RetouchedBloomFilter(family.getBloomFilter().getVectorSize(),
+            family.getBloomFilter().getNbHash());
       }
     }
     return bloomFilter;
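Besides the package change, HStore also switches from BloomFilterDescriptor's fields (filterType, vectorSize, nbHash) to its accessors (getType(), getVectorSize(), getNbHash()); the filter that gets built is the same. Condensed, the resulting construction logic is roughly the sketch below (helper class hypothetical, calls taken from the hunk above):

import org.apache.hadoop.hbase.BloomFilterDescriptor;
import org.onelab.filter.BloomFilter;
import org.onelab.filter.CountingBloomFilter;
import org.onelab.filter.Filter;
import org.onelab.filter.RetouchedBloomFilter;

// Sketch only: mirrors the switch in HStore after the accessor change.
class BloomFilterSketch {
  static Filter newFilter(final BloomFilterDescriptor bf) {
    switch (bf.getType()) {
    case BLOOMFILTER:
      return new BloomFilter(bf.getVectorSize(), bf.getNbHash());
    case COUNTING_BLOOMFILTER:
      return new CountingBloomFilter(bf.getVectorSize(), bf.getNbHash());
    case RETOUCHED_BLOOMFILTER:
      return new RetouchedBloomFilter(bf.getVectorSize(), bf.getNbHash());
    default:
      return null;
    }
  }
}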
@@ -1963,37 +1971,6 @@ public class HStore implements HConstants {
     return target.matchesRowCol(origin);
   }

-  /*
-   * Data structure to hold result of a look at store file sizes.
-   */
-  static class HStoreSize {
-    final long aggregate;
-    final long largest;
-    boolean splitable;
-
-    HStoreSize(final long a, final long l, final boolean s) {
-      this.aggregate = a;
-      this.largest = l;
-      this.splitable = s;
-    }
-
-    long getAggregate() {
-      return this.aggregate;
-    }
-
-    long getLargest() {
-      return this.largest;
-    }
-
-    boolean isSplitable() {
-      return this.splitable;
-    }
-
-    void setSplitable(final boolean s) {
-      this.splitable = s;
-    }
-  }
-
   /**
    * Gets size for the store.
    *
@ -17,7 +17,7 @@
|
|||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.hadoop.hbase;
|
||||
package org.apache.hadoop.hbase.regionserver;
|
||||
|
||||
import java.io.DataInput;
|
||||
import java.io.DataInputStream;
|
||||
|
@ -46,6 +46,8 @@ import org.apache.hadoop.io.WritableComparable;
|
|||
import org.onelab.filter.Filter;
|
||||
import org.onelab.filter.Key;
|
||||
|
||||
import org.apache.hadoop.hbase.HConstants;
|
||||
import org.apache.hadoop.hbase.HBaseConfiguration;
|
||||
|
||||
/**
|
||||
* A HStore data file. HStores usually have one or more of these files. They
|
|
@ -17,10 +17,12 @@
|
|||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.hadoop.hbase;
|
||||
package org.apache.hadoop.hbase.regionserver;
|
||||
|
||||
import org.apache.hadoop.hbase.io.TextSequence;
|
||||
import org.apache.hadoop.hbase.InvalidColumnNameException;
|
||||
import org.apache.hadoop.io.*;
|
||||
import org.apache.hadoop.hbase.HConstants;
|
||||
|
||||
import java.io.*;
|
||||
import java.nio.ByteBuffer;
|
|
@@ -0,0 +1,33 @@
+
+package org.apache.hadoop.hbase.regionserver;
+
+/*
+ * Data structure to hold result of a look at store file sizes.
+ */
+public class HStoreSize {
+  final long aggregate;
+  final long largest;
+  boolean splitable;
+
+  HStoreSize(final long a, final long l, final boolean s) {
+    this.aggregate = a;
+    this.largest = l;
+    this.splitable = s;
+  }
+
+  public long getAggregate() {
+    return this.aggregate;
+  }
+
+  public long getLargest() {
+    return this.largest;
+  }
+
+  public boolean isSplitable() {
+    return this.splitable;
+  }
+
+  public void setSplitable(final boolean s) {
+    this.splitable = s;
+  }
+}
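HStoreSize is the one brand-new file in the patch: the former HStore.HStoreSize inner class (deleted from HStore above) promoted to a top-level class, with its accessors widened to public. The constructor stays package-private, so only regionserver code can create one; a small hypothetical usage sketch from inside that package:

package org.apache.hadoop.hbase.regionserver;

// Hypothetical example: summarize a store-size result. The constructor is
// package-private, so this compiles only inside the regionserver package.
class HStoreSizeExample {
  static String describe(final long aggregate, final long largest, final boolean splitable) {
    final HStoreSize size = new HStoreSize(aggregate, largest, splitable);
    return "aggregate=" + size.getAggregate()
        + ", largest=" + size.getLargest()
        + ", splitable=" + size.isSplitable();
  }
}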
@ -18,7 +18,7 @@
|
|||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.hbase;
|
||||
package org.apache.hadoop.hbase.regionserver;
|
||||
|
||||
/**
|
||||
* Mechanism by which the HLog requests a log roll
|
|
@ -17,7 +17,7 @@
|
|||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.hadoop.hbase;
|
||||
package org.apache.hadoop.hbase.regionserver;
|
||||
|
||||
import org.apache.hadoop.io.Text;
|
||||
|
|
@ -35,7 +35,7 @@ import org.apache.hadoop.hbase.client.HTable;
|
|||
import org.apache.hadoop.hbase.HBaseConfiguration;
|
||||
import org.apache.hadoop.hbase.HConstants;
|
||||
import org.apache.hadoop.hbase.HScannerInterface;
|
||||
import org.apache.hadoop.hbase.HStoreKey;
|
||||
import org.apache.hadoop.hbase.regionserver.HStoreKey;
|
||||
import org.apache.hadoop.hbase.util.JenkinsHash;
|
||||
import org.apache.hadoop.io.Text;
|
||||
import org.mortbay.servlet.MultiPartResponse;
|
||||
|
|
|
@ -35,7 +35,7 @@ import org.apache.hadoop.hbase.HBaseConfiguration;
|
|||
import org.apache.hadoop.hbase.HColumnDescriptor;
|
||||
import org.apache.hadoop.hbase.HConstants;
|
||||
import org.apache.hadoop.hbase.HScannerInterface;
|
||||
import org.apache.hadoop.hbase.HStoreKey;
|
||||
import org.apache.hadoop.hbase.regionserver.HStoreKey;
|
||||
import org.apache.hadoop.hbase.HTableDescriptor;
|
||||
import org.apache.hadoop.hbase.MasterNotRunningException;
|
||||
import org.apache.hadoop.hbase.thrift.generated.AlreadyExists;
|
||||
|
|
|
@ -56,14 +56,15 @@ import org.apache.hadoop.util.ToolRunner;
|
|||
import org.apache.hadoop.hbase.client.HBaseAdmin;
|
||||
import org.apache.hadoop.hbase.HBaseConfiguration;
|
||||
import org.apache.hadoop.hbase.HConstants;
|
||||
import org.apache.hadoop.hbase.HLog;
|
||||
import org.apache.hadoop.hbase.HRegion;
|
||||
import org.apache.hadoop.hbase.HRegionInfo;
|
||||
import org.apache.hadoop.hbase.HScannerInterface;
|
||||
import org.apache.hadoop.hbase.HStore;
|
||||
import org.apache.hadoop.hbase.HStoreKey;
|
||||
import org.apache.hadoop.hbase.MasterNotRunningException;
|
||||
|
||||
import org.apache.hadoop.hbase.regionserver.HLog;
|
||||
import org.apache.hadoop.hbase.regionserver.HRegion;
|
||||
import org.apache.hadoop.hbase.regionserver.HStore;
|
||||
import org.apache.hadoop.hbase.regionserver.HStoreKey;
|
||||
|
||||
/**
|
||||
* Perform a file system upgrade to convert older file layouts to that
|
||||
* supported by HADOOP-2478, and then to the form supported by HBASE-69
|
||||
|
|
|
@ -28,6 +28,8 @@ import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
|
|||
import org.apache.hadoop.io.Text;
|
||||
import org.apache.log4j.Logger;
|
||||
|
||||
import org.apache.hadoop.hbase.regionserver.HRegion;
|
||||
|
||||
/** Abstract base class for merge tests */
|
||||
public abstract class AbstractMergeTestBase extends HBaseTestCase {
|
||||
static final Logger LOG =
|
||||
|
|
|
@ -46,6 +46,10 @@ import org.apache.hadoop.io.Writable;
|
|||
import org.apache.hadoop.hbase.client.HTable;
|
||||
import org.apache.hadoop.hbase.client.HBaseAdmin;
|
||||
|
||||
import org.apache.hadoop.hbase.regionserver.HRegion;
|
||||
import org.apache.hadoop.hbase.regionserver.HStoreKey;
|
||||
import org.apache.hadoop.hbase.regionserver.HRegionInterface;
|
||||
|
||||
/**
|
||||
* Additional scanner tests.
|
||||
* {@link TestScanner} does a custom setup/takedown not conducive
|
||||
|
|
|
@ -34,6 +34,8 @@ import org.apache.hadoop.hbase.io.BatchUpdate;
|
|||
import org.apache.hadoop.io.Text;
|
||||
import org.apache.hadoop.hbase.client.HTable;
|
||||
|
||||
import org.apache.hadoop.hbase.regionserver.HRegion;
|
||||
|
||||
/**
|
||||
* Abstract base class for test cases. Performs all static initialization
|
||||
*/
|
||||
|
@@ -157,7 +159,7 @@ public abstract class HBaseTestCase extends TestCase {

   protected HRegion openClosedRegion(final HRegion closedRegion)
   throws IOException {
-    return new HRegion(closedRegion.basedir, closedRegion.getLog(),
+    return new HRegion(closedRegion.getBaseDir(), closedRegion.getLog(),
       closedRegion.getFilesystem(), closedRegion.getConf(),
       closedRegion.getRegionInfo(), null, null);
   }
@ -30,6 +30,8 @@ import org.apache.log4j.Logger;
|
|||
|
||||
import org.apache.hadoop.hbase.util.FSUtils;
|
||||
import org.apache.hadoop.hbase.master.HMaster;
|
||||
import org.apache.hadoop.hbase.regionserver.HRegionServer;
|
||||
import org.apache.hadoop.hbase.regionserver.HRegion;
|
||||
|
||||
/**
|
||||
* This class creates a single process HBase cluster. One thread is created for
|
||||
|
@@ -196,7 +198,7 @@ public class MiniHBaseCluster implements HConstants {
   public void abortRegionServer(int serverNumber) {
     HRegionServer server =
       this.hbaseCluster.getRegionServers().get(serverNumber).getRegionServer();
-    LOG.info("Aborting " + server.serverInfo.toString());
+    LOG.info("Aborting " + server.getServerInfo().toString());
     server.abort();
   }

@@ -262,10 +264,10 @@ public class MiniHBaseCluster implements HConstants {
    * Call flushCache on all regions on all participating regionservers.
    * @throws IOException
    */
-  void flushcache() throws IOException {
+  public void flushcache() throws IOException {
     for (LocalHBaseCluster.RegionServerThread t:
         this.hbaseCluster.getRegionServers()) {
-      for(HRegion r: t.getRegionServer().onlineRegions.values() ) {
+      for(HRegion r: t.getRegionServer().getOnlineRegions().values() ) {
         r.flushcache();
       }
     }
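The test utilities pick up the same accessors: server.serverInfo and the onlineRegions map are read through getServerInfo() and getOnlineRegions() instead of as fields, and MiniHBaseCluster.flushcache() is made public, presumably so the tests that moved into the regionserver package can still call it. A rough sketch of the resulting per-server flush loop; the helper class is hypothetical, the types and methods are the ones visible in this diff:

import java.io.IOException;
import org.apache.hadoop.hbase.LocalHBaseCluster;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.HRegionServer;

// Sketch: flush every online region on one region server via the new accessors.
class FlushAllRegions {
  static void flush(final LocalHBaseCluster.RegionServerThread t) throws IOException {
    final HRegionServer server = t.getRegionServer();
    System.out.println("Flushing regions on " + server.getServerInfo());
    for (final HRegion r : server.getOnlineRegions().values()) {
      r.flushcache();
    }
  }
}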
@ -33,6 +33,10 @@ import org.apache.hadoop.hbase.util.Writables;
|
|||
import org.apache.hadoop.io.Text;
|
||||
import org.apache.hadoop.hbase.client.HTable;
|
||||
|
||||
import org.apache.hadoop.hbase.regionserver.HRegion;
|
||||
import org.apache.hadoop.hbase.regionserver.HRegionServer;
|
||||
import org.apache.hadoop.hbase.regionserver.HStoreKey;
|
||||
|
||||
/**
|
||||
* Utility class to build a table of multiple regions.
|
||||
*/
|
||||
|
@@ -99,7 +103,7 @@ public class MultiRegionTable extends HBaseTestCase {
         continue;
       }
       LOG.info("Region location: " + hri);
-      r = server.onlineRegions.get(hri.getRegionName());
+      r = server.getOnlineRegions().get(hri.getRegionName());
       if (r != null) {
         break;
       }
@@ -335,7 +339,7 @@ public class MultiRegionTable extends HBaseTestCase {
     LOG.info("Starting compaction");
     for (LocalHBaseCluster.RegionServerThread thread:
         cluster.getRegionThreads()) {
-      SortedMap<Text, HRegion> regions = thread.getRegionServer().onlineRegions;
+      SortedMap<Text, HRegion> regions = thread.getRegionServer().getOnlineRegions();

       // Retry if ConcurrentModification... alternative of sync'ing is not
       // worth it for sake of unit test.
@ -49,6 +49,8 @@ import org.apache.log4j.Logger;
|
|||
import org.apache.hadoop.hbase.client.HTable;
|
||||
import org.apache.hadoop.hbase.client.HBaseAdmin;
|
||||
|
||||
import org.apache.hadoop.hbase.regionserver.HStoreKey;
|
||||
|
||||
/**
|
||||
* Script used evaluating HBase performance and scalability. Runs a HBase
|
||||
* client that steps through one of a set of hardcoded tests or 'experiments'
|
||||
|
|
|
@ -21,6 +21,7 @@ package org.apache.hadoop.hbase;
|
|||
import org.apache.hadoop.io.Text;
|
||||
|
||||
import junit.framework.TestCase;
|
||||
import org.apache.hadoop.hbase.regionserver.HStoreKey;
|
||||
|
||||
/**
|
||||
* Test comparing HBase objects.
|
||||
|
|
|
@ -27,6 +27,7 @@ import java.util.TreeMap;
|
|||
import org.apache.hadoop.io.Text;
|
||||
import org.apache.hadoop.hbase.client.HBaseAdmin;
|
||||
import org.apache.hadoop.hbase.client.HTable;
|
||||
import org.apache.hadoop.hbase.regionserver.HStoreKey;
|
||||
|
||||
/**
|
||||
* Test HBase Master and Region servers, client API
|
||||
|
|
|
@@ -63,7 +63,7 @@ public class TestInfoServers extends HBaseTestCase {
       assertHasExpectedContent(new URL("http://localhost:" + port +
         "/index.html"), "Master");
       port = miniHbase.getRegionThreads().get(0).getRegionServer().
-        infoServer.getPort();
+        getInfoServer().getPort();
       assertHasExpectedContent(new URL("http://localhost:" + port +
         "/index.html"), "Region Server");
     } finally {
@ -30,6 +30,8 @@ import java.util.TreeMap;
|
|||
import org.apache.hadoop.io.Text;
|
||||
import org.apache.hadoop.hbase.client.HBaseAdmin;
|
||||
import org.apache.hadoop.hbase.client.HTable;
|
||||
import org.apache.hadoop.hbase.regionserver.HRegion;
|
||||
import org.apache.hadoop.hbase.regionserver.HStoreKey;
|
||||
import org.apache.hadoop.hbase.io.BatchUpdate;
|
||||
|
||||
/** test the scanner API at all levels */
|
||||
|
|
|
@ -28,7 +28,7 @@ import org.apache.hadoop.hbase.HBaseClusterTestCase;
|
|||
import org.apache.hadoop.hbase.HTableDescriptor;
|
||||
import org.apache.hadoop.hbase.HConstants;
|
||||
import org.apache.hadoop.hbase.HScannerInterface;
|
||||
import org.apache.hadoop.hbase.HStoreKey;
|
||||
import org.apache.hadoop.hbase.regionserver.HStoreKey;
|
||||
import org.apache.hadoop.hbase.HColumnDescriptor;
|
||||
|
||||
/**
|
||||
|
|
|
@ -33,7 +33,7 @@ import org.apache.hadoop.hbase.HColumnDescriptor;
|
|||
import org.apache.hadoop.hbase.HTableDescriptor;
|
||||
import org.apache.hadoop.hbase.TableNotFoundException;
|
||||
import org.apache.hadoop.hbase.HScannerInterface;
|
||||
import org.apache.hadoop.hbase.HStoreKey;
|
||||
import org.apache.hadoop.hbase.regionserver.HStoreKey;
|
||||
|
||||
/**
|
||||
* Tests HTable
|
||||
|
|
|
@ -30,7 +30,7 @@ import java.util.TreeMap;
|
|||
import junit.framework.TestCase;
|
||||
|
||||
import org.apache.hadoop.hbase.HConstants;
|
||||
import org.apache.hadoop.hbase.HLogEdit;
|
||||
import org.apache.hadoop.hbase.regionserver.HLogEdit;
|
||||
import org.apache.hadoop.io.Text;
|
||||
|
||||
/**
|
||||
|
|
|
@ -37,9 +37,9 @@ import org.apache.hadoop.fs.Path;
|
|||
import org.apache.hadoop.hbase.client.HBaseAdmin;
|
||||
import org.apache.hadoop.hbase.HColumnDescriptor;
|
||||
import org.apache.hadoop.hbase.HConstants;
|
||||
import org.apache.hadoop.hbase.HRegion;
|
||||
import org.apache.hadoop.hbase.regionserver.HRegion;
|
||||
import org.apache.hadoop.hbase.HScannerInterface;
|
||||
import org.apache.hadoop.hbase.HStoreKey;
|
||||
import org.apache.hadoop.hbase.regionserver.HStoreKey;
|
||||
import org.apache.hadoop.hbase.client.HTable;
|
||||
import org.apache.hadoop.hbase.HTableDescriptor;
|
||||
import org.apache.hadoop.hbase.MiniHBaseCluster;
|
||||
|
|
|
@ -32,7 +32,7 @@ import org.apache.hadoop.hbase.client.HBaseAdmin;
|
|||
import org.apache.hadoop.hbase.HColumnDescriptor;
|
||||
import org.apache.hadoop.hbase.HConstants;
|
||||
import org.apache.hadoop.hbase.HScannerInterface;
|
||||
import org.apache.hadoop.hbase.HStoreKey;
|
||||
import org.apache.hadoop.hbase.regionserver.HStoreKey;
|
||||
import org.apache.hadoop.hbase.client.HTable;
|
||||
import org.apache.hadoop.hbase.HTableDescriptor;
|
||||
import org.apache.hadoop.hbase.MiniHBaseCluster;
|
||||
|
|
|
@ -17,13 +17,15 @@
|
|||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.hadoop.hbase;
|
||||
package org.apache.hadoop.hbase.regionserver;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
||||
import org.apache.hadoop.hbase.io.BatchUpdate;
|
||||
import org.apache.hadoop.hbase.HBaseConfiguration;
|
||||
import org.apache.hadoop.hbase.HServerAddress;
|
||||
import org.apache.hadoop.io.Text;
|
||||
|
||||
/**
|
|
@ -17,7 +17,7 @@
|
|||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.hadoop.hbase;
|
||||
package org.apache.hadoop.hbase.regionserver;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
|
@ -26,9 +26,14 @@ import org.apache.hadoop.dfs.MiniDFSCluster;
|
|||
import org.apache.commons.logging.Log;
|
||||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
|
||||
import org.apache.hadoop.hbase.HBaseTestCase;
|
||||
import org.apache.hadoop.io.MapFile;
|
||||
import org.apache.hadoop.io.Text;
|
||||
|
||||
import org.apache.hadoop.hbase.HConstants;
|
||||
import org.apache.hadoop.hbase.HTableDescriptor;
|
||||
import org.apache.hadoop.hbase.StaticTestEnvironment;
|
||||
|
||||
/**
|
||||
* Test compactions
|
||||
*/
|
|
@ -17,7 +17,7 @@
|
|||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.hadoop.hbase;
|
||||
package org.apache.hadoop.hbase.regionserver;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Map;
|
||||
|
@ -25,6 +25,10 @@ import java.util.Map;
|
|||
import org.apache.hadoop.dfs.MiniDFSCluster;
|
||||
import org.apache.hadoop.io.Text;
|
||||
import org.apache.commons.logging.*;
|
||||
import org.apache.hadoop.hbase.HBaseTestCase;
|
||||
import org.apache.hadoop.hbase.HConstants;
|
||||
import org.apache.hadoop.hbase.HTableDescriptor;
|
||||
import org.apache.hadoop.hbase.StaticTestEnvironment;
|
||||
|
||||
/**
|
||||
* Test the functionality of deleteAll.
|
|
@ -17,7 +17,7 @@
|
|||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.hadoop.hbase;
|
||||
package org.apache.hadoop.hbase.regionserver;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Map;
|
||||
|
@ -26,6 +26,11 @@ import org.apache.commons.logging.Log;
|
|||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.hadoop.dfs.MiniDFSCluster;
|
||||
import org.apache.hadoop.io.Text;
|
||||
import org.apache.hadoop.hbase.HBaseTestCase;
|
||||
|
||||
import org.apache.hadoop.hbase.HConstants;
|
||||
import org.apache.hadoop.hbase.HTableDescriptor;
|
||||
import org.apache.hadoop.hbase.StaticTestEnvironment;
|
||||
|
||||
/**
|
||||
* Test the functionality of deleteFamily.
|
|
@ -17,7 +17,7 @@
|
|||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.hadoop.hbase;
|
||||
package org.apache.hadoop.hbase.regionserver;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Iterator;
|
||||
|
@ -27,9 +27,18 @@ import org.apache.commons.logging.Log;
|
|||
import org.apache.commons.logging.LogFactory;
|
||||
import org.apache.hadoop.dfs.MiniDFSCluster;
|
||||
import org.apache.hadoop.io.Text;
|
||||
import org.apache.hadoop.hbase.HBaseTestCase;
|
||||
|
||||
import org.apache.hadoop.hbase.util.Writables;
|
||||
|
||||
import org.apache.hadoop.hbase.HRegionInfo;
|
||||
import org.apache.hadoop.hbase.HConstants;
|
||||
import org.apache.hadoop.hbase.HTableDescriptor;
|
||||
import org.apache.hadoop.hbase.HColumnDescriptor;
|
||||
import org.apache.hadoop.hbase.HServerAddress;
|
||||
import org.apache.hadoop.hbase.StaticTestEnvironment;
|
||||
|
||||
|
||||
/** Test case for get */
|
||||
public class TestGet extends HBaseTestCase {
|
||||
private static final Log LOG = LogFactory.getLog(TestGet.class.getName());
|
|
@ -17,7 +17,7 @@
|
|||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.hadoop.hbase;
|
||||
package org.apache.hadoop.hbase.regionserver;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Map;
|
||||
|
@ -27,6 +27,11 @@ import org.apache.hadoop.dfs.MiniDFSCluster;
|
|||
import org.apache.hadoop.hbase.filter.StopRowFilter;
|
||||
import org.apache.hadoop.hbase.filter.WhileMatchRowFilter;
|
||||
import org.apache.hadoop.io.Text;
|
||||
import org.apache.hadoop.hbase.HBaseTestCase;
|
||||
import org.apache.hadoop.hbase.HConstants;
|
||||
import org.apache.hadoop.hbase.HScannerInterface;
|
||||
import org.apache.hadoop.hbase.HTableDescriptor;
|
||||
import org.apache.hadoop.hbase.HRegionInfo;
|
||||
|
||||
|
||||
/**
|
|
@ -17,7 +17,7 @@
|
|||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.hadoop.hbase;
|
||||
package org.apache.hadoop.hbase.regionserver;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.TreeMap;
|
||||
|
@ -28,6 +28,10 @@ import org.apache.hadoop.io.SequenceFile;
|
|||
import org.apache.hadoop.io.Text;
|
||||
import org.apache.hadoop.io.SequenceFile.Reader;
|
||||
|
||||
import org.apache.hadoop.hbase.HBaseTestCase;
|
||||
import org.apache.hadoop.hbase.HConstants;
|
||||
import org.apache.hadoop.hbase.StaticTestEnvironment;
|
||||
|
||||
/** JUnit test case for HLog */
|
||||
public class TestHLog extends HBaseTestCase implements HConstants {
|
||||
private Path dir;
|
|
@ -17,7 +17,7 @@
|
|||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.hadoop.hbase;
|
||||
package org.apache.hadoop.hbase.regionserver;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.io.UnsupportedEncodingException;
|
||||
|
@ -28,6 +28,8 @@ import junit.framework.TestCase;
|
|||
|
||||
import org.apache.hadoop.io.Text;
|
||||
|
||||
import org.apache.hadoop.hbase.HConstants;
|
||||
|
||||
/** memcache test case */
|
||||
public class TestHMemcache extends TestCase {
|
||||
|
|
@ -17,7 +17,7 @@
|
|||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.hadoop.hbase;
|
||||
package org.apache.hadoop.hbase.regionserver;
|
||||
|
||||
import java.io.File;
|
||||
import java.io.IOException;
|
||||
|
@ -29,8 +29,14 @@ import java.util.TreeMap;
|
|||
import org.apache.hadoop.dfs.MiniDFSCluster;
|
||||
import org.apache.hadoop.fs.Path;
|
||||
import org.apache.hadoop.io.Text;
|
||||
|
||||
import org.apache.hadoop.hbase.HBaseTestCase;
|
||||
import org.apache.hadoop.hbase.HTableDescriptor;
|
||||
import org.apache.log4j.Logger;
|
||||
import org.apache.hadoop.hbase.StaticTestEnvironment;
|
||||
import org.apache.hadoop.hbase.HConstants;
|
||||
import org.apache.hadoop.hbase.HTableDescriptor;
|
||||
import org.apache.hadoop.hbase.HColumnDescriptor;
|
||||
import org.apache.hadoop.hbase.HScannerInterface;
|
||||
|
||||
/**
|
||||
* Basic stand-alone testing of HRegion.
|
|
@ -17,7 +17,7 @@
|
|||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.hadoop.hbase;
|
||||
package org.apache.hadoop.hbase.regionserver;
|
||||
|
||||
import java.io.IOException;
|
||||
|
||||
|
@ -30,7 +30,10 @@ import org.apache.hadoop.io.MapFile;
|
|||
import org.apache.hadoop.io.SequenceFile;
|
||||
import org.apache.hadoop.io.Text;
|
||||
import org.apache.hadoop.io.WritableComparable;
|
||||
import org.apache.hadoop.hbase.HBaseTestCase;
|
||||
|
||||
import org.apache.hadoop.hbase.HConstants;
|
||||
import org.apache.hadoop.hbase.StaticTestEnvironment;
|
||||
/**
|
||||
* Test HStoreFile
|
||||
*/
|
|
@ -17,7 +17,7 @@
|
|||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.hadoop.hbase;
|
||||
package org.apache.hadoop.hbase.regionserver;
|
||||
|
||||
import java.util.ArrayList;
|
||||
import java.util.List;
|
||||
|
@ -28,6 +28,13 @@ import org.apache.hadoop.dfs.MiniDFSCluster;
|
|||
import org.apache.hadoop.io.Text;
|
||||
import org.apache.hadoop.hbase.client.HTable;
|
||||
import org.apache.hadoop.hbase.client.HBaseAdmin;
|
||||
import org.apache.hadoop.hbase.HBaseTestCase;
|
||||
import org.apache.hadoop.hbase.MiniHBaseCluster;
|
||||
|
||||
import org.apache.hadoop.hbase.HConstants;
|
||||
import org.apache.hadoop.hbase.StaticTestEnvironment;
|
||||
import org.apache.hadoop.hbase.HTableDescriptor;
|
||||
import org.apache.hadoop.hbase.HColumnDescriptor;
|
||||
|
||||
/**
|
||||
* Test log deletion as logs are rolled.
|
|
@ -17,7 +17,7 @@
|
|||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.hadoop.hbase;
|
||||
package org.apache.hadoop.hbase.regionserver;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.Collection;
|
||||
|
@ -32,6 +32,13 @@ import org.apache.hadoop.hbase.io.BatchUpdate;
|
|||
import org.apache.hadoop.hbase.client.HTable;
|
||||
import org.apache.hadoop.hbase.client.HBaseAdmin;
|
||||
|
||||
import org.apache.hadoop.hbase.HBaseClusterTestCase;
|
||||
import org.apache.hadoop.hbase.HConstants;
|
||||
import org.apache.hadoop.hbase.HTableDescriptor;
|
||||
import org.apache.hadoop.hbase.HColumnDescriptor;
|
||||
import org.apache.hadoop.hbase.LocalHBaseCluster;
|
||||
import org.apache.hadoop.hbase.HScannerInterface;
|
||||
|
||||
/**
|
||||
* Tests region server failover when a region server exits both cleanly and
|
||||
* when it aborts.
|
|
@ -17,7 +17,7 @@
|
|||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.hadoop.hbase;
|
||||
package org.apache.hadoop.hbase.regionserver;
|
||||
|
||||
import java.io.ByteArrayOutputStream;
|
||||
import java.io.DataOutputStream;
|
||||
|
@ -27,6 +27,13 @@ import java.util.TreeMap;
|
|||
import org.apache.hadoop.dfs.MiniDFSCluster;
|
||||
import org.apache.hadoop.hbase.util.Writables;
|
||||
import org.apache.hadoop.io.Text;
|
||||
import org.apache.hadoop.hbase.HBaseTestCase;
|
||||
import org.apache.hadoop.hbase.HRegionInfo;
|
||||
|
||||
import org.apache.hadoop.hbase.HConstants;
|
||||
import org.apache.hadoop.hbase.HScannerInterface;
|
||||
import org.apache.hadoop.hbase.HServerAddress;
|
||||
import org.apache.hadoop.hbase.StaticTestEnvironment;
|
||||
|
||||
/**
|
||||
* Test of a long-lived scanner validating as we go.
|
|
@ -17,7 +17,7 @@
|
|||
* See the License for the specific language governing permissions and
|
||||
* limitations under the License.
|
||||
*/
|
||||
package org.apache.hadoop.hbase;
|
||||
package org.apache.hadoop.hbase.regionserver;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.TreeMap;
|
||||
|
@ -28,6 +28,11 @@ import org.apache.hadoop.dfs.MiniDFSCluster;
|
|||
import org.apache.hadoop.io.Text;
|
||||
import org.apache.log4j.Level;
|
||||
import org.apache.log4j.Logger;
|
||||
import org.apache.hadoop.hbase.MultiRegionTable;
|
||||
import org.apache.hadoop.hbase.HConstants;
|
||||
import org.apache.hadoop.hbase.HTableDescriptor;
|
||||
import org.apache.hadoop.hbase.HScannerInterface;
|
||||
import org.apache.hadoop.hbase.StaticTestEnvironment;
|
||||
|
||||
/**
|
||||
* {@Link TestHRegion} does a split but this TestCase adds testing of fast
|
|
@ -16,7 +16,7 @@
|
|||
* limitations under the License.
|
||||
*/
|
||||
|
||||
package org.apache.hadoop.hbase;
|
||||
package org.apache.hadoop.hbase.regionserver;
|
||||
|
||||
import java.io.IOException;
|
||||
import java.util.TreeMap;
|
||||
|
@ -28,6 +28,13 @@ import org.apache.hadoop.hbase.util.Writables;
|
|||
import org.apache.hadoop.io.Text;
|
||||
import org.apache.hadoop.hbase.client.HTable;
|
||||
import org.apache.hadoop.hbase.client.HBaseAdmin;
|
||||
import org.apache.hadoop.hbase.HBaseTestCase;
|
||||
import org.apache.hadoop.hbase.HConstants;
|
||||
import org.apache.hadoop.hbase.StaticTestEnvironment;
|
||||
import org.apache.hadoop.hbase.MiniHBaseCluster;
|
||||
import org.apache.hadoop.hbase.HScannerInterface;
|
||||
import org.apache.hadoop.hbase.HTableDescriptor;
|
||||
import org.apache.hadoop.hbase.HColumnDescriptor;
|
||||
|
||||
/**
|
||||
* Tests user specifiable time stamps putting, getting and scanning. Also
|
|
@@ -2,8 +2,8 @@
   import="java.util.*"
   import="org.apache.hadoop.io.Text"
   import="org.apache.hadoop.util.VersionInfo"
-  import="org.apache.hadoop.hbase.HRegionServer"
-  import="org.apache.hadoop.hbase.HRegion"
+  import="org.apache.hadoop.hbase.regionserver.HRegionServer"
+  import="org.apache.hadoop.hbase.regionserver.HRegion"
   import="org.apache.hadoop.hbase.HConstants"
   import="org.apache.hadoop.hbase.HServerInfo"
   import="org.apache.hadoop.hbase.HRegionInfo" %><%