HBASE-419 Move RegionServer and related classes into regionserver package
git-svn-id: https://svn.apache.org/repos/asf/hadoop/hbase/trunk@630550 13f79535-47bb-0310-9956-ffa450edef68
parent 1635b75918
commit dcbc0773fc
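The change is large but almost entirely mechanical: HRegionServer, HRegion, HStore, HStoreFile, HStoreKey, HLog, HLogEdit and HRegionInterface move from org.apache.hadoop.hbase into the new org.apache.hadoop.hbase.regionserver package; callers in the client, master, mapred, rest, thrift, util and test code update their imports; a handful of HRegion and HRegionServer methods are widened to public so code outside the package can still reach them; and the HStoreSize helper is pulled out of HStore into its own top-level class. As a rough sketch of what this means for downstream code (the class below and its row/column literals are invented for illustration; only the imported class names come from this commit):

// Hypothetical downstream class, invented for illustration; it is not part of this commit.
// Before this change both HBase classes were imported from org.apache.hadoop.hbase directly.
import org.apache.hadoop.hbase.regionserver.HRegionInterface;
import org.apache.hadoop.hbase.regionserver.HStoreKey;
import org.apache.hadoop.io.Text;

public class RegionServerPackageExample {

  /** Builds a store key the same way as before; only the import path changed. */
  public static HStoreKey exampleKey() {
    return new HStoreKey(new Text("row1"), new Text("info:name"));
  }

  /** The RPC interface type moves too; its method signatures are untouched. */
  public static String describe(final HRegionInterface server) {
    return "region server proxy: " + server.getClass().getName();
  }
}

The launcher script and the hbase.regionserver.class default in hbase-default.xml are likewise updated to the new fully qualified names, as the first two hunks below show.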
@ -162,7 +162,7 @@ if [ "$COMMAND" = "shell" ] ; then
|
||||||
elif [ "$COMMAND" = "master" ] ; then
|
elif [ "$COMMAND" = "master" ] ; then
|
||||||
CLASS='org.apache.hadoop.hbase.master.HMaster'
|
CLASS='org.apache.hadoop.hbase.master.HMaster'
|
||||||
elif [ "$COMMAND" = "regionserver" ] ; then
|
elif [ "$COMMAND" = "regionserver" ] ; then
|
||||||
CLASS='org.apache.hadoop.hbase.HRegionServer'
|
CLASS='org.apache.hadoop.hbase.regionserver.HRegionServer'
|
||||||
elif [ "$COMMAND" = "rest" ] ; then
|
elif [ "$COMMAND" = "rest" ] ; then
|
||||||
CLASS='org.apache.hadoop.hbase.rest.Dispatcher'
|
CLASS='org.apache.hadoop.hbase.rest.Dispatcher'
|
||||||
elif [ "$COMMAND" = "thrift" ] ; then
|
elif [ "$COMMAND" = "thrift" ] ; then
|
||||||
|
|
|
@ -72,7 +72,7 @@
|
||||||
</property>
|
</property>
|
||||||
<property>
|
<property>
|
||||||
<name>hbase.regionserver.class</name>
|
<name>hbase.regionserver.class</name>
|
||||||
<value>org.apache.hadoop.hbase.HRegionInterface</value>
|
<value>org.apache.hadoop.hbase.regionserver.HRegionInterface</value>
|
||||||
<description>An interface that is assignable to HRegionInterface. Used in HClient for
|
<description>An interface that is assignable to HRegionInterface. Used in HClient for
|
||||||
opening proxy to remote region server.
|
opening proxy to remote region server.
|
||||||
</description>
|
</description>
|
||||||
|
|
|
@ -20,6 +20,7 @@
|
||||||
package org.apache.hadoop.hbase;
|
package org.apache.hadoop.hbase;
|
||||||
|
|
||||||
import org.apache.hadoop.io.Text;
|
import org.apache.hadoop.io.Text;
|
||||||
|
import org.apache.hadoop.hbase.regionserver.HRegionInterface;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* HConstants holds a bunch of HBase-related constants
|
* HConstants holds a bunch of HBase-related constants
|
||||||
|
|
|
@ -37,6 +37,11 @@ import org.apache.hadoop.hbase.io.BatchUpdate;
|
||||||
import org.apache.hadoop.hbase.client.HTable;
|
import org.apache.hadoop.hbase.client.HTable;
|
||||||
import org.apache.hadoop.hbase.client.HConnection;
|
import org.apache.hadoop.hbase.client.HConnection;
|
||||||
import org.apache.hadoop.hbase.client.HConnectionManager;
|
import org.apache.hadoop.hbase.client.HConnectionManager;
|
||||||
|
|
||||||
|
import org.apache.hadoop.hbase.regionserver.HLog;
|
||||||
|
import org.apache.hadoop.hbase.regionserver.HRegion;
|
||||||
|
import org.apache.hadoop.hbase.regionserver.HStoreKey;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* A non-instantiable class that has a static method capable of compacting
|
* A non-instantiable class that has a static method capable of compacting
|
||||||
* a table by merging adjacent regions that have grown too small.
|
* a table by merging adjacent regions that have grown too small.
|
||||||
|
|
|
@ -26,6 +26,7 @@ import java.util.Map;
|
||||||
import java.util.SortedMap;
|
import java.util.SortedMap;
|
||||||
|
|
||||||
import org.apache.hadoop.io.Text;
|
import org.apache.hadoop.io.Text;
|
||||||
|
import org.apache.hadoop.hbase.regionserver.HStoreKey;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* HScannerInterface iterates through a set of rows. It's implemented by
|
* HScannerInterface iterates through a set of rows. It's implemented by
|
||||||
|
|
|
@ -32,6 +32,8 @@ import org.apache.hadoop.util.ReflectionUtils;
|
||||||
import org.apache.hadoop.hbase.master.HMaster;
|
import org.apache.hadoop.hbase.master.HMaster;
|
||||||
import org.apache.hadoop.hbase.client.HBaseAdmin;
|
import org.apache.hadoop.hbase.client.HBaseAdmin;
|
||||||
|
|
||||||
|
import org.apache.hadoop.hbase.regionserver.HRegionServer;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* This class creates a single process HBase cluster. One thread is created for
|
* This class creates a single process HBase cluster. One thread is created for
|
||||||
* a master and one per region server.
|
* a master and one per region server.
|
||||||
|
@ -159,7 +161,7 @@ public class LocalHBaseCluster implements HConstants {
|
||||||
while (regionServerThread.isAlive()) {
|
while (regionServerThread.isAlive()) {
|
||||||
try {
|
try {
|
||||||
LOG.info("Waiting on " +
|
LOG.info("Waiting on " +
|
||||||
regionServerThread.getRegionServer().serverInfo.toString());
|
regionServerThread.getRegionServer().getServerInfo().toString());
|
||||||
regionServerThread.join();
|
regionServerThread.join();
|
||||||
} catch (InterruptedException e) {
|
} catch (InterruptedException e) {
|
||||||
e.printStackTrace();
|
e.printStackTrace();
|
||||||
|
|
|
@ -41,10 +41,11 @@ import org.apache.hadoop.hbase.HRegionLocation;
|
||||||
import org.apache.hadoop.hbase.TableNotFoundException;
|
import org.apache.hadoop.hbase.TableNotFoundException;
|
||||||
import org.apache.hadoop.hbase.TableExistsException;
|
import org.apache.hadoop.hbase.TableExistsException;
|
||||||
import org.apache.hadoop.hbase.RemoteExceptionHandler;
|
import org.apache.hadoop.hbase.RemoteExceptionHandler;
|
||||||
import org.apache.hadoop.hbase.HRegionInterface;
|
|
||||||
import org.apache.hadoop.hbase.HRegionInfo;
|
import org.apache.hadoop.hbase.HRegionInfo;
|
||||||
import org.apache.hadoop.hbase.HRegionLocation;
|
import org.apache.hadoop.hbase.HRegionLocation;
|
||||||
import org.apache.hadoop.hbase.HStoreKey;
|
|
||||||
|
import org.apache.hadoop.hbase.regionserver.HRegionInterface;
|
||||||
|
import org.apache.hadoop.hbase.regionserver.HStoreKey;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Provides administrative functions for HBase
|
* Provides administrative functions for HBase
|
||||||
|
|
|
@ -28,7 +28,8 @@ import org.apache.hadoop.hbase.MasterNotRunningException;
|
||||||
import org.apache.hadoop.hbase.HTableDescriptor;
|
import org.apache.hadoop.hbase.HTableDescriptor;
|
||||||
import org.apache.hadoop.hbase.HRegionLocation;
|
import org.apache.hadoop.hbase.HRegionLocation;
|
||||||
import org.apache.hadoop.hbase.HServerAddress;
|
import org.apache.hadoop.hbase.HServerAddress;
|
||||||
import org.apache.hadoop.hbase.HRegionInterface;
|
import org.apache.hadoop.hbase.regionserver.HRegionInterface;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
*
|
*
|
||||||
*/
|
*/
|
||||||
|
|
|
@ -42,20 +42,19 @@ import org.apache.hadoop.hbase.util.SoftSortedMap;
|
||||||
import org.apache.hadoop.hbase.HTableDescriptor;
|
import org.apache.hadoop.hbase.HTableDescriptor;
|
||||||
import org.apache.hadoop.hbase.HConstants;
|
import org.apache.hadoop.hbase.HConstants;
|
||||||
import org.apache.hadoop.hbase.HBaseConfiguration;
|
import org.apache.hadoop.hbase.HBaseConfiguration;
|
||||||
import org.apache.hadoop.hbase.HRegionInterface;
|
|
||||||
import org.apache.hadoop.hbase.MasterNotRunningException;
|
import org.apache.hadoop.hbase.MasterNotRunningException;
|
||||||
import org.apache.hadoop.hbase.HRegionLocation;
|
import org.apache.hadoop.hbase.HRegionLocation;
|
||||||
import org.apache.hadoop.hbase.HServerAddress;
|
import org.apache.hadoop.hbase.HServerAddress;
|
||||||
import org.apache.hadoop.hbase.LocalHBaseCluster;
|
import org.apache.hadoop.hbase.LocalHBaseCluster;
|
||||||
import org.apache.hadoop.hbase.HStoreKey;
|
|
||||||
import org.apache.hadoop.hbase.HRegionInterface;
|
|
||||||
import org.apache.hadoop.hbase.HRegionLocation;
|
import org.apache.hadoop.hbase.HRegionLocation;
|
||||||
import org.apache.hadoop.hbase.HRegionInfo;
|
import org.apache.hadoop.hbase.HRegionInfo;
|
||||||
import org.apache.hadoop.hbase.HStoreKey;
|
|
||||||
import org.apache.hadoop.hbase.NoServerForRegionException;
|
import org.apache.hadoop.hbase.NoServerForRegionException;
|
||||||
import org.apache.hadoop.hbase.TableNotFoundException;
|
import org.apache.hadoop.hbase.TableNotFoundException;
|
||||||
import org.apache.hadoop.hbase.RemoteExceptionHandler;
|
import org.apache.hadoop.hbase.RemoteExceptionHandler;
|
||||||
|
|
||||||
|
import org.apache.hadoop.hbase.regionserver.HRegionInterface;
|
||||||
|
import org.apache.hadoop.hbase.regionserver.HStoreKey;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* A non-instantiable class that manages connections to multiple tables in
|
* A non-instantiable class that manages connections to multiple tables in
|
||||||
* multiple HBase instances
|
* multiple HBase instances
|
||||||
|
|
|
@ -47,14 +47,13 @@ import org.apache.hadoop.hbase.HConstants;
|
||||||
import org.apache.hadoop.hbase.HBaseConfiguration;
|
import org.apache.hadoop.hbase.HBaseConfiguration;
|
||||||
import org.apache.hadoop.hbase.HRegionLocation;
|
import org.apache.hadoop.hbase.HRegionLocation;
|
||||||
import org.apache.hadoop.hbase.HScannerInterface;
|
import org.apache.hadoop.hbase.HScannerInterface;
|
||||||
import org.apache.hadoop.hbase.HRegionInterface;
|
|
||||||
import org.apache.hadoop.hbase.HStoreKey;
|
|
||||||
import org.apache.hadoop.hbase.HTableDescriptor;
|
import org.apache.hadoop.hbase.HTableDescriptor;
|
||||||
import org.apache.hadoop.hbase.HRegionInfo;
|
import org.apache.hadoop.hbase.HRegionInfo;
|
||||||
import org.apache.hadoop.hbase.RemoteExceptionHandler;
|
import org.apache.hadoop.hbase.RemoteExceptionHandler;
|
||||||
import org.apache.hadoop.hbase.NotServingRegionException;
|
import org.apache.hadoop.hbase.NotServingRegionException;
|
||||||
import org.apache.hadoop.hbase.HRegionInterface;
|
|
||||||
import org.apache.hadoop.hbase.HStoreKey;
|
import org.apache.hadoop.hbase.regionserver.HStoreKey;
|
||||||
|
import org.apache.hadoop.hbase.regionserver.HRegionInterface;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Used to communicate with a single HBase table
|
* Used to communicate with a single HBase table
|
||||||
|
|
|
@ -35,7 +35,7 @@ import org.apache.commons.logging.Log;
|
||||||
import org.apache.commons.logging.LogFactory;
|
import org.apache.commons.logging.LogFactory;
|
||||||
import org.apache.hadoop.io.Text;
|
import org.apache.hadoop.io.Text;
|
||||||
|
|
||||||
import org.apache.hadoop.hbase.HLogEdit;
|
import org.apache.hadoop.hbase.regionserver.HLogEdit;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Implementation of RowFilterInterface that can filter by rowkey regular
|
* Implementation of RowFilterInterface that can filter by rowkey regular
|
||||||
|
|
|
@ -6,8 +6,8 @@ import javax.servlet.jsp.*;
|
||||||
import java.util.*;
|
import java.util.*;
|
||||||
import org.apache.hadoop.io.Text;
|
import org.apache.hadoop.io.Text;
|
||||||
import org.apache.hadoop.util.VersionInfo;
|
import org.apache.hadoop.util.VersionInfo;
|
||||||
import org.apache.hadoop.hbase.HRegionServer;
|
import org.apache.hadoop.hbase.regionserver.HRegionServer;
|
||||||
import org.apache.hadoop.hbase.HRegion;
|
import org.apache.hadoop.hbase.regionserver.HRegion;
|
||||||
import org.apache.hadoop.hbase.HConstants;
|
import org.apache.hadoop.hbase.HConstants;
|
||||||
import org.apache.hadoop.hbase.HServerInfo;
|
import org.apache.hadoop.hbase.HServerInfo;
|
||||||
import org.apache.hadoop.hbase.HRegionInfo;
|
import org.apache.hadoop.hbase.HRegionInfo;
|
||||||
|
|
|
@ -31,7 +31,7 @@ import java.util.TreeMap;
|
||||||
import org.apache.hadoop.hbase.HBaseConfiguration;
|
import org.apache.hadoop.hbase.HBaseConfiguration;
|
||||||
import org.apache.hadoop.hbase.HConstants;
|
import org.apache.hadoop.hbase.HConstants;
|
||||||
import org.apache.hadoop.hbase.HScannerInterface;
|
import org.apache.hadoop.hbase.HScannerInterface;
|
||||||
import org.apache.hadoop.hbase.HStoreKey;
|
import org.apache.hadoop.hbase.regionserver.HStoreKey;
|
||||||
import org.apache.hadoop.hbase.HTableDescriptor;
|
import org.apache.hadoop.hbase.HTableDescriptor;
|
||||||
import org.apache.hadoop.hbase.Shell;
|
import org.apache.hadoop.hbase.Shell;
|
||||||
import org.apache.hadoop.hbase.filter.RowFilterInterface;
|
import org.apache.hadoop.hbase.filter.RowFilterInterface;
|
||||||
|
|
|
@ -30,11 +30,12 @@ import java.util.concurrent.atomic.AtomicReference;
|
||||||
|
|
||||||
import org.apache.hadoop.conf.Configurable;
|
import org.apache.hadoop.conf.Configurable;
|
||||||
import org.apache.hadoop.conf.Configuration;
|
import org.apache.hadoop.conf.Configuration;
|
||||||
import org.apache.hadoop.hbase.HStoreKey;
|
|
||||||
import org.apache.hadoop.io.Text;
|
import org.apache.hadoop.io.Text;
|
||||||
import org.apache.hadoop.io.Writable;
|
import org.apache.hadoop.io.Writable;
|
||||||
import org.apache.hadoop.util.ReflectionUtils;
|
import org.apache.hadoop.util.ReflectionUtils;
|
||||||
|
|
||||||
|
import org.apache.hadoop.hbase.regionserver.HStoreKey;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* A Writable Map.
|
* A Writable Map.
|
||||||
* Like {@link org.apache.hadoop.io.MapWritable} but dumb. It will fail
|
* Like {@link org.apache.hadoop.io.MapWritable} but dumb. It will fail
|
||||||
|
|
|
@ -25,7 +25,7 @@ import java.util.ArrayList;
|
||||||
import java.util.Map;
|
import java.util.Map;
|
||||||
|
|
||||||
import org.apache.hadoop.hbase.HConstants;
|
import org.apache.hadoop.hbase.HConstants;
|
||||||
import org.apache.hadoop.hbase.HStoreKey;
|
import org.apache.hadoop.hbase.regionserver.HStoreKey;
|
||||||
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
|
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
|
||||||
import org.apache.hadoop.io.MapWritable;
|
import org.apache.hadoop.io.MapWritable;
|
||||||
import org.apache.hadoop.io.Text;
|
import org.apache.hadoop.io.Text;
|
||||||
|
|
|
@ -21,7 +21,7 @@ package org.apache.hadoop.hbase.mapred;
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
|
|
||||||
import org.apache.hadoop.hbase.HStoreKey;
|
import org.apache.hadoop.hbase.regionserver.HStoreKey;
|
||||||
import org.apache.hadoop.io.MapWritable;
|
import org.apache.hadoop.io.MapWritable;
|
||||||
import org.apache.hadoop.io.Text;
|
import org.apache.hadoop.io.Text;
|
||||||
import org.apache.hadoop.mapred.OutputCollector;
|
import org.apache.hadoop.mapred.OutputCollector;
|
||||||
|
|
|
@ -38,7 +38,7 @@ import org.apache.hadoop.mapred.Reporter;
|
||||||
|
|
||||||
import org.apache.hadoop.hbase.HBaseConfiguration;
|
import org.apache.hadoop.hbase.HBaseConfiguration;
|
||||||
import org.apache.hadoop.hbase.HScannerInterface;
|
import org.apache.hadoop.hbase.HScannerInterface;
|
||||||
import org.apache.hadoop.hbase.HStoreKey;
|
import org.apache.hadoop.hbase.regionserver.HStoreKey;
|
||||||
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
|
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
|
||||||
import org.apache.hadoop.hbase.client.HTable;
|
import org.apache.hadoop.hbase.client.HTable;
|
||||||
|
|
||||||
|
|
|
@ -22,7 +22,7 @@ package org.apache.hadoop.hbase.mapred;
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
|
|
||||||
import org.apache.hadoop.fs.Path;
|
import org.apache.hadoop.fs.Path;
|
||||||
import org.apache.hadoop.hbase.HStoreKey;
|
import org.apache.hadoop.hbase.regionserver.HStoreKey;
|
||||||
import org.apache.hadoop.io.MapWritable;
|
import org.apache.hadoop.io.MapWritable;
|
||||||
import org.apache.hadoop.io.Text;
|
import org.apache.hadoop.io.Text;
|
||||||
import org.apache.hadoop.io.Writable;
|
import org.apache.hadoop.io.Writable;
|
||||||
|
|
|
@ -23,7 +23,7 @@ import java.io.IOException;
|
||||||
|
|
||||||
import org.apache.hadoop.hbase.HColumnDescriptor;
|
import org.apache.hadoop.hbase.HColumnDescriptor;
|
||||||
import org.apache.hadoop.hbase.HRegionInfo;
|
import org.apache.hadoop.hbase.HRegionInfo;
|
||||||
import org.apache.hadoop.hbase.HRegionInterface;
|
import org.apache.hadoop.hbase.regionserver.HRegionInterface;
|
||||||
import org.apache.hadoop.io.Text;
|
import org.apache.hadoop.io.Text;
|
||||||
|
|
||||||
/** Instantiated to add a column family to a table */
|
/** Instantiated to add a column family to a table */
|
||||||
|
|
|
@ -32,8 +32,6 @@ import org.apache.hadoop.ipc.RemoteException;
|
||||||
import org.apache.commons.logging.Log;
|
import org.apache.commons.logging.Log;
|
||||||
import org.apache.commons.logging.LogFactory;
|
import org.apache.commons.logging.LogFactory;
|
||||||
import org.apache.hadoop.hbase.Chore;
|
import org.apache.hadoop.hbase.Chore;
|
||||||
import org.apache.hadoop.hbase.HRegion;
|
|
||||||
import org.apache.hadoop.hbase.HRegionInterface;
|
|
||||||
import org.apache.hadoop.hbase.HRegionInfo;
|
import org.apache.hadoop.hbase.HRegionInfo;
|
||||||
import org.apache.hadoop.hbase.HServerInfo;
|
import org.apache.hadoop.hbase.HServerInfo;
|
||||||
import org.apache.hadoop.io.Text;
|
import org.apache.hadoop.io.Text;
|
||||||
|
@ -44,11 +42,15 @@ import org.apache.hadoop.hbase.RemoteExceptionHandler;
|
||||||
import org.apache.hadoop.hbase.UnknownScannerException;
|
import org.apache.hadoop.hbase.UnknownScannerException;
|
||||||
import org.apache.hadoop.hbase.HTableDescriptor;
|
import org.apache.hadoop.hbase.HTableDescriptor;
|
||||||
import org.apache.hadoop.hbase.HColumnDescriptor;
|
import org.apache.hadoop.hbase.HColumnDescriptor;
|
||||||
import org.apache.hadoop.hbase.HStoreFile;
|
|
||||||
import org.apache.hadoop.hbase.HStore;
|
|
||||||
import org.apache.hadoop.hbase.HLog;
|
|
||||||
import org.apache.hadoop.hbase.io.BatchUpdate;
|
import org.apache.hadoop.hbase.io.BatchUpdate;
|
||||||
|
|
||||||
|
import org.apache.hadoop.hbase.regionserver.HRegion;
|
||||||
|
import org.apache.hadoop.hbase.regionserver.HStoreFile;
|
||||||
|
import org.apache.hadoop.hbase.regionserver.HStore;
|
||||||
|
import org.apache.hadoop.hbase.regionserver.HLog;
|
||||||
|
import org.apache.hadoop.hbase.regionserver.HRegionInterface;
|
||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Base HRegion scanner class. Holds utilty common to <code>ROOT</code> and
|
* Base HRegion scanner class. Holds utilty common to <code>ROOT</code> and
|
||||||
* <code>META</code> HRegion scanners.
|
* <code>META</code> HRegion scanners.
|
||||||
|
|
|
@ -25,7 +25,7 @@ import java.util.HashSet;
|
||||||
import java.util.Map;
|
import java.util.Map;
|
||||||
|
|
||||||
import org.apache.hadoop.hbase.HRegionInfo;
|
import org.apache.hadoop.hbase.HRegionInfo;
|
||||||
import org.apache.hadoop.hbase.HRegionInterface;
|
import org.apache.hadoop.hbase.regionserver.HRegionInterface;
|
||||||
import org.apache.hadoop.hbase.io.BatchUpdate;
|
import org.apache.hadoop.hbase.io.BatchUpdate;
|
||||||
import org.apache.hadoop.hbase.util.Writables;
|
import org.apache.hadoop.hbase.util.Writables;
|
||||||
import org.apache.hadoop.io.Text;
|
import org.apache.hadoop.io.Text;
|
||||||
|
|
|
@ -22,7 +22,7 @@ package org.apache.hadoop.hbase.master;
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
import org.apache.hadoop.io.Text;
|
import org.apache.hadoop.io.Text;
|
||||||
import org.apache.hadoop.hbase.HRegionInfo;
|
import org.apache.hadoop.hbase.HRegionInfo;
|
||||||
import org.apache.hadoop.hbase.HRegionInterface;
|
import org.apache.hadoop.hbase.regionserver.HRegionInterface;
|
||||||
import org.apache.hadoop.hbase.io.BatchUpdate;
|
import org.apache.hadoop.hbase.io.BatchUpdate;
|
||||||
import org.apache.hadoop.hbase.TableNotDisabledException;
|
import org.apache.hadoop.hbase.TableNotDisabledException;
|
||||||
import org.apache.hadoop.hbase.util.Writables;
|
import org.apache.hadoop.hbase.util.Writables;
|
||||||
|
|
|
@ -23,9 +23,9 @@ import java.io.IOException;
|
||||||
|
|
||||||
import org.apache.hadoop.io.Text;
|
import org.apache.hadoop.io.Text;
|
||||||
import org.apache.hadoop.hbase.HRegionInfo;
|
import org.apache.hadoop.hbase.HRegionInfo;
|
||||||
import org.apache.hadoop.hbase.HRegionInterface;
|
|
||||||
import org.apache.hadoop.fs.Path;
|
import org.apache.hadoop.fs.Path;
|
||||||
import org.apache.hadoop.hbase.HStoreFile;
|
import org.apache.hadoop.hbase.regionserver.HStoreFile;
|
||||||
|
import org.apache.hadoop.hbase.regionserver.HRegionInterface;
|
||||||
|
|
||||||
/** Instantiated to remove a column family from a table */
|
/** Instantiated to remove a column family from a table */
|
||||||
class DeleteColumn extends ColumnOperation {
|
class DeleteColumn extends ColumnOperation {
|
||||||
|
|
|
@ -62,24 +62,26 @@ import org.apache.hadoop.hbase.HConstants;
|
||||||
import org.apache.hadoop.hbase.HBaseConfiguration;
|
import org.apache.hadoop.hbase.HBaseConfiguration;
|
||||||
import org.apache.hadoop.hbase.Leases;
|
import org.apache.hadoop.hbase.Leases;
|
||||||
import org.apache.hadoop.hbase.HServerAddress;
|
import org.apache.hadoop.hbase.HServerAddress;
|
||||||
import org.apache.hadoop.hbase.client.HConnection;
|
|
||||||
import org.apache.hadoop.hbase.client.HConnectionManager;
|
|
||||||
import org.apache.hadoop.hbase.HColumnDescriptor;
|
import org.apache.hadoop.hbase.HColumnDescriptor;
|
||||||
import org.apache.hadoop.hbase.HTableDescriptor;
|
import org.apache.hadoop.hbase.HTableDescriptor;
|
||||||
import org.apache.hadoop.hbase.HRegionInterface;
|
|
||||||
import org.apache.hadoop.hbase.HRegionInfo;
|
import org.apache.hadoop.hbase.HRegionInfo;
|
||||||
import org.apache.hadoop.hbase.HServerLoad;
|
import org.apache.hadoop.hbase.HServerLoad;
|
||||||
import org.apache.hadoop.hbase.HRegion;
|
|
||||||
import org.apache.hadoop.hbase.RemoteExceptionHandler;
|
import org.apache.hadoop.hbase.RemoteExceptionHandler;
|
||||||
import org.apache.hadoop.hbase.HMsg;
|
import org.apache.hadoop.hbase.HMsg;
|
||||||
import org.apache.hadoop.hbase.client.HBaseAdmin;
|
|
||||||
import org.apache.hadoop.hbase.LocalHBaseCluster;
|
import org.apache.hadoop.hbase.LocalHBaseCluster;
|
||||||
import org.apache.hadoop.hbase.HStoreKey;
|
|
||||||
import org.apache.hadoop.hbase.HServerInfo;
|
import org.apache.hadoop.hbase.HServerInfo;
|
||||||
import org.apache.hadoop.hbase.TableExistsException;
|
import org.apache.hadoop.hbase.TableExistsException;
|
||||||
import org.apache.hadoop.hbase.MasterNotRunningException;
|
import org.apache.hadoop.hbase.MasterNotRunningException;
|
||||||
import org.apache.hadoop.hbase.LeaseListener;
|
import org.apache.hadoop.hbase.LeaseListener;
|
||||||
|
|
||||||
|
import org.apache.hadoop.hbase.client.HConnection;
|
||||||
|
import org.apache.hadoop.hbase.client.HConnectionManager;
|
||||||
|
import org.apache.hadoop.hbase.client.HBaseAdmin;
|
||||||
|
|
||||||
|
import org.apache.hadoop.hbase.regionserver.HRegionInterface;
|
||||||
|
import org.apache.hadoop.hbase.regionserver.HRegion;
|
||||||
|
import org.apache.hadoop.hbase.regionserver.HStoreKey;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* HMaster is the "master server" for a HBase.
|
* HMaster is the "master server" for a HBase.
|
||||||
* There is only one HMaster for a single HBase deployment.
|
* There is only one HMaster for a single HBase deployment.
|
||||||
|
|
|
@ -22,7 +22,7 @@ package org.apache.hadoop.hbase.master;
|
||||||
import java.util.Map;
|
import java.util.Map;
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
import org.apache.hadoop.hbase.HColumnDescriptor;
|
import org.apache.hadoop.hbase.HColumnDescriptor;
|
||||||
import org.apache.hadoop.hbase.HRegionInterface;
|
import org.apache.hadoop.hbase.regionserver.HRegionInterface;
|
||||||
import org.apache.hadoop.hbase.HRegionInfo;
|
import org.apache.hadoop.hbase.HRegionInfo;
|
||||||
import org.apache.hadoop.io.Text;
|
import org.apache.hadoop.io.Text;
|
||||||
|
|
||||||
|
|
|
@ -21,7 +21,7 @@ package org.apache.hadoop.hbase.master;
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
|
|
||||||
import org.apache.hadoop.hbase.HRegion;
|
import org.apache.hadoop.hbase.regionserver.HRegion;
|
||||||
import org.apache.hadoop.hbase.HRegionInfo;
|
import org.apache.hadoop.hbase.HRegionInfo;
|
||||||
import org.apache.hadoop.hbase.RemoteExceptionHandler;
|
import org.apache.hadoop.hbase.RemoteExceptionHandler;
|
||||||
|
|
||||||
|
|
|
@ -25,7 +25,7 @@ import org.apache.hadoop.hbase.HServerAddress;
|
||||||
import org.apache.hadoop.hbase.HServerInfo;
|
import org.apache.hadoop.hbase.HServerInfo;
|
||||||
import org.apache.hadoop.hbase.HRegionInfo;
|
import org.apache.hadoop.hbase.HRegionInfo;
|
||||||
import org.apache.hadoop.hbase.util.Writables;
|
import org.apache.hadoop.hbase.util.Writables;
|
||||||
import org.apache.hadoop.hbase.HRegionInterface;
|
import org.apache.hadoop.hbase.regionserver.HRegionInterface;
|
||||||
import org.apache.hadoop.hbase.io.BatchUpdate;
|
import org.apache.hadoop.hbase.io.BatchUpdate;
|
||||||
import org.apache.hadoop.hbase.RemoteExceptionHandler;
|
import org.apache.hadoop.hbase.RemoteExceptionHandler;
|
||||||
|
|
||||||
|
|
|
@ -22,7 +22,7 @@ package org.apache.hadoop.hbase.master;
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
|
|
||||||
import org.apache.hadoop.hbase.HRegionInfo;
|
import org.apache.hadoop.hbase.HRegionInfo;
|
||||||
import org.apache.hadoop.hbase.HRegionInterface;
|
import org.apache.hadoop.hbase.regionserver.HRegionInterface;
|
||||||
import org.apache.hadoop.hbase.HServerAddress;
|
import org.apache.hadoop.hbase.HServerAddress;
|
||||||
import org.apache.hadoop.io.Text;
|
import org.apache.hadoop.io.Text;
|
||||||
|
|
||||||
|
|
|
@ -31,13 +31,14 @@ import org.apache.hadoop.hbase.HServerAddress;
|
||||||
import org.apache.hadoop.fs.Path;
|
import org.apache.hadoop.fs.Path;
|
||||||
import org.apache.hadoop.hbase.HServerInfo;
|
import org.apache.hadoop.hbase.HServerInfo;
|
||||||
import org.apache.hadoop.hbase.HRegionInfo;
|
import org.apache.hadoop.hbase.HRegionInfo;
|
||||||
import org.apache.hadoop.hbase.HRegionInterface;
|
|
||||||
import org.apache.hadoop.hbase.HRegion;
|
|
||||||
import org.apache.hadoop.io.Text;
|
import org.apache.hadoop.io.Text;
|
||||||
import org.apache.hadoop.hbase.io.HbaseMapWritable;
|
import org.apache.hadoop.hbase.io.HbaseMapWritable;
|
||||||
import org.apache.hadoop.hbase.RemoteExceptionHandler;
|
import org.apache.hadoop.hbase.RemoteExceptionHandler;
|
||||||
import org.apache.hadoop.hbase.util.Writables;
|
import org.apache.hadoop.hbase.util.Writables;
|
||||||
import org.apache.hadoop.hbase.HLog;
|
|
||||||
|
import org.apache.hadoop.hbase.regionserver.HRegionInterface;
|
||||||
|
import org.apache.hadoop.hbase.regionserver.HRegion;
|
||||||
|
import org.apache.hadoop.hbase.regionserver.HLog;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Instantiated when a server's lease has expired, meaning it has crashed.
|
* Instantiated when a server's lease has expired, meaning it has crashed.
|
||||||
|
|
|
@ -45,8 +45,8 @@ import org.apache.hadoop.hbase.HServerAddress;
|
||||||
import org.apache.hadoop.hbase.HServerInfo;
|
import org.apache.hadoop.hbase.HServerInfo;
|
||||||
import org.apache.hadoop.hbase.HServerLoad;
|
import org.apache.hadoop.hbase.HServerLoad;
|
||||||
import org.apache.hadoop.hbase.HRegionInfo;
|
import org.apache.hadoop.hbase.HRegionInfo;
|
||||||
import org.apache.hadoop.hbase.HRegion;
|
import org.apache.hadoop.hbase.regionserver.HRegion;
|
||||||
import org.apache.hadoop.hbase.HRegionInterface;
|
import org.apache.hadoop.hbase.regionserver.HRegionInterface;
|
||||||
import org.apache.hadoop.hbase.HMsg;
|
import org.apache.hadoop.hbase.HMsg;
|
||||||
import org.apache.hadoop.hbase.util.Threads;
|
import org.apache.hadoop.hbase.util.Threads;
|
||||||
import org.apache.hadoop.hbase.io.BatchUpdate;
|
import org.apache.hadoop.hbase.io.BatchUpdate;
|
||||||
|
|
|
@ -29,7 +29,7 @@ import org.apache.commons.logging.LogFactory;
|
||||||
import org.apache.hadoop.io.Text;
|
import org.apache.hadoop.io.Text;
|
||||||
import org.apache.hadoop.hbase.io.HbaseMapWritable;
|
import org.apache.hadoop.hbase.io.HbaseMapWritable;
|
||||||
import org.apache.hadoop.io.Writable;
|
import org.apache.hadoop.io.Writable;
|
||||||
import org.apache.hadoop.hbase.HStoreKey;
|
import org.apache.hadoop.hbase.regionserver.HStoreKey;
|
||||||
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
|
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
|
||||||
|
|
||||||
/*
|
/*
|
||||||
|
|
|
@ -23,13 +23,14 @@ import java.io.IOException;
|
||||||
import java.util.HashSet;
|
import java.util.HashSet;
|
||||||
|
|
||||||
import org.apache.hadoop.fs.Path;
|
import org.apache.hadoop.fs.Path;
|
||||||
import org.apache.hadoop.hbase.HRegion;
|
|
||||||
import org.apache.hadoop.hbase.HRegionInfo;
|
import org.apache.hadoop.hbase.HRegionInfo;
|
||||||
import org.apache.hadoop.hbase.HRegionInterface;
|
|
||||||
import org.apache.hadoop.hbase.RemoteExceptionHandler;
|
import org.apache.hadoop.hbase.RemoteExceptionHandler;
|
||||||
import org.apache.hadoop.hbase.io.BatchUpdate;
|
import org.apache.hadoop.hbase.io.BatchUpdate;
|
||||||
import org.apache.hadoop.io.Text;
|
import org.apache.hadoop.io.Text;
|
||||||
|
|
||||||
|
import org.apache.hadoop.hbase.regionserver.HRegion;
|
||||||
|
import org.apache.hadoop.hbase.regionserver.HRegionInterface;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Instantiated to delete a table
|
* Instantiated to delete a table
|
||||||
* Note that it extends ChangeTableState, which takes care of disabling
|
* Note that it extends ChangeTableState, which takes care of disabling
|
||||||
|
|
|
@ -28,7 +28,7 @@ import org.apache.commons.logging.Log;
|
||||||
import org.apache.commons.logging.LogFactory;
|
import org.apache.commons.logging.LogFactory;
|
||||||
import org.apache.hadoop.hbase.HConstants;
|
import org.apache.hadoop.hbase.HConstants;
|
||||||
import org.apache.hadoop.hbase.HRegionInfo;
|
import org.apache.hadoop.hbase.HRegionInfo;
|
||||||
import org.apache.hadoop.hbase.HRegionInterface;
|
import org.apache.hadoop.hbase.regionserver.HRegionInterface;
|
||||||
import org.apache.hadoop.hbase.HServerInfo;
|
import org.apache.hadoop.hbase.HServerInfo;
|
||||||
import org.apache.hadoop.hbase.MasterNotRunningException;
|
import org.apache.hadoop.hbase.MasterNotRunningException;
|
||||||
import org.apache.hadoop.hbase.RemoteExceptionHandler;
|
import org.apache.hadoop.hbase.RemoteExceptionHandler;
|
||||||
|
|
|
@ -18,7 +18,7 @@
|
||||||
* limitations under the License.
|
* limitations under the License.
|
||||||
*/
|
*/
|
||||||
|
|
||||||
package org.apache.hadoop.hbase;
|
package org.apache.hadoop.hbase.regionserver;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Implementors of this interface want to be notified when an HRegion
|
* Implementors of this interface want to be notified when an HRegion
|
|
@ -17,7 +17,7 @@
|
||||||
* See the License for the specific language governing permissions and
|
* See the License for the specific language governing permissions and
|
||||||
* limitations under the License.
|
* limitations under the License.
|
||||||
*/
|
*/
|
||||||
package org.apache.hadoop.hbase;
|
package org.apache.hadoop.hbase.regionserver;
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
import java.util.Iterator;
|
import java.util.Iterator;
|
|
@ -17,7 +17,9 @@
|
||||||
* See the License for the specific language governing permissions and
|
* See the License for the specific language governing permissions and
|
||||||
* limitations under the License.
|
* limitations under the License.
|
||||||
*/
|
*/
|
||||||
package org.apache.hadoop.hbase;
|
package org.apache.hadoop.hbase.regionserver;
|
||||||
|
|
||||||
|
import org.apache.hadoop.hbase.HScannerInterface;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Internally, we need to be able to determine if the scanner is doing wildcard
|
* Internally, we need to be able to determine if the scanner is doing wildcard
|
|
@ -17,7 +17,7 @@
|
||||||
* See the License for the specific language governing permissions and
|
* See the License for the specific language governing permissions and
|
||||||
* limitations under the License.
|
* limitations under the License.
|
||||||
*/
|
*/
|
||||||
package org.apache.hadoop.hbase;
|
package org.apache.hadoop.hbase.regionserver;
|
||||||
|
|
||||||
import java.io.File;
|
import java.io.File;
|
||||||
import java.io.FileNotFoundException;
|
import java.io.FileNotFoundException;
|
||||||
|
@ -43,6 +43,11 @@ import org.apache.hadoop.io.Text;
|
||||||
import org.apache.hadoop.io.SequenceFile.CompressionType;
|
import org.apache.hadoop.io.SequenceFile.CompressionType;
|
||||||
import org.apache.hadoop.io.SequenceFile.Reader;
|
import org.apache.hadoop.io.SequenceFile.Reader;
|
||||||
|
|
||||||
|
import org.apache.hadoop.hbase.HConstants;
|
||||||
|
import org.apache.hadoop.hbase.HTableDescriptor;
|
||||||
|
import org.apache.hadoop.hbase.HRegionInfo;
|
||||||
|
import org.apache.hadoop.hbase.HBaseConfiguration;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* HLog stores all the edits to the HStore.
|
* HLog stores all the edits to the HStore.
|
||||||
*
|
*
|
|
@ -17,13 +17,15 @@
|
||||||
* See the License for the specific language governing permissions and
|
* See the License for the specific language governing permissions and
|
||||||
* limitations under the License.
|
* limitations under the License.
|
||||||
*/
|
*/
|
||||||
package org.apache.hadoop.hbase;
|
package org.apache.hadoop.hbase.regionserver;
|
||||||
|
|
||||||
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
|
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
|
||||||
import org.apache.hadoop.io.*;
|
import org.apache.hadoop.io.*;
|
||||||
|
|
||||||
import java.io.*;
|
import java.io.*;
|
||||||
|
|
||||||
|
import org.apache.hadoop.hbase.HConstants;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* A log value.
|
* A log value.
|
||||||
*
|
*
|
|
@ -17,7 +17,7 @@
|
||||||
* See the License for the specific language governing permissions and
|
* See the License for the specific language governing permissions and
|
||||||
* limitations under the License.
|
* limitations under the License.
|
||||||
*/
|
*/
|
||||||
package org.apache.hadoop.hbase;
|
package org.apache.hadoop.hbase.regionserver;
|
||||||
|
|
||||||
import org.apache.hadoop.io.*;
|
import org.apache.hadoop.io.*;
|
||||||
|
|
|
@ -17,7 +17,7 @@
|
||||||
* See the License for the specific language governing permissions and
|
* See the License for the specific language governing permissions and
|
||||||
* limitations under the License.
|
* limitations under the License.
|
||||||
*/
|
*/
|
||||||
package org.apache.hadoop.hbase;
|
package org.apache.hadoop.hbase.regionserver;
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
import java.util.ArrayList;
|
import java.util.ArrayList;
|
||||||
|
@ -47,6 +47,15 @@ import org.apache.hadoop.io.Text;
|
||||||
import org.apache.hadoop.io.WritableUtils;
|
import org.apache.hadoop.io.WritableUtils;
|
||||||
import org.apache.hadoop.util.StringUtils;
|
import org.apache.hadoop.util.StringUtils;
|
||||||
|
|
||||||
|
import org.apache.hadoop.hbase.HConstants;
|
||||||
|
import org.apache.hadoop.hbase.HBaseConfiguration;
|
||||||
|
import org.apache.hadoop.hbase.HRegionInfo;
|
||||||
|
import org.apache.hadoop.hbase.HTableDescriptor;
|
||||||
|
import org.apache.hadoop.hbase.HColumnDescriptor;
|
||||||
|
import org.apache.hadoop.hbase.HScannerInterface;
|
||||||
|
import org.apache.hadoop.hbase.DroppedSnapshotException;
|
||||||
|
import org.apache.hadoop.hbase.WrongRegionException;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* HRegion stores data for a certain region of a table. It stores all columns
|
* HRegion stores data for a certain region of a table. It stores all columns
|
||||||
* for each row. A given table consists of one or more HRegions.
|
* for each row. A given table consists of one or more HRegions.
|
||||||
|
@ -96,7 +105,7 @@ public class HRegion implements HConstants {
|
||||||
* HRegionServer. Returns a brand-new active HRegion, also
|
* HRegionServer. Returns a brand-new active HRegion, also
|
||||||
* running on the current HRegionServer.
|
* running on the current HRegionServer.
|
||||||
*/
|
*/
|
||||||
static HRegion closeAndMerge(final HRegion srcA, final HRegion srcB)
|
public static HRegion closeAndMerge(final HRegion srcA, final HRegion srcB)
|
||||||
throws IOException {
|
throws IOException {
|
||||||
|
|
||||||
HRegion a = srcA;
|
HRegion a = srcA;
|
||||||
|
@ -482,7 +491,7 @@ public class HRegion implements HConstants {
|
||||||
}
|
}
|
||||||
|
|
||||||
/** @return region id */
|
/** @return region id */
|
||||||
long getRegionId() {
|
public long getRegionId() {
|
||||||
return this.regionInfo.getRegionId();
|
return this.regionInfo.getRegionId();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -492,7 +501,7 @@ public class HRegion implements HConstants {
|
||||||
}
|
}
|
||||||
|
|
||||||
/** @return HTableDescriptor for this region */
|
/** @return HTableDescriptor for this region */
|
||||||
HTableDescriptor getTableDesc() {
|
public HTableDescriptor getTableDesc() {
|
||||||
return this.regionInfo.getTableDesc();
|
return this.regionInfo.getTableDesc();
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -502,17 +511,17 @@ public class HRegion implements HConstants {
|
||||||
}
|
}
|
||||||
|
|
||||||
/** @return Configuration object */
|
/** @return Configuration object */
|
||||||
HBaseConfiguration getConf() {
|
public HBaseConfiguration getConf() {
|
||||||
return this.conf;
|
return this.conf;
|
||||||
}
|
}
|
||||||
|
|
||||||
/** @return region directory Path */
|
/** @return region directory Path */
|
||||||
Path getRegionDir() {
|
public Path getRegionDir() {
|
||||||
return this.regiondir;
|
return this.regiondir;
|
||||||
}
|
}
|
||||||
|
|
||||||
/** @return FileSystem being used by this region */
|
/** @return FileSystem being used by this region */
|
||||||
FileSystem getFilesystem() {
|
public FileSystem getFilesystem() {
|
||||||
return this.fs;
|
return this.fs;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -533,11 +542,11 @@ public class HRegion implements HConstants {
|
||||||
* splitable or not (Its not splitable if region has a store that has a
|
* splitable or not (Its not splitable if region has a store that has a
|
||||||
* reference store file).
|
* reference store file).
|
||||||
*/
|
*/
|
||||||
HStore.HStoreSize largestHStore(Text midkey) {
|
public HStoreSize largestHStore(Text midkey) {
|
||||||
HStore.HStoreSize biggest = null;
|
HStoreSize biggest = null;
|
||||||
boolean splitable = true;
|
boolean splitable = true;
|
||||||
for (HStore h: stores.values()) {
|
for (HStore h: stores.values()) {
|
||||||
HStore.HStoreSize size = h.size(midkey);
|
HStoreSize size = h.size(midkey);
|
||||||
// If we came across a reference down in the store, then propagate
|
// If we came across a reference down in the store, then propagate
|
||||||
// fact that region is not splitable.
|
// fact that region is not splitable.
|
||||||
if (splitable) {
|
if (splitable) {
|
||||||
|
@ -670,7 +679,7 @@ public class HRegion implements HConstants {
|
||||||
* Check it for a midKey value on return.
|
* Check it for a midKey value on return.
|
||||||
*/
|
*/
|
||||||
boolean needsSplit(Text midKey) {
|
boolean needsSplit(Text midKey) {
|
||||||
HStore.HStoreSize biggest = largestHStore(midKey);
|
HStoreSize biggest = largestHStore(midKey);
|
||||||
if (biggest == null || midKey.getLength() == 0 ||
|
if (biggest == null || midKey.getLength() == 0 ||
|
||||||
(midKey.equals(getStartKey()) && midKey.equals(getEndKey())) ) {
|
(midKey.equals(getStartKey()) && midKey.equals(getEndKey())) ) {
|
||||||
return false;
|
return false;
|
||||||
|
@ -701,7 +710,7 @@ public class HRegion implements HConstants {
|
||||||
* @return
|
* @return
|
||||||
* @throws IOException
|
* @throws IOException
|
||||||
*/
|
*/
|
||||||
boolean compactIfNeeded() throws IOException {
|
public boolean compactIfNeeded() throws IOException {
|
||||||
boolean needsCompaction = false;
|
boolean needsCompaction = false;
|
||||||
for (HStore store: stores.values()) {
|
for (HStore store: stores.values()) {
|
||||||
if (store.needsCompaction()) {
|
if (store.needsCompaction()) {
|
||||||
|
@ -761,7 +770,7 @@ public class HRegion implements HConstants {
|
||||||
* conflicts with a region split, and that cannot happen because the region
|
* conflicts with a region split, and that cannot happen because the region
|
||||||
* server does them sequentially and not in parallel.
|
* server does them sequentially and not in parallel.
|
||||||
*/
|
*/
|
||||||
boolean compactStores() throws IOException {
|
public boolean compactStores() throws IOException {
|
||||||
if (this.closed.get()) {
|
if (this.closed.get()) {
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
@ -821,7 +830,7 @@ public class HRegion implements HConstants {
|
||||||
* @throws DroppedSnapshotException Thrown when replay of hlog is required
|
* @throws DroppedSnapshotException Thrown when replay of hlog is required
|
||||||
* because a Snapshot was not properly persisted.
|
* because a Snapshot was not properly persisted.
|
||||||
*/
|
*/
|
||||||
boolean flushcache() throws IOException {
|
public boolean flushcache() throws IOException {
|
||||||
if (this.closed.get()) {
|
if (this.closed.get()) {
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
@ -1603,7 +1612,7 @@ public class HRegion implements HConstants {
|
||||||
return regionInfo.getRegionName().toString();
|
return regionInfo.getRegionName().toString();
|
||||||
}
|
}
|
||||||
|
|
||||||
private Path getBaseDir() {
|
public Path getBaseDir() {
|
||||||
return this.basedir;
|
return this.basedir;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1903,7 +1912,7 @@ public class HRegion implements HConstants {
|
||||||
* @return Path of HRegion directory
|
* @return Path of HRegion directory
|
||||||
* @see HRegionInfo#encodeRegionName(Text)
|
* @see HRegionInfo#encodeRegionName(Text)
|
||||||
*/
|
*/
|
||||||
static Path getRegionDir(final Path tabledir, final String name) {
|
public static Path getRegionDir(final Path tabledir, final String name) {
|
||||||
return new Path(tabledir, name);
|
return new Path(tabledir, name);
|
||||||
}
|
}
|
||||||
|
|
|
@ -17,7 +17,7 @@
|
||||||
* See the License for the specific language governing permissions and
|
* See the License for the specific language governing permissions and
|
||||||
* limitations under the License.
|
* limitations under the License.
|
||||||
*/
|
*/
|
||||||
package org.apache.hadoop.hbase;
|
package org.apache.hadoop.hbase.regionserver;
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
|
|
||||||
|
@ -27,6 +27,8 @@ import org.apache.hadoop.hbase.io.BatchUpdate;
|
||||||
import org.apache.hadoop.hbase.io.HbaseMapWritable;
|
import org.apache.hadoop.hbase.io.HbaseMapWritable;
|
||||||
import org.apache.hadoop.io.Text;
|
import org.apache.hadoop.io.Text;
|
||||||
import org.apache.hadoop.ipc.VersionedProtocol;
|
import org.apache.hadoop.ipc.VersionedProtocol;
|
||||||
|
import org.apache.hadoop.hbase.HRegionInfo;
|
||||||
|
import org.apache.hadoop.hbase.NotServingRegionException;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Clients interact with HRegionServers using a handle to the HRegionInterface.
|
* Clients interact with HRegionServers using a handle to the HRegionInterface.
|
|
@ -17,7 +17,7 @@
|
||||||
* See the License for the specific language governing permissions and
|
* See the License for the specific language governing permissions and
|
||||||
* limitations under the License.
|
* limitations under the License.
|
||||||
*/
|
*/
|
||||||
package org.apache.hadoop.hbase;
|
package org.apache.hadoop.hbase.regionserver;
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
import java.lang.Thread.UncaughtExceptionHandler;
|
import java.lang.Thread.UncaughtExceptionHandler;
|
||||||
|
@ -66,9 +66,28 @@ import org.apache.hadoop.io.Writable;
|
||||||
import org.apache.hadoop.ipc.Server;
|
import org.apache.hadoop.ipc.Server;
|
||||||
import org.apache.hadoop.net.DNS;
|
import org.apache.hadoop.net.DNS;
|
||||||
import org.apache.hadoop.util.StringUtils;
|
import org.apache.hadoop.util.StringUtils;
|
||||||
import org.apache.hadoop.hbase.master.HMasterRegionInterface;
|
import org.apache.hadoop.hbase.HConstants;
|
||||||
|
import org.apache.hadoop.hbase.HServerInfo;
|
||||||
|
import org.apache.hadoop.hbase.HBaseConfiguration;
|
||||||
|
import org.apache.hadoop.hbase.HMsg;
|
||||||
|
import org.apache.hadoop.hbase.Leases;
|
||||||
|
import org.apache.hadoop.hbase.HServerAddress;
|
||||||
|
import org.apache.hadoop.hbase.RegionServerRunningException;
|
||||||
|
import org.apache.hadoop.hbase.HRegionInfo;
|
||||||
|
import org.apache.hadoop.hbase.NotServingRegionException;
|
||||||
|
import org.apache.hadoop.hbase.HScannerInterface;
|
||||||
|
import org.apache.hadoop.hbase.LeaseListener;
|
||||||
|
import org.apache.hadoop.hbase.RemoteExceptionHandler;
|
||||||
|
import org.apache.hadoop.hbase.DroppedSnapshotException;
|
||||||
|
import org.apache.hadoop.hbase.HServerLoad;
|
||||||
|
import org.apache.hadoop.hbase.HTableDescriptor;
|
||||||
|
import org.apache.hadoop.hbase.UnknownScannerException;
|
||||||
|
import org.apache.hadoop.hbase.LocalHBaseCluster;
|
||||||
|
|
||||||
import org.apache.hadoop.hbase.client.HTable;
|
import org.apache.hadoop.hbase.client.HTable;
|
||||||
|
|
||||||
|
import org.apache.hadoop.hbase.master.HMasterRegionInterface;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* HRegionServer makes a set of HRegions available to clients. It checks in with
|
* HRegionServer makes a set of HRegions available to clients. It checks in with
|
||||||
* the HMaster. There are many HRegionServers in a single HBase deployment.
|
* the HMaster. There are many HRegionServers in a single HBase deployment.
|
||||||
|
@ -971,7 +990,7 @@ public class HRegionServer implements HConstants, HRegionInterface, Runnable {
|
||||||
* in an orderly fashion. Used by unit tests and called by {@link Flusher}
|
* in an orderly fashion. Used by unit tests and called by {@link Flusher}
|
||||||
* if it judges server needs to be restarted.
|
* if it judges server needs to be restarted.
|
||||||
*/
|
*/
|
||||||
synchronized void stop() {
|
public synchronized void stop() {
|
||||||
this.stopRequested.set(true);
|
this.stopRequested.set(true);
|
||||||
notifyAll(); // Wakes run() if it is sleeping
|
notifyAll(); // Wakes run() if it is sleeping
|
||||||
}
|
}
|
||||||
|
@ -982,7 +1001,7 @@ public class HRegionServer implements HConstants, HRegionInterface, Runnable {
|
||||||
* Used unit testing and on catastrophic events such as HDFS is yanked out
|
* Used unit testing and on catastrophic events such as HDFS is yanked out
|
||||||
* from under hbase or we OOME.
|
* from under hbase or we OOME.
|
||||||
*/
|
*/
|
||||||
synchronized void abort() {
|
public synchronized void abort() {
|
||||||
this.abortRequested = true;
|
this.abortRequested = true;
|
||||||
stop();
|
stop();
|
||||||
}
|
}
|
||||||
|
@ -1570,6 +1589,10 @@ public class HRegionServer implements HConstants, HRegionInterface, Runnable {
|
||||||
public HServerInfo getServerInfo() {
|
public HServerInfo getServerInfo() {
|
||||||
return this.serverInfo;
|
return this.serverInfo;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
public InfoServer getInfoServer() {
|
||||||
|
return infoServer;
|
||||||
|
}
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @return Immutable list of this servers regions.
|
* @return Immutable list of this servers regions.
|
|
@ -17,7 +17,7 @@
|
||||||
* See the License for the specific language governing permissions and
|
* See the License for the specific language governing permissions and
|
||||||
* limitations under the License.
|
* limitations under the License.
|
||||||
*/
|
*/
|
||||||
package org.apache.hadoop.hbase;
|
package org.apache.hadoop.hbase.regionserver;
|
||||||
|
|
||||||
import java.io.IOException;
|
import java.io.IOException;
|
||||||
import java.io.UnsupportedEncodingException;
|
import java.io.UnsupportedEncodingException;
|
||||||
|
@ -50,11 +50,19 @@ import org.apache.hadoop.io.Text;
|
||||||
import org.apache.hadoop.io.Writable;
|
import org.apache.hadoop.io.Writable;
|
||||||
import org.apache.hadoop.io.WritableComparable;
|
import org.apache.hadoop.io.WritableComparable;
|
||||||
import org.apache.hadoop.util.StringUtils;
|
import org.apache.hadoop.util.StringUtils;
|
||||||
|
import org.apache.hadoop.hbase.BloomFilterDescriptor;
|
||||||
import org.onelab.filter.BloomFilter;
|
import org.onelab.filter.BloomFilter;
|
||||||
import org.onelab.filter.CountingBloomFilter;
|
import org.onelab.filter.CountingBloomFilter;
|
||||||
import org.onelab.filter.Filter;
|
import org.onelab.filter.Filter;
|
||||||
import org.onelab.filter.RetouchedBloomFilter;
|
import org.onelab.filter.RetouchedBloomFilter;
|
||||||
|
|
||||||
|
import org.apache.hadoop.hbase.HConstants;
|
||||||
|
import org.apache.hadoop.hbase.HBaseConfiguration;
|
||||||
|
import org.apache.hadoop.hbase.HRegionInfo;
|
||||||
|
import org.apache.hadoop.hbase.HColumnDescriptor;
|
||||||
|
import org.apache.hadoop.hbase.RemoteExceptionHandler;
|
||||||
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* HStore maintains a bunch of data files. It is responsible for maintaining
|
* HStore maintains a bunch of data files. It is responsible for maintaining
|
||||||
* the memory/file hierarchy and for periodic flushes to disk and compacting
|
* the memory/file hierarchy and for periodic flushes to disk and compacting
|
||||||
|
@ -932,7 +940,7 @@ public class HStore implements HConstants {
|
||||||
}
|
}
|
||||||
|
|
||||||
BloomFilterDescriptor.BloomFilterType type =
|
BloomFilterDescriptor.BloomFilterType type =
|
||||||
family.getBloomFilter().filterType;
|
family.getBloomFilter().getType();
|
||||||
|
|
||||||
switch(type) {
|
switch(type) {
|
||||||
|
|
||||||
|
@ -964,25 +972,25 @@ public class HStore implements HConstants {
|
||||||
}
|
}
|
||||||
|
|
||||||
BloomFilterDescriptor.BloomFilterType type =
|
BloomFilterDescriptor.BloomFilterType type =
|
||||||
family.getBloomFilter().filterType;
|
family.getBloomFilter().getType();
|
||||||
|
|
||||||
switch(type) {
|
switch(type) {
|
||||||
|
|
||||||
case BLOOMFILTER:
|
case BLOOMFILTER:
|
||||||
bloomFilter = new BloomFilter(family.getBloomFilter().vectorSize,
|
bloomFilter = new BloomFilter(family.getBloomFilter().getVectorSize(),
|
||||||
family.getBloomFilter().nbHash);
|
family.getBloomFilter().getNbHash());
|
||||||
break;
|
break;
|
||||||
|
|
||||||
case COUNTING_BLOOMFILTER:
|
case COUNTING_BLOOMFILTER:
|
||||||
bloomFilter =
|
bloomFilter =
|
||||||
new CountingBloomFilter(family.getBloomFilter().vectorSize,
|
new CountingBloomFilter(family.getBloomFilter().getVectorSize(),
|
||||||
family.getBloomFilter().nbHash);
|
family.getBloomFilter().getNbHash());
|
||||||
break;
|
break;
|
||||||
|
|
||||||
case RETOUCHED_BLOOMFILTER:
|
case RETOUCHED_BLOOMFILTER:
|
||||||
bloomFilter =
|
bloomFilter =
|
||||||
new RetouchedBloomFilter(family.getBloomFilter().vectorSize,
|
new RetouchedBloomFilter(family.getBloomFilter().getVectorSize(),
|
||||||
family.getBloomFilter().nbHash);
|
family.getBloomFilter().getNbHash());
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
return bloomFilter;
|
return bloomFilter;
|
||||||
|
@ -1963,37 +1971,6 @@ public class HStore implements HConstants {
|
||||||
return target.matchesRowCol(origin);
|
return target.matchesRowCol(origin);
|
||||||
}
|
}
|
||||||
|
|
||||||
/*
|
|
||||||
* Data structure to hold result of a look at store file sizes.
|
|
||||||
*/
|
|
||||||
static class HStoreSize {
|
|
||||||
final long aggregate;
|
|
||||||
final long largest;
|
|
||||||
boolean splitable;
|
|
||||||
|
|
||||||
HStoreSize(final long a, final long l, final boolean s) {
|
|
||||||
this.aggregate = a;
|
|
||||||
this.largest = l;
|
|
||||||
this.splitable = s;
|
|
||||||
}
|
|
||||||
|
|
||||||
long getAggregate() {
|
|
||||||
return this.aggregate;
|
|
||||||
}
|
|
||||||
|
|
||||||
long getLargest() {
|
|
||||||
return this.largest;
|
|
||||||
}
|
|
||||||
|
|
||||||
boolean isSplitable() {
|
|
||||||
return this.splitable;
|
|
||||||
}
|
|
||||||
|
|
||||||
void setSplitable(final boolean s) {
|
|
||||||
this.splitable = s;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* Gets size for the store.
|
* Gets size for the store.
|
||||||
*
|
*
|
|
@ -17,7 +17,7 @@
|
||||||
* See the License for the specific language governing permissions and
|
* See the License for the specific language governing permissions and
|
||||||
* limitations under the License.
|
* limitations under the License.
|
||||||
*/
|
*/
|
||||||
package org.apache.hadoop.hbase;
|
package org.apache.hadoop.hbase.regionserver;
|
||||||
|
|
||||||
import java.io.DataInput;
|
import java.io.DataInput;
|
||||||
import java.io.DataInputStream;
|
import java.io.DataInputStream;
|
||||||
|
@ -46,6 +46,8 @@ import org.apache.hadoop.io.WritableComparable;
|
||||||
import org.onelab.filter.Filter;
|
import org.onelab.filter.Filter;
|
||||||
import org.onelab.filter.Key;
|
import org.onelab.filter.Key;
|
||||||
|
|
||||||
|
import org.apache.hadoop.hbase.HConstants;
|
||||||
|
import org.apache.hadoop.hbase.HBaseConfiguration;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* A HStore data file. HStores usually have one or more of these files. They
|
* A HStore data file. HStores usually have one or more of these files. They
|
|
@ -17,10 +17,12 @@
|
||||||
* See the License for the specific language governing permissions and
|
* See the License for the specific language governing permissions and
|
||||||
* limitations under the License.
|
* limitations under the License.
|
||||||
*/
|
*/
|
||||||
package org.apache.hadoop.hbase;
|
package org.apache.hadoop.hbase.regionserver;
|
||||||
|
|
||||||
import org.apache.hadoop.hbase.io.TextSequence;
|
import org.apache.hadoop.hbase.io.TextSequence;
|
||||||
|
import org.apache.hadoop.hbase.InvalidColumnNameException;
|
||||||
import org.apache.hadoop.io.*;
|
import org.apache.hadoop.io.*;
|
||||||
|
import org.apache.hadoop.hbase.HConstants;
|
||||||
|
|
||||||
import java.io.*;
|
import java.io.*;
|
||||||
import java.nio.ByteBuffer;
|
import java.nio.ByteBuffer;
|
|
@ -0,0 +1,33 @@
|
||||||
|
|
||||||
|
package org.apache.hadoop.hbase.regionserver;
|
||||||
|
|
||||||
|
/*
|
||||||
|
* Data structure to hold result of a look at store file sizes.
|
||||||
|
*/
|
||||||
|
public class HStoreSize {
|
||||||
|
final long aggregate;
|
||||||
|
final long largest;
|
||||||
|
boolean splitable;
|
||||||
|
|
||||||
|
HStoreSize(final long a, final long l, final boolean s) {
|
||||||
|
this.aggregate = a;
|
||||||
|
this.largest = l;
|
||||||
|
this.splitable = s;
|
||||||
|
}
|
||||||
|
|
||||||
|
public long getAggregate() {
|
||||||
|
return this.aggregate;
|
||||||
|
}
|
||||||
|
|
||||||
|
public long getLargest() {
|
||||||
|
return this.largest;
|
||||||
|
}
|
||||||
|
|
||||||
|
public boolean isSplitable() {
|
||||||
|
return this.splitable;
|
||||||
|
}
|
||||||
|
|
||||||
|
public void setSplitable(final boolean s) {
|
||||||
|
this.splitable = s;
|
||||||
|
}
|
||||||
|
}
|
|
@@ -18,7 +18,7 @@
  * limitations under the License.
  */

-package org.apache.hadoop.hbase;
+package org.apache.hadoop.hbase.regionserver;

 /**
  * Mechanism by which the HLog requests a log roll

@@ -17,7 +17,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hbase;
+package org.apache.hadoop.hbase.regionserver;

 import org.apache.hadoop.io.Text;

@@ -35,7 +35,7 @@ import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HScannerInterface;
-import org.apache.hadoop.hbase.HStoreKey;
+import org.apache.hadoop.hbase.regionserver.HStoreKey;
 import org.apache.hadoop.hbase.util.JenkinsHash;
 import org.apache.hadoop.io.Text;
 import org.mortbay.servlet.MultiPartResponse;

@@ -35,7 +35,7 @@ import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HScannerInterface;
-import org.apache.hadoop.hbase.HStoreKey;
+import org.apache.hadoop.hbase.regionserver.HStoreKey;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.MasterNotRunningException;
 import org.apache.hadoop.hbase.thrift.generated.AlreadyExists;

@@ -56,14 +56,15 @@ import org.apache.hadoop.util.ToolRunner;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.HLog;
-import org.apache.hadoop.hbase.HRegion;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HScannerInterface;
-import org.apache.hadoop.hbase.HStore;
-import org.apache.hadoop.hbase.HStoreKey;
 import org.apache.hadoop.hbase.MasterNotRunningException;

+import org.apache.hadoop.hbase.regionserver.HLog;
+import org.apache.hadoop.hbase.regionserver.HRegion;
+import org.apache.hadoop.hbase.regionserver.HStore;
+import org.apache.hadoop.hbase.regionserver.HStoreKey;
+
 /**
  * Perform a file system upgrade to convert older file layouts to that
  * supported by HADOOP-2478, and then to the form supported by HBASE-69

@@ -28,6 +28,8 @@ import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.log4j.Logger;

+import org.apache.hadoop.hbase.regionserver.HRegion;
+
 /** Abstract base class for merge tests */
 public abstract class AbstractMergeTestBase extends HBaseTestCase {
   static final Logger LOG =

@@ -46,6 +46,10 @@ import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.HBaseAdmin;

+import org.apache.hadoop.hbase.regionserver.HRegion;
+import org.apache.hadoop.hbase.regionserver.HStoreKey;
+import org.apache.hadoop.hbase.regionserver.HRegionInterface;
+
 /**
  * Additional scanner tests.
  * {@link TestScanner} does a custom setup/takedown not conducive

@@ -34,6 +34,8 @@ import org.apache.hadoop.hbase.io.BatchUpdate;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.hbase.client.HTable;

+import org.apache.hadoop.hbase.regionserver.HRegion;
+
 /**
  * Abstract base class for test cases. Performs all static initialization
  */

@@ -157,7 +159,7 @@ public abstract class HBaseTestCase extends TestCase {

   protected HRegion openClosedRegion(final HRegion closedRegion)
   throws IOException {
-    return new HRegion(closedRegion.basedir, closedRegion.getLog(),
+    return new HRegion(closedRegion.getBaseDir(), closedRegion.getLog(),
       closedRegion.getFilesystem(), closedRegion.getConf(),
       closedRegion.getRegionInfo(), null, null);
   }
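
The openClosedRegion helper now reads the region's base directory through getBaseDir() instead of the old package-visible basedir field, which is what lets this test base class live outside the regionserver package. A sketch of the same reopening pattern, assuming only the getters shown in the hunk above (the example class name is hypothetical):

    import java.io.IOException;
    import org.apache.hadoop.hbase.regionserver.HRegion;

    public class ReopenRegionExample {
      // Rebuild an HRegion from the state a closed region still exposes
      // through its public getters, mirroring the test helper above.
      static HRegion reopen(final HRegion closedRegion) throws IOException {
        return new HRegion(closedRegion.getBaseDir(), closedRegion.getLog(),
          closedRegion.getFilesystem(), closedRegion.getConf(),
          closedRegion.getRegionInfo(), null, null);
      }
    }
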
@@ -30,6 +30,8 @@ import org.apache.log4j.Logger;

 import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.master.HMaster;
+import org.apache.hadoop.hbase.regionserver.HRegionServer;
+import org.apache.hadoop.hbase.regionserver.HRegion;

 /**
  * This class creates a single process HBase cluster. One thread is created for

@@ -196,7 +198,7 @@ public class MiniHBaseCluster implements HConstants {
   public void abortRegionServer(int serverNumber) {
     HRegionServer server =
       this.hbaseCluster.getRegionServers().get(serverNumber).getRegionServer();
-    LOG.info("Aborting " + server.serverInfo.toString());
+    LOG.info("Aborting " + server.getServerInfo().toString());
     server.abort();
   }

@@ -262,10 +264,10 @@ public class MiniHBaseCluster implements HConstants {
    * Call flushCache on all regions on all participating regionservers.
    * @throws IOException
    */
-  void flushcache() throws IOException {
+  public void flushcache() throws IOException {
     for (LocalHBaseCluster.RegionServerThread t:
         this.hbaseCluster.getRegionServers()) {
-      for(HRegion r: t.getRegionServer().onlineRegions.values() ) {
+      for(HRegion r: t.getRegionServer().getOnlineRegions().values() ) {
         r.flushcache();
       }
     }
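
The MiniHBaseCluster changes above replace direct field access with HRegionServer accessors (getServerInfo(), getOnlineRegions()) and make flushcache() public. A rough sketch of what a caller outside the regionserver package can now do, assuming a running MiniHBaseCluster named cluster and only the methods that appear in this commit (the example class name is hypothetical):

    import java.io.IOException;
    import org.apache.hadoop.hbase.LocalHBaseCluster;
    import org.apache.hadoop.hbase.MiniHBaseCluster;
    import org.apache.hadoop.hbase.regionserver.HRegion;
    import org.apache.hadoop.hbase.regionserver.HRegionServer;

    public class ClusterFlushExample {
      // Force a flush everywhere, then report which regions each server holds.
      static void flushAll(final MiniHBaseCluster cluster) throws IOException {
        cluster.flushcache();  // callable from outside the package now that it is public
        for (LocalHBaseCluster.RegionServerThread t : cluster.getRegionThreads()) {
          HRegionServer server = t.getRegionServer();
          System.out.println("Flushed regions on " + server.getServerInfo());
          for (HRegion r : server.getOnlineRegions().values()) {
            System.out.println("  " + r.getRegionInfo().getRegionName());
          }
        }
      }
    }
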
@@ -33,6 +33,10 @@ import org.apache.hadoop.hbase.util.Writables;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.hbase.client.HTable;

+import org.apache.hadoop.hbase.regionserver.HRegion;
+import org.apache.hadoop.hbase.regionserver.HRegionServer;
+import org.apache.hadoop.hbase.regionserver.HStoreKey;
+
 /**
  * Utility class to build a table of multiple regions.
  */

@@ -99,7 +103,7 @@ public class MultiRegionTable extends HBaseTestCase {
         continue;
       }
       LOG.info("Region location: " + hri);
-      r = server.onlineRegions.get(hri.getRegionName());
+      r = server.getOnlineRegions().get(hri.getRegionName());
       if (r != null) {
         break;
       }

@@ -335,7 +339,7 @@ public class MultiRegionTable extends HBaseTestCase {
     LOG.info("Starting compaction");
     for (LocalHBaseCluster.RegionServerThread thread:
         cluster.getRegionThreads()) {
-      SortedMap<Text, HRegion> regions = thread.getRegionServer().onlineRegions;
+      SortedMap<Text, HRegion> regions = thread.getRegionServer().getOnlineRegions();

       // Retry if ConcurrentModification... alternative of sync'ing is not
       // worth it for sake of unit test.

@@ -49,6 +49,8 @@ import org.apache.log4j.Logger;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.HBaseAdmin;

+import org.apache.hadoop.hbase.regionserver.HStoreKey;
+
 /**
  * Script used evaluating HBase performance and scalability. Runs a HBase
  * client that steps through one of a set of hardcoded tests or 'experiments'

@@ -21,6 +21,7 @@ package org.apache.hadoop.hbase;
 import org.apache.hadoop.io.Text;

 import junit.framework.TestCase;
+import org.apache.hadoop.hbase.regionserver.HStoreKey;

 /**
  * Test comparing HBase objects.

@@ -27,6 +27,7 @@ import java.util.TreeMap;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.regionserver.HStoreKey;

 /**
  * Test HBase Master and Region servers, client API
@@ -63,7 +63,7 @@ public class TestInfoServers extends HBaseTestCase {
       assertHasExpectedContent(new URL("http://localhost:" + port +
         "/index.html"), "Master");
       port = miniHbase.getRegionThreads().get(0).getRegionServer().
-        infoServer.getPort();
+        getInfoServer().getPort();
       assertHasExpectedContent(new URL("http://localhost:" + port +
         "/index.html"), "Region Server");
     } finally {

@@ -30,6 +30,8 @@ import java.util.TreeMap;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.regionserver.HRegion;
+import org.apache.hadoop.hbase.regionserver.HStoreKey;
 import org.apache.hadoop.hbase.io.BatchUpdate;

 /** test the scanner API at all levels */

@@ -28,7 +28,7 @@ import org.apache.hadoop.hbase.HBaseClusterTestCase;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HScannerInterface;
-import org.apache.hadoop.hbase.HStoreKey;
+import org.apache.hadoop.hbase.regionserver.HStoreKey;
 import org.apache.hadoop.hbase.HColumnDescriptor;

 /**

@@ -33,7 +33,7 @@ import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.TableNotFoundException;
 import org.apache.hadoop.hbase.HScannerInterface;
-import org.apache.hadoop.hbase.HStoreKey;
+import org.apache.hadoop.hbase.regionserver.HStoreKey;

 /**
  * Tests HTable

@@ -30,7 +30,7 @@ import java.util.TreeMap;
 import junit.framework.TestCase;

 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.HLogEdit;
+import org.apache.hadoop.hbase.regionserver.HLogEdit;
 import org.apache.hadoop.io.Text;

 /**

@@ -37,9 +37,9 @@ import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
-import org.apache.hadoop.hbase.HRegion;
+import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.HScannerInterface;
-import org.apache.hadoop.hbase.HStoreKey;
+import org.apache.hadoop.hbase.regionserver.HStoreKey;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.MiniHBaseCluster;

@@ -32,7 +32,7 @@ import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HScannerInterface;
-import org.apache.hadoop.hbase.HStoreKey;
+import org.apache.hadoop.hbase.regionserver.HStoreKey;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.MiniHBaseCluster;
@@ -17,13 +17,15 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hbase;
+package org.apache.hadoop.hbase.regionserver;

 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;

 import org.apache.hadoop.hbase.io.BatchUpdate;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.HServerAddress;
 import org.apache.hadoop.io.Text;

 /**

@@ -17,7 +17,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hbase;
+package org.apache.hadoop.hbase.regionserver;

 import java.io.IOException;

@@ -26,9 +26,14 @@ import org.apache.hadoop.dfs.MiniDFSCluster;
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.hbase.HBaseTestCase;
 import org.apache.hadoop.io.MapFile;
 import org.apache.hadoop.io.Text;

+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.StaticTestEnvironment;
+
 /**
  * Test compactions
  */

@@ -17,7 +17,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hbase;
+package org.apache.hadoop.hbase.regionserver;

 import java.io.IOException;
 import java.util.Map;

@@ -25,6 +25,10 @@ import java.util.Map;
 import org.apache.hadoop.dfs.MiniDFSCluster;
 import org.apache.hadoop.io.Text;
 import org.apache.commons.logging.*;
+import org.apache.hadoop.hbase.HBaseTestCase;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.StaticTestEnvironment;

 /**
  * Test the functionality of deleteAll.

@@ -17,7 +17,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hbase;
+package org.apache.hadoop.hbase.regionserver;

 import java.io.IOException;
 import java.util.Map;

@@ -26,6 +26,11 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.dfs.MiniDFSCluster;
 import org.apache.hadoop.io.Text;
+import org.apache.hadoop.hbase.HBaseTestCase;
+
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.StaticTestEnvironment;

 /**
  * Test the functionality of deleteFamily.
@@ -17,7 +17,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hbase;
+package org.apache.hadoop.hbase.regionserver;

 import java.io.IOException;
 import java.util.Iterator;

@@ -27,9 +27,18 @@ import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.dfs.MiniDFSCluster;
 import org.apache.hadoop.io.Text;
+import org.apache.hadoop.hbase.HBaseTestCase;

 import org.apache.hadoop.hbase.util.Writables;

+import org.apache.hadoop.hbase.HRegionInfo;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HServerAddress;
+import org.apache.hadoop.hbase.StaticTestEnvironment;

 /** Test case for get */
 public class TestGet extends HBaseTestCase {
   private static final Log LOG = LogFactory.getLog(TestGet.class.getName());

@@ -17,7 +17,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hbase;
+package org.apache.hadoop.hbase.regionserver;

 import java.io.IOException;
 import java.util.Map;

@@ -27,6 +27,11 @@ import org.apache.hadoop.dfs.MiniDFSCluster;
 import org.apache.hadoop.hbase.filter.StopRowFilter;
 import org.apache.hadoop.hbase.filter.WhileMatchRowFilter;
 import org.apache.hadoop.io.Text;
+import org.apache.hadoop.hbase.HBaseTestCase;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HScannerInterface;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.HRegionInfo;

 /**

@@ -17,7 +17,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hbase;
+package org.apache.hadoop.hbase.regionserver;

 import java.io.IOException;
 import java.util.TreeMap;

@@ -28,6 +28,10 @@ import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.SequenceFile.Reader;

+import org.apache.hadoop.hbase.HBaseTestCase;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.StaticTestEnvironment;
+
 /** JUnit test case for HLog */
 public class TestHLog extends HBaseTestCase implements HConstants {
   private Path dir;

@@ -17,7 +17,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hbase;
+package org.apache.hadoop.hbase.regionserver;

 import java.io.IOException;
 import java.io.UnsupportedEncodingException;

@@ -28,6 +28,8 @@ import junit.framework.TestCase;

 import org.apache.hadoop.io.Text;

+import org.apache.hadoop.hbase.HConstants;
+
 /** memcache test case */
 public class TestHMemcache extends TestCase {

@@ -17,7 +17,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hbase;
+package org.apache.hadoop.hbase.regionserver;

 import java.io.File;
 import java.io.IOException;

@@ -29,8 +29,14 @@ import java.util.TreeMap;
 import org.apache.hadoop.dfs.MiniDFSCluster;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.io.Text;
+import org.apache.hadoop.hbase.HBaseTestCase;
+import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.log4j.Logger;
+import org.apache.hadoop.hbase.StaticTestEnvironment;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.HScannerInterface;

 /**
  * Basic stand-alone testing of HRegion.

@@ -17,7 +17,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hbase;
+package org.apache.hadoop.hbase.regionserver;

 import java.io.IOException;

@@ -30,7 +30,10 @@ import org.apache.hadoop.io.MapFile;
 import org.apache.hadoop.io.SequenceFile;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.WritableComparable;
+import org.apache.hadoop.hbase.HBaseTestCase;

+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.StaticTestEnvironment;
 /**
  * Test HStoreFile
  */

@@ -17,7 +17,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hbase;
+package org.apache.hadoop.hbase.regionserver;

 import java.util.ArrayList;
 import java.util.List;

@@ -28,6 +28,13 @@ import org.apache.hadoop.dfs.MiniDFSCluster;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.HBaseTestCase;
+import org.apache.hadoop.hbase.MiniHBaseCluster;
+
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.StaticTestEnvironment;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.HColumnDescriptor;

 /**
  * Test log deletion as logs are rolled.

@@ -17,7 +17,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hbase;
+package org.apache.hadoop.hbase.regionserver;

 import java.io.IOException;
 import java.util.Collection;

@@ -32,6 +32,13 @@ import org.apache.hadoop.hbase.io.BatchUpdate;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.HBaseAdmin;

+import org.apache.hadoop.hbase.HBaseClusterTestCase;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.HColumnDescriptor;
+import org.apache.hadoop.hbase.LocalHBaseCluster;
+import org.apache.hadoop.hbase.HScannerInterface;
+
 /**
  * Tests region server failover when a region server exits both cleanly and
  * when it aborts.
@@ -17,7 +17,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hbase;
+package org.apache.hadoop.hbase.regionserver;

 import java.io.ByteArrayOutputStream;
 import java.io.DataOutputStream;

@@ -27,6 +27,13 @@ import java.util.TreeMap;
 import org.apache.hadoop.dfs.MiniDFSCluster;
 import org.apache.hadoop.hbase.util.Writables;
 import org.apache.hadoop.io.Text;
+import org.apache.hadoop.hbase.HBaseTestCase;
+import org.apache.hadoop.hbase.HRegionInfo;
+
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HScannerInterface;
+import org.apache.hadoop.hbase.HServerAddress;
+import org.apache.hadoop.hbase.StaticTestEnvironment;

 /**
  * Test of a long-lived scanner validating as we go.

@@ -17,7 +17,7 @@
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
-package org.apache.hadoop.hbase;
+package org.apache.hadoop.hbase.regionserver;

 import java.io.IOException;
 import java.util.TreeMap;

@@ -28,6 +28,11 @@ import org.apache.hadoop.dfs.MiniDFSCluster;
 import org.apache.hadoop.io.Text;
 import org.apache.log4j.Level;
 import org.apache.log4j.Logger;
+import org.apache.hadoop.hbase.MultiRegionTable;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.HScannerInterface;
+import org.apache.hadoop.hbase.StaticTestEnvironment;

 /**
  * {@Link TestHRegion} does a split but this TestCase adds testing of fast

@@ -16,7 +16,7 @@
  * limitations under the License.
  */

-package org.apache.hadoop.hbase;
+package org.apache.hadoop.hbase.regionserver;

 import java.io.IOException;
 import java.util.TreeMap;

@@ -28,6 +28,13 @@ import org.apache.hadoop.hbase.util.Writables;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
+import org.apache.hadoop.hbase.HBaseTestCase;
+import org.apache.hadoop.hbase.HConstants;
+import org.apache.hadoop.hbase.StaticTestEnvironment;
+import org.apache.hadoop.hbase.MiniHBaseCluster;
+import org.apache.hadoop.hbase.HScannerInterface;
+import org.apache.hadoop.hbase.HTableDescriptor;
+import org.apache.hadoop.hbase.HColumnDescriptor;

 /**
  * Tests user specifiable time stamps putting, getting and scanning. Also

@@ -2,8 +2,8 @@
   import="java.util.*"
   import="org.apache.hadoop.io.Text"
   import="org.apache.hadoop.util.VersionInfo"
-  import="org.apache.hadoop.hbase.HRegionServer"
-  import="org.apache.hadoop.hbase.HRegion"
+  import="org.apache.hadoop.hbase.regionserver.HRegionServer"
+  import="org.apache.hadoop.hbase.regionserver.HRegion"
   import="org.apache.hadoop.hbase.HConstants"
   import="org.apache.hadoop.hbase.HServerInfo"
   import="org.apache.hadoop.hbase.HRegionInfo" %><%