HADOOP-2155 Method expecting HBaseConfiguration throws NPE when given Configuration
git-svn-id: https://svn.apache.org/repos/asf/lucene/hadoop/trunk/src/contrib/hbase@592549 13f79535-47bb-0310-9956-ffa450edef68
commit 9be624fc93
parent cda8c597fc
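
Editor's note (not part of the commit): the underlying bug is that a plain org.apache.hadoop.conf.Configuration never loads hbase-default.xml or hbase-site.xml, so HBase-specific keys resolve to null and client code that dereferences them throws a NullPointerException. The patch narrows client-facing signatures from Configuration to HBaseConfiguration and teaches the HBaseConfiguration copy constructor to add the HBase resources when wrapping a plain Configuration. A minimal sketch of the before/after behavior, assuming an hbase-default.xml on the classpath that defines hbase.master (the class name is hypothetical):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;

    public class Hadoop2155Sketch {
      public static void main(String[] args) {
        // A raw Configuration knows nothing about hbase-*.xml, so HBase
        // keys come back null; the pre-patch APIs accepted it and NPE'd.
        Configuration plain = new Configuration();
        System.out.println(plain.get("hbase.master"));  // null

        // The patched copy constructor sees a non-HBaseConfiguration and
        // pulls in hbase-default.xml and hbase-site.xml before use.
        HBaseConfiguration conf = new HBaseConfiguration(plain);
        System.out.println(conf.get("hbase.master"));   // resolved from hbase-default.xml
      }
    }
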
CHANGES.txt
@@ -26,6 +26,7 @@ Trunk (unreleased changes)
    always kill the region server for the META region. This makes
    the test more deterministic and getting META reassigned was
    problematic.
+   HADOOP-2155 Method expecting HBaseConfiguration throws NPE when given Configuration
 
   IMPROVEMENTS
    HADOOP-2401 Add convenience put method that takes writable
 
HBaseAdmin.java
@@ -20,22 +20,19 @@
 package org.apache.hadoop.hbase;
 
 import java.io.IOException;
-import java.util.NoSuchElementException;
 import java.util.Map;
+import java.util.NoSuchElementException;
 import java.util.SortedMap;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.hbase.util.Writables;
 import org.apache.hadoop.io.MapWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.ipc.RemoteException;
-
-import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-
-import org.apache.hadoop.hbase.util.Writables;
 
 /**
  * Provides administrative functions for HBase
  */
@@ -53,7 +50,7 @@ public class HBaseAdmin implements HConstants {
   * @param conf Configuration object
   * @throws MasterNotRunningException
   */
-  public HBaseAdmin(Configuration conf) throws MasterNotRunningException {
+  public HBaseAdmin(HBaseConfiguration conf) throws MasterNotRunningException {
     this.connection = HConnectionManager.getConnection(conf);
     this.pause = conf.getLong("hbase.client.pause", 30 * 1000);
     this.numRetries = conf.getInt("hbase.client.retries.number", 5);
 
HBaseConfiguration.java
@@ -28,8 +28,7 @@ public class HBaseConfiguration extends Configuration {
   /** constructor */
   public HBaseConfiguration() {
     super();
-    addResource("hbase-default.xml");
-    addResource("hbase-site.xml");
+    addHbaseResources();
   }
 
   /**
@@ -38,5 +37,13 @@ public class HBaseConfiguration extends Configuration {
   */
   public HBaseConfiguration(final Configuration c) {
     super(c);
+    if (!(c instanceof HBaseConfiguration)) {
+      addHbaseResources();
+    }
+  }
+
+  private void addHbaseResources() {
+    addResource("hbase-default.xml");
+    addResource("hbase-site.xml");
   }
 }
 
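Editor's note: the instanceof guard above only bolts the HBase resource files onto a configuration that does not already carry them; copying an existing HBaseConfiguration leaves its resource list alone, while wrapping a plain Configuration (the case that used to fail) now picks up hbase-default.xml and hbase-site.xml.
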
HConnectionManager.java
@@ -30,15 +30,13 @@ import java.util.TreeMap;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.ipc.RPC;
-import org.apache.hadoop.ipc.RemoteException;
+import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.hbase.util.Writables;
 import org.apache.hadoop.io.MapWritable;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
-
-import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-import org.apache.hadoop.hbase.util.Writables;
+import org.apache.hadoop.ipc.RPC;
+import org.apache.hadoop.ipc.RemoteException;
 
 /**
  * A non-instantiable class that manages connections to multiple tables in
@@ -65,7 +63,7 @@ public class HConnectionManager implements HConstants {
   * @param conf
   * @return HConnection object for the instance specified by the configuration
   */
-  public static HConnection getConnection(Configuration conf) {
+  public static HConnection getConnection(HBaseConfiguration conf) {
     HConnection connection;
     synchronized (HBASE_INSTANCES) {
       String instanceName = conf.get(HBASE_DIR, DEFAULT_HBASE_DIR);
@@ -84,7 +82,7 @@ public class HConnectionManager implements HConstants {
   * Delete connection information for the instance specified by the configuration
   * @param conf
   */
-  public static void deleteConnection(Configuration conf) {
+  public static void deleteConnection(HBaseConfiguration conf) {
     synchronized (HBASE_INSTANCES) {
       HBASE_INSTANCES.remove(conf.get(HBASE_DIR, DEFAULT_HBASE_DIR));
     }
@@ -106,7 +104,7 @@ public class HConnectionManager implements HConstants {
     private final Integer rootRegionLock = new Integer(0);
     private final Integer metaRegionLock = new Integer(0);
 
-    private volatile Configuration conf;
+    private volatile HBaseConfiguration conf;
 
     // Map tableName -> (Map startRow -> (HRegionInfo, HServerAddress)
     private Map<Text, SortedMap<Text, HRegionLocation>> tablesToServers;
@@ -125,7 +123,7 @@ public class HConnectionManager implements HConstants {
     * @param conf Configuration object
     */
    @SuppressWarnings("unchecked")
-    public TableServers(Configuration conf) {
+    public TableServers(HBaseConfiguration conf) {
      this.conf = LocalHBaseCluster.doLocal(new HBaseConfiguration(conf));
 
      String serverClassName =
 
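Editor's note: getConnection/deleteConnection cache one HConnection per HBASE_DIR value in the static HBASE_INSTANCES map, so equivalent configurations share a connection. A small illustrative sketch (the class name and assertion are for exposition only):

    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.HConnection;
    import org.apache.hadoop.hbase.HConnectionManager;

    public class ConnectionCacheSketch {
      public static void main(String[] args) {
        HBaseConfiguration conf = new HBaseConfiguration();
        // Both lookups resolve to the same HBASE_DIR key, so the cache
        // hands back one shared HConnection instance.
        HConnection c1 = HConnectionManager.getConnection(conf);
        HConnection c2 = HConnectionManager.getConnection(conf);
        assert c1 == c2;
        HConnectionManager.deleteConnection(conf);  // evict the cached entry
      }
    }
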
HMaster.java
@@ -94,7 +94,7 @@ HMasterRegionInterface {
   volatile AtomicBoolean closed = new AtomicBoolean(true);
   volatile boolean fsOk;
   Path dir;
-  Configuration conf;
+  HBaseConfiguration conf;
   FileSystem fs;
   Random rand;
   int threadWakeFrequency;
@@ -868,7 +868,7 @@ HMasterRegionInterface {
   * @param conf - Configuration object
   * @throws IOException
   */
-  public HMaster(Configuration conf) throws IOException {
+  public HMaster(HBaseConfiguration conf) throws IOException {
     this(new Path(conf.get(HBASE_DIR, DEFAULT_HBASE_DIR)),
       new HServerAddress(conf.get(MASTER_ADDRESS, DEFAULT_MASTER_ADDRESS)),
       conf);
@@ -882,7 +882,7 @@ HMasterRegionInterface {
   *
   * @throws IOException
   */
-  public HMaster(Path dir, HServerAddress address, Configuration conf)
+  public HMaster(Path dir, HServerAddress address, HBaseConfiguration conf)
     throws IOException {
     this.fsOk = true;
     this.dir = dir;
@@ -3044,7 +3044,7 @@ HMasterRegionInterface {
       printUsageAndExit();
     }
 
-    Configuration conf = new HBaseConfiguration();
+    HBaseConfiguration conf = new HBaseConfiguration();
 
     // Process command-line args. TODO: Better cmd-line processing
     // (but hopefully something not as painful as cli options).
 
HMerge.java
@@ -28,14 +28,12 @@ import java.util.TreeSet;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
+import org.apache.hadoop.hbase.util.Writables;
 import org.apache.hadoop.io.DataInputBuffer;
 import org.apache.hadoop.io.Text;
 
-import org.apache.hadoop.hbase.util.Writables;
-
 /**
  * A non-instantiable class that has a static method capable of compacting
  * a table by merging adjacent regions that have grown too small.
@@ -61,7 +59,7 @@ class HMerge implements HConstants {
   * @param tableName - Table to be compacted
   * @throws IOException
   */
-  public static void merge(Configuration conf, FileSystem fs, Text tableName)
+  public static void merge(HBaseConfiguration conf, FileSystem fs, Text tableName)
     throws IOException {
     HConnection connection = HConnectionManager.getConnection(conf);
     boolean masterIsRunning = connection.isMasterRunning();
@@ -82,7 +80,7 @@ class HMerge implements HConstants {
   }
 
   private static abstract class Merger {
-    protected Configuration conf;
+    protected HBaseConfiguration conf;
     protected FileSystem fs;
     protected Text tableName;
     protected Path dir;
@@ -93,7 +91,7 @@ class HMerge implements HConstants {
     protected HStoreKey key;
     protected HRegionInfo info;
 
-    protected Merger(Configuration conf, FileSystem fs, Text tableName)
+    protected Merger(HBaseConfiguration conf, FileSystem fs, Text tableName)
       throws IOException {
 
       this.conf = conf;
@@ -200,7 +198,7 @@ class HMerge implements HConstants {
     private HScannerInterface metaScanner;
     private HRegionInfo latestRegion;
 
-    OnlineMerger(Configuration conf, FileSystem fs, Text tableName)
+    OnlineMerger(HBaseConfiguration conf, FileSystem fs, Text tableName)
       throws IOException {
 
       super(conf, fs, tableName);
@@ -315,7 +313,7 @@ class HMerge implements HConstants {
     private TreeSet<HRegionInfo> metaRegions;
     private TreeMap<Text, byte []> results;
 
-    OfflineMerger(Configuration conf, FileSystem fs, Text tableName)
+    OfflineMerger(HBaseConfiguration conf, FileSystem fs, Text tableName)
       throws IOException {
 
       super(conf, fs, tableName);
 
HRegion.java
@@ -111,7 +111,7 @@ public class HRegion implements HConstants {
       throw new IOException("Cannot merge non-adjacent regions");
     }
 
-    Configuration conf = a.getConf();
+    HBaseConfiguration conf = a.getConf();
     HTableDescriptor tabledesc = a.getTableDesc();
     HLog log = a.getLog();
     Path rootDir = a.getRootDir();
@@ -194,7 +194,7 @@ public class HRegion implements HConstants {
   Path rootDir;
   HLog log;
   FileSystem fs;
-  Configuration conf;
+  HBaseConfiguration conf;
   HRegionInfo regionInfo;
   Path regiondir;
 
@@ -242,7 +242,7 @@ public class HRegion implements HConstants {
   *
   * @throws IOException
   */
-  public HRegion(Path rootDir, HLog log, FileSystem fs, Configuration conf,
+  public HRegion(Path rootDir, HLog log, FileSystem fs, HBaseConfiguration conf,
     HRegionInfo regionInfo, Path initialFiles)
     throws IOException {
     this.rootDir = rootDir;
@@ -559,7 +559,7 @@ public class HRegion implements HConstants {
   }
 
   /** @return Configuration object */
-  public Configuration getConf() {
+  public HBaseConfiguration getConf() {
     return this.conf;
   }
 
@@ -1834,7 +1834,8 @@ public class HRegion implements HConstants {
   * @throws IOException
   */
  static HRegion createHRegion(final HRegionInfo info, final Path rootDir,
-    final Configuration conf, final Path initialFiles) throws IOException {
+    final HBaseConfiguration conf, final Path initialFiles)
+  throws IOException {
     Path regionDir = HRegion.getRegionDir(rootDir,
       HRegionInfo.encodeRegionName(info.getRegionName()));
     FileSystem fs = FileSystem.get(conf);
 
HRegionServer.java
@@ -85,7 +85,7 @@ public class HRegionServer implements HConstants, HRegionInterface, Runnable {
   protected volatile boolean fsOk;
 
   protected final HServerInfo serverInfo;
-  protected final Configuration conf;
+  protected final HBaseConfiguration conf;
   private final Random rand = new Random();
 
   // region name -> HRegion
@@ -373,7 +373,7 @@ public class HRegionServer implements HConstants, HRegionInterface, Runnable {
   * @param conf
   * @throws IOException
   */
-  public HRegionServer(Configuration conf) throws IOException {
+  public HRegionServer(HBaseConfiguration conf) throws IOException {
     this(new HServerAddress(conf.get(REGIONSERVER_ADDRESS,
       DEFAULT_REGIONSERVER_ADDRESS)), conf);
   }
@@ -384,7 +384,7 @@ public class HRegionServer implements HConstants, HRegionInterface, Runnable {
   * @param conf
   * @throws IOException
   */
-  public HRegionServer(HServerAddress address, Configuration conf)
+  public HRegionServer(HServerAddress address, HBaseConfiguration conf)
     throws IOException {
     this.abortRequested = false;
     this.fsOk = true;
 
HStore.java
@@ -37,7 +37,6 @@ import java.util.Map.Entry;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
@@ -77,7 +76,7 @@ class HStore implements HConstants {
   Text familyName;
   SequenceFile.CompressionType compression;
   FileSystem fs;
-  Configuration conf;
+  HBaseConfiguration conf;
   Path mapdir;
   Path loginfodir;
   Path filterDir;
@@ -141,7 +140,7 @@ class HStore implements HConstants {
   */
   HStore(Path dir, Text regionName, String encodedName,
     HColumnDescriptor family, FileSystem fs, Path reconstructionLog,
-    Configuration conf)
+    HBaseConfiguration conf)
     throws IOException {
     this.dir = dir;
     this.compactionDir = new Path(dir, "compaction.dir");
 
HStoreFile.java
@@ -122,11 +122,11 @@ public class HStoreFile implements HConstants, WritableComparable {
   private String encodedRegionName;
   private Text colFamily;
   private long fileId;
-  private final Configuration conf;
+  private final HBaseConfiguration conf;
   private Reference reference;
 
   /** Shutdown constructor used by Writable */
-  HStoreFile(Configuration conf) {
+  HStoreFile(HBaseConfiguration conf) {
     this(conf, new Path(Path.CUR_DIR), "", new Text(), 0);
   }
 
@@ -138,7 +138,7 @@ public class HStoreFile implements HConstants, WritableComparable {
   * @param colFamily name of the column family
   * @param fileId file identifier
   */
-  HStoreFile(final Configuration conf, final Path dir,
+  HStoreFile(final HBaseConfiguration conf, final Path dir,
     final String encodedRegionName, final Text colFamily, final long fileId) {
     this(conf, dir, encodedRegionName, colFamily, fileId, null);
   }
@@ -152,7 +152,7 @@ public class HStoreFile implements HConstants, WritableComparable {
   * @param fileId file identifier
   * @param ref Reference to another HStoreFile.
   */
-  HStoreFile(Configuration conf, Path dir, String encodedRegionName,
+  HStoreFile(HBaseConfiguration conf, Path dir, String encodedRegionName,
     Text colFamily, long fileId, final Reference ref) {
     this.conf = conf;
     this.dir = dir;
@@ -348,7 +348,7 @@ public class HStoreFile implements HConstants, WritableComparable {
   * Checks the filesystem to determine if the file already exists. If so, it
   * will keep generating names until it generates a name that does not exist.
   */
-  static HStoreFile obtainNewHStoreFile(Configuration conf, Path dir,
+  static HStoreFile obtainNewHStoreFile(HBaseConfiguration conf, Path dir,
     String encodedRegionName, Text colFamily, FileSystem fs)
     throws IOException {
 
@@ -378,7 +378,7 @@ public class HStoreFile implements HConstants, WritableComparable {
   * @return List of store file instances loaded from passed dir.
   * @throws IOException
   */
-  static List<HStoreFile> loadHStoreFiles(Configuration conf, Path dir,
+  static List<HStoreFile> loadHStoreFiles(HBaseConfiguration conf, Path dir,
     String encodedRegionName, Text colFamily, FileSystem fs)
     throws IOException {
     // Look first at info files. If a reference, these contain info we need
 
HTable.java
@@ -33,7 +33,6 @@ import java.util.concurrent.atomic.AtomicReferenceArray;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.filter.RowFilterInterface;
 import org.apache.hadoop.hbase.filter.StopRowFilter;
 import org.apache.hadoop.hbase.filter.WhileMatchRowFilter;
@@ -76,7 +75,7 @@ public class HTable implements HConstants {
   * @param tableName name of the table
   * @throws IOException
   */
-  public HTable(Configuration conf, Text tableName) throws IOException {
+  public HTable(HBaseConfiguration conf, Text tableName) throws IOException {
     closed = true;
     this.connection = HConnectionManager.getConnection(conf);
     this.tableName = tableName;
 
LocalHBaseCluster.java
@@ -55,14 +55,14 @@ public class LocalHBaseCluster implements HConstants {
   private final static int DEFAULT_NO = 1;
   public static final String LOCAL = "local";
   public static final String LOCAL_COLON = LOCAL + ":";
-  private final Configuration conf;
+  private final HBaseConfiguration conf;
 
   /**
    * Constructor.
    * @param conf
    * @throws IOException
    */
-  public LocalHBaseCluster(final Configuration conf)
+  public LocalHBaseCluster(final HBaseConfiguration conf)
     throws IOException {
     this(conf, DEFAULT_NO);
   }
@@ -74,7 +74,8 @@ public class LocalHBaseCluster implements HConstants {
   * @param noRegionServers Count of regionservers to start.
   * @throws IOException
   */
-  public LocalHBaseCluster(final Configuration conf, final int noRegionServers)
+  public LocalHBaseCluster(final HBaseConfiguration conf,
+    final int noRegionServers)
     throws IOException {
     super();
     this.conf = conf;
@@ -234,7 +235,7 @@ public class LocalHBaseCluster implements HConstants {
   * @return The passed <code>c</code> configuration modified if hbase.master
   * value was 'local' otherwise, unaltered.
   */
-  static Configuration doLocal(final Configuration c) {
+  static HBaseConfiguration doLocal(final HBaseConfiguration c) {
     if (!isLocal(c)) {
       return c;
     }
@@ -263,7 +264,7 @@ public class LocalHBaseCluster implements HConstants {
   * @throws IOException
   */
   public static void main(String[] args) throws IOException {
-    Configuration conf = new HBaseConfiguration();
+    HBaseConfiguration conf = new HBaseConfiguration();
     LocalHBaseCluster cluster = new LocalHBaseCluster(conf);
     cluster.startup();
     HBaseAdmin admin = new HBaseAdmin(conf);
 
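Editor's note: doLocal, per its javadoc above, returns the passed configuration unaltered unless hbase.master is 'local'. TableServers in HConnectionManager routes every client configuration through it after wrapping with new HBaseConfiguration(conf), so local-mode rewriting and HBase resource loading now happen in one place.
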
Shell.java
@@ -74,7 +74,7 @@ public class Shell {
   */
  public static void main(@SuppressWarnings("unused") String args[])
    throws IOException {
-    Configuration conf = new HBaseConfiguration();
+    HBaseConfiguration conf = new HBaseConfiguration();
     ConsoleReader reader = new ConsoleReader();
     reader.setBellEnabled(conf.getBoolean("hbaseshell.jline.bell.enabled",
       DEFAULT_BELL_ENABLED));
 
TableInputFormat.java
@@ -36,6 +36,7 @@ import org.apache.hadoop.mapred.JobConfigurable;
 import org.apache.hadoop.mapred.RecordReader;
 import org.apache.hadoop.mapred.Reporter;
 
+import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HTable;
 import org.apache.hadoop.hbase.HScannerInterface;
 import org.apache.hadoop.hbase.HStoreKey;
@@ -195,7 +196,7 @@ implements InputFormat<HStoreKey, MapWritable>, JobConfigurable {
       m_cols[i] = new Text(colNames[i]);
     }
     try {
-      m_table = new HTable(job, m_tableName);
+      m_table = new HTable(new HBaseConfiguration(job), m_tableName);
     } catch (Exception e) {
       LOG.error(e);
     }
 
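Editor's note: the mapred changes show the wrapping pattern for callers that only have a JobConf, which is a plain Configuration. A hedged usage sketch; the class and table names are hypothetical:

    import java.io.IOException;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.HTable;
    import org.apache.hadoop.io.Text;
    import org.apache.hadoop.mapred.JobConf;

    public class WrapJobConfSketch {
      public static void main(String[] args) throws IOException {
        // JobConf is-a Configuration; wrap it so the HBase resource files
        // are loaded before HTable looks up HBase keys.
        JobConf job = new JobConf();
        HTable table = new HTable(new HBaseConfiguration(job), new Text("mytable"));
      }
    }
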
TableOutputFormat.java
@@ -34,6 +34,7 @@ import org.apache.hadoop.mapred.RecordWriter;
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.util.Progressable;
 
+import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HTable;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 
@@ -51,7 +52,9 @@ public class TableOutputFormat
   static final Logger LOG = Logger.getLogger(TableOutputFormat.class.getName());
 
   /** constructor */
-  public TableOutputFormat() {}
+  public TableOutputFormat() {
+    super();
+  }
 
   /**
    * Convert Reduce output (key, value) to (HStoreKey, KeyedDataArrayWritable)
@@ -71,7 +74,9 @@ public class TableOutputFormat
     }
 
     /** {@inheritDoc} */
-    public void close(@SuppressWarnings("unused") Reporter reporter) {}
+    public void close(@SuppressWarnings("unused") Reporter reporter) {
+      // Nothing to do.
+    }
 
     /** {@inheritDoc} */
     public void write(Text key, MapWritable value) throws IOException {
@@ -99,7 +104,7 @@ public class TableOutputFormat
     Text tableName = new Text(job.get(OUTPUT_TABLE));
     HTable table = null;
     try {
-      table = new HTable(job, tableName);
+      table = new HTable(new HBaseConfiguration(job), tableName);
     } catch(IOException e) {
       LOG.error(e);
       throw e;
 
AlterCommand.java
@@ -25,8 +25,8 @@ import java.util.HashMap;
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseAdmin;
+import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.io.Text;
 
@@ -45,7 +45,7 @@ public class AlterCommand extends SchemaModificationCommand {
     super(o);
   }
 
-  public ReturnMsg execute(Configuration conf) {
+  public ReturnMsg execute(HBaseConfiguration conf) {
     try {
       HBaseAdmin admin = new HBaseAdmin(conf);
       Set<String> columns = null;
 
ClearCommand.java
@@ -22,7 +22,7 @@ package org.apache.hadoop.hbase.shell;
 import java.io.IOException;
 import java.io.Writer;
 
-import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
 
 /**
  * Clears the console screen.
@@ -32,7 +32,7 @@ public class ClearCommand extends BasicCommand {
     super(o);
   }
 
-  public ReturnMsg execute(@SuppressWarnings("unused") Configuration conf) {
+  public ReturnMsg execute(@SuppressWarnings("unused") HBaseConfiguration conf) {
     clear();
     return null;
   }
 
Command.java
@@ -19,7 +19,7 @@
  */
 package org.apache.hadoop.hbase.shell;
 
-import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
 
 public interface Command {
   /** family indicator */
@@ -31,7 +31,7 @@ public interface Command {
   * @param conf Configuration
   * @return Result of command execution
   */
-  public ReturnMsg execute(final Configuration conf);
+  public ReturnMsg execute(final HBaseConfiguration conf);
 
   /**
   * @return Type of this command whether DDL, SELECT, INSERT, UPDATE, DELETE,
 
CreateCommand.java
@@ -24,8 +24,8 @@ import java.util.HashMap;
 import java.util.Map;
 import java.util.Set;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseAdmin;
+import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HTableDescriptor;
 
@@ -41,7 +41,7 @@ public class CreateCommand extends SchemaModificationCommand {
     super(o);
   }
 
-  public ReturnMsg execute(Configuration conf) {
+  public ReturnMsg execute(HBaseConfiguration conf) {
     try {
       HBaseAdmin admin = new HBaseAdmin(conf);
       HTableDescriptor tableDesc = new HTableDescriptor(tableName);
 
DeleteCommand.java
@@ -24,8 +24,8 @@ import java.io.Writer;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseAdmin;
+import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HTable;
 import org.apache.hadoop.io.Text;
 
@@ -41,7 +41,7 @@ public class DeleteCommand extends BasicCommand {
   private String rowKey;
   private List<String> columnList;
 
-  public ReturnMsg execute(Configuration conf) {
+  public ReturnMsg execute(HBaseConfiguration conf) {
     if (columnList == null) {
       throw new IllegalArgumentException("Column list is null");
     }
 
DescCommand.java
@@ -22,7 +22,7 @@ package org.apache.hadoop.hbase.shell;
 import java.io.IOException;
 import java.io.Writer;
 
-import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConnection;
 import org.apache.hadoop.hbase.HConnectionManager;
@@ -49,7 +49,7 @@ public class DescCommand extends BasicCommand {
     this.formatter = f;
   }
 
-  public ReturnMsg execute(final Configuration conf) {
+  public ReturnMsg execute(final HBaseConfiguration conf) {
     if (this.tableName == null)
       return new ReturnMsg(0, "Syntax error : Please check 'Describe' syntax");
     try {
 
DisableCommand.java
@@ -22,8 +22,8 @@ package org.apache.hadoop.hbase.shell;
 import java.io.IOException;
 import java.io.Writer;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseAdmin;
+import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.io.Text;
 
 /**
@@ -36,7 +36,7 @@ public class DisableCommand extends BasicCommand {
     super(o);
   }
 
-  public ReturnMsg execute(Configuration conf) {
+  public ReturnMsg execute(HBaseConfiguration conf) {
     assert tableName != null;
 
     try {
 
DropCommand.java
@@ -23,8 +23,8 @@ import java.io.IOException;
 import java.io.Writer;
 import java.util.List;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseAdmin;
+import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.io.Text;
 
 /**
@@ -37,7 +37,7 @@ public class DropCommand extends BasicCommand {
     super(o);
   }
 
-  public ReturnMsg execute(Configuration conf) {
+  public ReturnMsg execute(HBaseConfiguration conf) {
     if (tableList == null) {
       throw new IllegalArgumentException("List of tables is null");
     }
 
EnableCommand.java
@@ -22,8 +22,8 @@ package org.apache.hadoop.hbase.shell;
 import java.io.IOException;
 import java.io.Writer;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseAdmin;
+import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.io.Text;
 
 /**
@@ -36,7 +36,7 @@ public class EnableCommand extends BasicCommand {
     super(o);
   }
 
-  public ReturnMsg execute(Configuration conf) {
+  public ReturnMsg execute(HBaseConfiguration conf) {
     assert tableName != null;
     try {
       HBaseAdmin admin = new HBaseAdmin(conf);
 
ExitCommand.java
@@ -21,14 +21,14 @@ package org.apache.hadoop.hbase.shell;
 
 import java.io.Writer;
 
-import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
 
 public class ExitCommand extends BasicCommand {
   public ExitCommand(Writer o) {
     super(o);
   }
 
-  public ReturnMsg execute(@SuppressWarnings("unused") Configuration conf) {
+  public ReturnMsg execute(@SuppressWarnings("unused") HBaseConfiguration conf) {
     // TOD: Is this the best way to exit? Would be a problem if shell is run
     // inside another program -- St.Ack 09/11/2007
     System.exit(1);
 
FsCommand.java
@@ -22,8 +22,8 @@ package org.apache.hadoop.hbase.shell;
 import java.io.Writer;
 import java.util.List;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FsShell;
+import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.util.ToolRunner;
 
 /**
@@ -36,7 +36,7 @@ public class FsCommand extends BasicCommand {
     super(o);
   }
 
-  public ReturnMsg execute(@SuppressWarnings("unused") Configuration conf) {
+  public ReturnMsg execute(@SuppressWarnings("unused") HBaseConfiguration conf) {
     // This commmand will write the
     FsShell shell = new FsShell();
     try {
 
HelpCommand.java
@@ -52,7 +52,7 @@ public class HelpCommand extends BasicCommand {
     this.formatter = f;
   }
 
-  public ReturnMsg execute(@SuppressWarnings("unused") Configuration conf) {
+  public ReturnMsg execute(@SuppressWarnings("unused") HBaseConfiguration conf) {
     try {
       printHelp(this.argument);
     } catch (IOException e) {
 
InsertCommand.java
@@ -23,7 +23,7 @@ import java.io.IOException;
 import java.io.Writer;
 import java.util.List;
 
-import org.apache.hadoop.conf.Configuration;
+import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HTable;
 import org.apache.hadoop.io.Text;
 
@@ -40,7 +40,7 @@ public class InsertCommand extends BasicCommand {
     super(o);
   }
 
-  public ReturnMsg execute(Configuration conf) {
+  public ReturnMsg execute(HBaseConfiguration conf) {
     if (this.tableName == null || this.values == null || this.rowKey == null)
       return new ReturnMsg(0, "Syntax error : Please check 'Insert' syntax.");
 
JarCommand.java
@@ -35,6 +35,7 @@ import java.util.jar.Manifest;
 
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileUtil;
+import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.util.RunJar;
 
 /**
@@ -48,7 +49,7 @@ public class JarCommand extends BasicCommand {
   }
 
   @SuppressWarnings("deprecation")
-  public ReturnMsg execute(@SuppressWarnings("unused") Configuration conf) {
+  public ReturnMsg execute(@SuppressWarnings("unused") HBaseConfiguration conf) {
 
     try {
       String[] args = getQuery();
 
ReturnMsg.java
@@ -19,9 +19,11 @@
  */
 package org.apache.hadoop.hbase.shell;
 
+import org.apache.hadoop.hbase.HBaseConfiguration;
+
 /**
  * Message returned when a {@link Command} is
- * {@link Command#execute(org.apache.hadoop.conf.Configuration)}'ed.
+ * {@link Command#execute(HBaseConfiguration)}'ed.
  */
 public class ReturnMsg {
   private final String msg;
 
SelectCommand.java
@@ -28,7 +28,6 @@ import java.util.List;
 import java.util.Map;
 import java.util.TreeMap;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseAdmin;
 import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HConstants;
@@ -75,7 +74,7 @@ public class SelectCommand extends BasicCommand {
     this.formatter = f;
   }
 
-  public ReturnMsg execute(final Configuration conf) {
+  public ReturnMsg execute(final HBaseConfiguration conf) {
     if (this.tableName.equals("") || this.rowKey == null ||
         this.columns.size() == 0) {
       return new ReturnMsg(0, "Syntax error : Please check 'Select' syntax.");
 
ShowCommand.java
@@ -22,8 +22,8 @@ package org.apache.hadoop.hbase.shell;
 import java.io.IOException;
 import java.io.Writer;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.HBaseAdmin;
+import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HTableDescriptor;
 
 /**
@@ -51,7 +51,7 @@ public class ShowCommand extends BasicCommand {
     this.command = argument;
   }
 
-  public ReturnMsg execute(final Configuration conf) {
+  public ReturnMsg execute(final HBaseConfiguration conf) {
     if (this.command == null) {
       return new ReturnMsg(0, "Syntax error : Please check 'Show' syntax");
     }
 
HBaseTestCase.java
@@ -51,7 +51,7 @@ public abstract class HBaseTestCase extends TestCase {
     StaticTestEnvironment.initialize();
   }
 
-  protected volatile Configuration conf;
+  protected volatile HBaseConfiguration conf;
 
   /**
    * constructor
 
MiniHBaseCluster.java
@@ -23,7 +23,6 @@ import java.io.File;
 import java.io.IOException;
 import java.util.List;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.dfs.MiniDFSCluster;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
@@ -37,7 +36,7 @@ public class MiniHBaseCluster implements HConstants {
   static final Logger LOG =
     Logger.getLogger(MiniHBaseCluster.class.getName());
 
-  private Configuration conf;
+  private HBaseConfiguration conf;
   private MiniDFSCluster cluster;
   private FileSystem fs;
   private boolean shutdownDFS;
@@ -52,7 +51,7 @@ public class MiniHBaseCluster implements HConstants {
   * @param nRegionNodes
   * @throws IOException
   */
-  public MiniHBaseCluster(Configuration conf, int nRegionNodes)
+  public MiniHBaseCluster(HBaseConfiguration conf, int nRegionNodes)
     throws IOException {
     this(conf, nRegionNodes, true, true, true);
   }
@@ -66,7 +65,7 @@ public class MiniHBaseCluster implements HConstants {
   * @param miniHdfsFilesystem
   * @throws IOException
   */
-  public MiniHBaseCluster(Configuration conf, int nRegionNodes,
+  public MiniHBaseCluster(HBaseConfiguration conf, int nRegionNodes,
     final boolean miniHdfsFilesystem) throws IOException {
     this(conf, nRegionNodes, miniHdfsFilesystem, true, true);
   }
@@ -89,7 +88,7 @@ public class MiniHBaseCluster implements HConstants {
   * @param dfsCluster
   * @throws IOException
   */
-  public MiniHBaseCluster(Configuration conf, int nRegionNodes,
+  public MiniHBaseCluster(HBaseConfiguration conf, int nRegionNodes,
     MiniDFSCluster dfsCluster) throws IOException {
 
     this.conf = conf;
@@ -110,7 +109,7 @@ public class MiniHBaseCluster implements HConstants {
   * @param deleteOnExit clean up mini hdfs files
   * @throws IOException
   */
-  public MiniHBaseCluster(Configuration conf, int nRegionNodes,
+  public MiniHBaseCluster(HBaseConfiguration conf, int nRegionNodes,
     final boolean miniHdfsFilesystem, boolean format, boolean deleteOnExit)
     throws IOException {
 
MultiRegionTable.java
@@ -27,7 +27,6 @@ import java.util.TreeMap;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.util.Writables;
@@ -52,7 +51,7 @@ public class MultiRegionTable extends HBaseTestCase {
   * @throws IOException
   */
  @SuppressWarnings("null")
-  public static void makeMultiRegionTable(Configuration conf,
+  public static void makeMultiRegionTable(HBaseConfiguration conf,
     MiniHBaseCluster cluster, FileSystem localFs, String tableName,
     String columnName)
   throws IOException {
 
OOMEHMaster.java
@@ -23,7 +23,6 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.Path;
 
 /**
@@ -35,11 +34,11 @@ import org.apache.hadoop.fs.Path;
 public class OOMEHMaster extends HMaster {
   private List<byte []> retainer = new ArrayList<byte[]>();
 
-  public OOMEHMaster(Configuration conf) throws IOException {
+  public OOMEHMaster(HBaseConfiguration conf) throws IOException {
     super(conf);
   }
 
-  public OOMEHMaster(Path dir, HServerAddress address, Configuration conf)
+  public OOMEHMaster(Path dir, HServerAddress address, HBaseConfiguration conf)
     throws IOException {
     super(dir, address, conf);
   }
 
OOMERegionServer.java
@@ -23,7 +23,6 @@ import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.hbase.io.BatchUpdate;
 import org.apache.hadoop.io.Text;
 
@@ -37,11 +36,11 @@ import org.apache.hadoop.io.Text;
 public class OOMERegionServer extends HRegionServer {
   private List<BatchUpdate> retainer = new ArrayList<BatchUpdate>();
 
-  public OOMERegionServer(Configuration conf) throws IOException {
+  public OOMERegionServer(HBaseConfiguration conf) throws IOException {
     super(conf);
   }
 
-  public OOMERegionServer(HServerAddress address, Configuration conf)
+  public OOMERegionServer(HServerAddress address, HBaseConfiguration conf)
     throws IOException {
     super(address, conf);
   }
 
@ -103,7 +103,7 @@ public class PerformanceEvaluation implements HConstants {
|
||||||
SEQUENTIAL_WRITE,
|
SEQUENTIAL_WRITE,
|
||||||
SCAN});
|
SCAN});
|
||||||
|
|
||||||
volatile Configuration conf;
|
volatile HBaseConfiguration conf;
|
||||||
private boolean miniCluster = false;
|
private boolean miniCluster = false;
|
||||||
private int N = 1;
|
private int N = 1;
|
||||||
private int R = ROWS_PER_GB;
|
private int R = ROWS_PER_GB;
|
||||||
|
@ -131,7 +131,7 @@ public class PerformanceEvaluation implements HConstants {
|
||||||
* Constructor
|
* Constructor
|
||||||
* @param c Configuration object
|
* @param c Configuration object
|
||||||
*/
|
*/
|
||||||
public PerformanceEvaluation(final Configuration c) {
|
public PerformanceEvaluation(final HBaseConfiguration c) {
|
||||||
this.conf = c;
|
this.conf = c;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -163,7 +163,7 @@ public class PerformanceEvaluation implements HConstants {
|
||||||
public void configure(JobConf j) {
|
public void configure(JobConf j) {
|
||||||
this.cmd = j.get(CMD_KEY);
|
this.cmd = j.get(CMD_KEY);
|
||||||
|
|
||||||
this.pe = new PerformanceEvaluation(j);
|
this.pe = new PerformanceEvaluation(new HBaseConfiguration(j));
|
||||||
}
|
}
|
||||||
|
|
||||||
/** {@inheritDoc} */
|
/** {@inheritDoc} */
|
||||||
|
@@ -292,9 +292,9 @@ public class PerformanceEvaluation implements HConstants {
     private final Status status;
     protected HBaseAdmin admin;
     protected HTable table;
-    protected volatile Configuration conf;
+    protected volatile HBaseConfiguration conf;
 
-    Test(final Configuration conf, final int startRow,
+    Test(final HBaseConfiguration conf, final int startRow,
       final int perClientRunRows, final int totalRows, final Status status) {
       super();
       this.startRow = startRow;

@@ -383,7 +383,7 @@ public class PerformanceEvaluation implements HConstants {
   }
 
   class RandomReadTest extends Test {
-    RandomReadTest(final Configuration conf, final int startRow,
+    RandomReadTest(final HBaseConfiguration conf, final int startRow,
       final int perClientRunRows, final int totalRows, final Status status) {
       super(conf, startRow, perClientRunRows, totalRows, status);
     }

@@ -406,7 +406,7 @@ public class PerformanceEvaluation implements HConstants {
   }
 
   class RandomWriteTest extends Test {
-    RandomWriteTest(final Configuration conf, final int startRow,
+    RandomWriteTest(final HBaseConfiguration conf, final int startRow,
       final int perClientRunRows, final int totalRows, final Status status) {
       super(conf, startRow, perClientRunRows, totalRows, status);
     }

@@ -430,7 +430,7 @@ public class PerformanceEvaluation implements HConstants {
     private HStoreKey key = new HStoreKey();
     private TreeMap<Text, byte[]> results = new TreeMap<Text, byte[]>();
 
-    ScanTest(final Configuration conf, final int startRow,
+    ScanTest(final HBaseConfiguration conf, final int startRow,
       final int perClientRunRows, final int totalRows, final Status status) {
       super(conf, startRow, perClientRunRows, totalRows, status);
     }

@@ -464,7 +464,7 @@ public class PerformanceEvaluation implements HConstants {
   }
 
   class SequentialReadTest extends Test {
-    SequentialReadTest(final Configuration conf, final int startRow,
+    SequentialReadTest(final HBaseConfiguration conf, final int startRow,
       final int perClientRunRows, final int totalRows, final Status status) {
       super(conf, startRow, perClientRunRows, totalRows, status);
     }

@@ -481,7 +481,7 @@ public class PerformanceEvaluation implements HConstants {
   }
 
   class SequentialWriteTest extends Test {
-    SequentialWriteTest(final Configuration conf, final int startRow,
+    SequentialWriteTest(final HBaseConfiguration conf, final int startRow,
       final int perClientRunRows, final int totalRows, final Status status) {
       super(conf, startRow, perClientRunRows, totalRows, status);
     }

@@ -24,13 +24,11 @@ import java.io.DataOutputStream;
 import java.io.IOException;
 import java.util.TreeMap;
 
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.dfs.MiniDFSCluster;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.Text;
-
 import org.apache.hadoop.hbase.util.Writables;
+import org.apache.hadoop.io.Text;
 
 /**
  * Test of a long-lived scanner validating as we go.

@@ -135,7 +133,7 @@ public class TestScanner extends HBaseTestCase {
 
     // Initialization
 
-    Configuration conf = new HBaseConfiguration();
+    HBaseConfiguration conf = new HBaseConfiguration();
     cluster = new MiniDFSCluster(conf, 2, true, (String[])null);
     fs = cluster.getFileSystem();
     Path dir = new Path("/hbase");

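Retyping that local variable matters even though the object was already an HBaseConfiguration: under the narrowed signatures, a variable declared as the base class can no longer be handed to the HBase constructors. A hypothetical fragment showing the distinction:

    Configuration c = new HBaseConfiguration();
    new HBaseAdmin(c);        // no longer compiles: HBaseAdmin now wants HBaseConfiguration
    HBaseConfiguration hc = new HBaseConfiguration();
    new HBaseAdmin(hc);       // fine
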
@@ -30,11 +30,11 @@ import junit.textui.TestRunner;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.dfs.MiniDFSCluster;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HBaseAdmin;
+import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HBaseTestCase;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;

@@ -211,7 +211,8 @@ public class TestTableIndex extends HBaseTestCase {
     return c.toString();
   }
 
-  private void scanTable(Configuration c, long firstK) throws IOException {
+  private void scanTable(HBaseConfiguration c, long firstK)
+  throws IOException {
     HTable table = new HTable(c, new Text(TABLE_NAME));
     Text[] columns = { TEXT_INPUT_COLUMN, TEXT_OUTPUT_COLUMN };
     HScannerInterface scanner = table.obtainScanner(columns,

@@ -235,7 +236,7 @@ public class TestTableIndex extends HBaseTestCase {
     }
   }
 
-  private void verify(Configuration c) throws IOException {
+  private void verify(HBaseConfiguration c) throws IOException {
     Path localDir = new Path(this.testDir, "index_" +
       Integer.toString(new Random().nextInt()));
     this.fs.copyToLocalFile(new Path(INDEX_DIR), localDir);

@@ -26,17 +26,11 @@ import java.util.TreeMap;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
-import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.dfs.MiniDFSCluster;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
-import org.apache.hadoop.io.MapWritable;
-import org.apache.hadoop.io.Text;
-import org.apache.hadoop.mapred.JobClient;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.MiniMRCluster;
-import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.hbase.HBaseAdmin;
+import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HColumnDescriptor;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.HScannerInterface;

@@ -46,10 +40,12 @@ import org.apache.hadoop.hbase.HTableDescriptor;
 import org.apache.hadoop.hbase.MiniHBaseCluster;
 import org.apache.hadoop.hbase.MultiRegionTable;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-import org.apache.hadoop.hbase.mapred.TableMap;
-import org.apache.hadoop.hbase.mapred.TableOutputCollector;
-import org.apache.hadoop.hbase.mapred.TableReduce;
-import org.apache.hadoop.hbase.mapred.IdentityTableReduce;
+import org.apache.hadoop.io.MapWritable;
+import org.apache.hadoop.io.Text;
+import org.apache.hadoop.mapred.JobClient;
+import org.apache.hadoop.mapred.JobConf;
+import org.apache.hadoop.mapred.MiniMRCluster;
+import org.apache.hadoop.mapred.Reporter;
 
 /**
  * Test Map/Reduce job over HBase tables

@@ -314,7 +310,7 @@ public class TestTableMapReduce extends MultiRegionTable {
     verify(conf, MULTI_REGION_TABLE_NAME);
   }
 
-  private void scanTable(Configuration conf, String tableName)
+  private void scanTable(HBaseConfiguration conf, String tableName)
   throws IOException {
     HTable table = new HTable(conf, new Text(tableName));
 

@@ -344,7 +340,8 @@ public class TestTableMapReduce extends MultiRegionTable {
   }
 
   @SuppressWarnings("null")
-  private void verify(Configuration conf, String tableName) throws IOException {
+  private void verify(HBaseConfiguration conf, String tableName)
+  throws IOException {
     HTable table = new HTable(conf, new Text(tableName));
 
     Text[] columns = {

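Taken together, these changes make the compiler enforce what was previously only a runtime convention. A sketch of the caller-side pattern after this commit (class name and table name are illustrative, not from the patch):

    import org.apache.hadoop.hbase.HBaseAdmin;
    import org.apache.hadoop.hbase.HBaseConfiguration;
    import org.apache.hadoop.hbase.HTable;
    import org.apache.hadoop.io.Text;

    public class ClientSketch {
      public static void main(String[] args) throws Exception {
        // HBaseConfiguration picks up hbase-default.xml and hbase-site.xml;
        // a bare Configuration is now rejected at compile time.
        HBaseConfiguration conf = new HBaseConfiguration();
        HBaseAdmin admin = new HBaseAdmin(conf);
        HTable table = new HTable(conf, new Text("mytable"));  // illustrative name
      }
    }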