HADOOP-1466 Clean up warnings, visibility and javadoc issues in HBase.

Works in my environment. Since no changes were made to the code aside from whitespace adjustments, not testing with Hudson.

git-svn-id: https://svn.apache.org/repos/asf/lucene/hadoop/trunk/src/contrib/hbase@562608 13f79535-47bb-0310-9956-ffa450edef68
parent 4fa87a0cbb
commit b205cac36d
@@ -84,3 +84,4 @@ Trunk (unreleased changes)
     (except TestHClient and HBaseShell) have been converted to use the new client
     side objects (HTable/HBaseAdmin/HConnection) instead of HClient.
  53. HADOOP-1528 HClient for multiple tables - expose close table function
+ 54. HADOOP-1466 Clean up warnings, visibility and javadoc issues in HBase.
@@ -79,6 +79,7 @@ public class BloomFilterDescriptor implements WritableComparable {
   int vectorSize;
   int nbHash;
 
+  /** {@inheritDoc} */
   @Override
   public String toString() {
     StringBuilder value = new StringBuilder();
@@ -103,11 +104,13 @@ public class BloomFilterDescriptor implements WritableComparable {
     return value.toString();
   }
 
+  /** {@inheritDoc} */
   @Override
   public boolean equals(Object obj) {
     return compareTo(obj) == 0;
   }
 
+  /** {@inheritDoc} */
   @Override
   public int hashCode() {
     int result = Integer.valueOf(this.filterType).hashCode();
@@ -118,18 +121,14 @@ public class BloomFilterDescriptor implements WritableComparable {
 
   // Writable
 
-  /* (non-Javadoc)
-   * @see org.apache.hadoop.io.Writable#readFields(java.io.DataInput)
-   */
+  /** {@inheritDoc} */
   public void readFields(DataInput in) throws IOException {
     filterType = in.readInt();
     vectorSize = in.readInt();
     nbHash = in.readInt();
   }
 
-  /* (non-Javadoc)
-   * @see org.apache.hadoop.io.Writable#write(java.io.DataOutput)
-   */
+  /** {@inheritDoc} */
   public void write(DataOutput out) throws IOException {
     out.writeInt(filterType);
     out.writeInt(vectorSize);
@@ -138,9 +137,7 @@ public class BloomFilterDescriptor implements WritableComparable {
 
   // Comparable
 
-  /* (non-Javadoc)
-   * @see java.lang.Comparable#compareTo(java.lang.Object)
-   */
+  /** {@inheritDoc} */
   public int compareTo(Object o) {
     BloomFilterDescriptor other = (BloomFilterDescriptor)o;
     int result = this.filterType - other.filterType;
@@ -41,7 +41,7 @@ public abstract class HAbstractScanner implements HInternalScannerInterface {
   static Pattern isRegexPattern =
     Pattern.compile("^.*[\\\\+|^&*$\\[\\]\\}{)(]+.*$");
 
-  // The kind of match we are doing on a column:
+  /** The kind of match we are doing on a column: */
   private static enum MATCH_TYPE {
     /** Just check the column family name */
     FAMILY_ONLY,
@@ -51,11 +51,13 @@ public abstract class HAbstractScanner implements HInternalScannerInterface {
     SIMPLE
   }
 
-  // This class provides column matching functions that are more sophisticated
-  // than a simple string compare. There are three types of matching:
-  // 1. Match on the column family name only
-  // 2. Match on the column family + column key regex
-  // 3. Simple match: compare column family + column key literally
+  /**
+   * This class provides column matching functions that are more sophisticated
+   * than a simple string compare. There are three types of matching:
+   * 1. Match on the column family name only
+   * 2. Match on the column family + column key regex
+   * 3. Simple match: compare column family + column key literally
+   */
   private static class ColumnMatcher {
     private boolean wildCardmatch;
     private MATCH_TYPE matchType;
@@ -84,8 +86,7 @@ public abstract class HAbstractScanner implements HInternalScannerInterface {
     }
   }
 
-  // Matching method
-
+  /** Matching method */
   boolean matches(Text c) throws IOException {
     if(this.matchType == MATCH_TYPE.SIMPLE) {
       return c.equals(this.col);
@@ -187,16 +188,12 @@ public abstract class HAbstractScanner implements HInternalScannerInterface {
   /** Mechanism used to shut down the whole scan */
   public abstract void close();
 
-  /* (non-Javadoc)
-   * @see org.apache.hadoop.hbase.HInternalScannerInterface#isWildcardScanner()
-   */
+  /** {@inheritDoc} */
   public boolean isWildcardScanner() {
     return this.wildcardMatch;
   }
 
-  /* (non-Javadoc)
-   * @see org.apache.hadoop.hbase.HInternalScannerInterface#isMultipleMatchScanner()
-   */
+  /** {@inheritDoc} */
   public boolean isMultipleMatchScanner() {
     return this.multipleMatchers;
   }
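The three matching modes described in the new ColumnMatcher javadoc can be sketched standalone. The class below is a hypothetical illustration of the same decision logic (reusing the regex-detection heuristic visible in isRegexPattern); it is not the ColumnMatcher implementation, and the family-only test is an assumption based on the column-spec convention.

import java.util.regex.Pattern;

class ColumnMatchDemo {
  // Same heuristic as isRegexPattern above: a column spec containing regex
  // metacharacters is treated as a pattern rather than a literal name.
  static final Pattern IS_REGEX =
      Pattern.compile("^.*[\\\\+|^&*$\\[\\]\\}{)(]+.*$");

  public static void main(String[] args) {
    for (String spec : new String[] { "anchor:", "anchor:.*", "anchor:href" }) {
      if (spec.endsWith(":")) {
        System.out.println(spec + " -> family-only match");
      } else if (IS_REGEX.matcher(spec).matches()) {
        System.out.println(spec + " -> regex match on family + column key");
      } else {
        System.out.println(spec + " -> simple literal match");
      }
    }
  }
}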
@@ -21,7 +21,11 @@ package org.apache.hadoop.hbase;
 
 import org.apache.hadoop.conf.Configuration;
 
+/**
+ * Adds HBase configuration files to a Configuration
+ */
 public class HBaseConfiguration extends Configuration {
+  /** constructor */
   public HBaseConfiguration() {
     super();
     addDefaultResource("hbase-default.xml");
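A minimal usage sketch: constructing an HBaseConfiguration layers the HBase resource files onto a stock Hadoop Configuration, so HBase keys resolve with no extra wiring. The property name below is illustrative, not taken from this diff.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;

public class ConfDemo {
  public static void main(String[] args) {
    // Picks up hbase-default.xml (and any site-level overrides on the classpath).
    Configuration conf = new HBaseConfiguration();
    // "hbase.master" is an illustrative key; any property defined in the
    // HBase resource files resolves the same way.
    System.out.println(conf.get("hbase.master", "local"));
  }
}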
@@ -148,16 +148,12 @@ public class HColumnDescriptor implements WritableComparable {
     this.versionNumber = COLUMN_DESCRIPTOR_VERSION;
   }
 
-  /**
-   * @return - name of column family
-   */
+  /** @return name of column family */
   public Text getName() {
     return name;
   }
 
-  /**
-   * @return - compression type being used for the column family
-   */
+  /** @return compression type being used for the column family */
   public CompressionType getCompression() {
     CompressionType value = null;
 
@@ -176,13 +172,12 @@ public class HColumnDescriptor implements WritableComparable {
     return value;
   }
 
-  /**
-   * @return - maximum number of versions
-   */
+  /** @return maximum number of versions */
   public int getMaxVersions() {
     return this.maxVersions;
   }
 
+  /** {@inheritDoc} */
   @Override
   public String toString() {
     String compression = "none";
@@ -205,11 +200,13 @@ public class HColumnDescriptor implements WritableComparable {
       + (bloomFilterSpecified ? bloomFilter.toString() : "none") + ")";
   }
 
+  /** {@inheritDoc} */
   @Override
   public boolean equals(Object obj) {
     return compareTo(obj) == 0;
   }
 
+  /** {@inheritDoc} */
   @Override
   public int hashCode() {
     int result = this.name.hashCode();
@@ -225,10 +222,9 @@ public class HColumnDescriptor implements WritableComparable {
     return result;
   }
 
-  //////////////////////////////////////////////////////////////////////////////
   // Writable
-  //////////////////////////////////////////////////////////////////////////////
 
+  /** {@inheritDoc} */
   public void readFields(DataInput in) throws IOException {
     this.versionNumber = in.readByte();
     this.name.readFields(in);
@@ -244,6 +240,7 @@ public class HColumnDescriptor implements WritableComparable {
     }
   }
 
+  /** {@inheritDoc} */
   public void write(DataOutput out) throws IOException {
     out.writeByte(this.versionNumber);
     this.name.write(out);
@@ -258,10 +255,9 @@ public class HColumnDescriptor implements WritableComparable {
     }
   }
 
-  //////////////////////////////////////////////////////////////////////////////
   // Comparable
-  //////////////////////////////////////////////////////////////////////////////
 
+  /** {@inheritDoc} */
   public int compareTo(Object o) {
     // NOTE: we don't do anything with the version number yet.
     // Version numbers will come into play when we introduce an incompatible
@@ -113,7 +113,8 @@ public class HConnectionManager implements HConstants {
   // Known region HServerAddress.toString() -> HRegionInterface
   private HashMap<String, HRegionInterface> servers;
 
-  /** constructor
+  /**
+   * constructor
    * @param conf Configuration object
    */
   @SuppressWarnings("unchecked")
@@ -23,50 +23,62 @@ import org.apache.hadoop.io.*;
 
 import java.io.*;
 
-/*******************************************************************************
+/**
  * A log value.
  *
  * These aren't sortable; you need to sort by the matching HLogKey.
  * The table and row are already identified in HLogKey.
 * This just indicates the column and value.
- ******************************************************************************/
+ */
 public class HLogEdit implements Writable {
   private Text column = new Text();
   private byte [] val;
   private long timestamp;
 
+  /**
+   * Default constructor used by Writable
+   */
   public HLogEdit() {
     super();
   }
 
+  /**
+   * Construct a fully initialized HLogEdit
+   * @param column column name
+   * @param bval value
+   * @param timestamp timestamp for modification
+   */
   public HLogEdit(Text column, byte [] bval, long timestamp) {
     this.column.set(column);
     this.val = bval;
     this.timestamp = timestamp;
   }
 
+  /** @return the column */
   public Text getColumn() {
     return this.column;
   }
 
+  /** @return the value */
   public byte [] getVal() {
     return this.val;
   }
 
+  /** @return the timestamp */
   public long getTimestamp() {
     return this.timestamp;
   }
 
+  /** {@inheritDoc} */
   @Override
   public String toString() {
     return getColumn().toString() + " " + this.getTimestamp() + " " +
       new String(getVal()).trim();
   }
 
-  //////////////////////////////////////////////////////////////////////////////
   // Writable
-  //////////////////////////////////////////////////////////////////////////////
 
+  /** {@inheritDoc} */
   public void write(DataOutput out) throws IOException {
     this.column.write(out);
     out.writeShort(this.val.length);
@@ -74,6 +86,7 @@ public class HLogEdit implements Writable {
     out.writeLong(timestamp);
   }
 
+  /** {@inheritDoc} */
   public void readFields(DataInput in) throws IOException {
     this.column.readFields(in);
     this.val = new byte[in.readShort()];
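Since HLogEdit is a Writable, it round-trips through Hadoop's DataOutput/DataInput streams. A sketch of the usual pattern, assuming the class above is on the classpath (note that write stores the value length with writeShort, so the value is assumed to fit in a short):

import java.io.*;
import org.apache.hadoop.io.Text;

public class HLogEditRoundTrip {
  public static void main(String[] args) throws IOException {
    HLogEdit original = new HLogEdit(new Text("info:server"),
        "10.0.0.1:60020".getBytes(), System.currentTimeMillis());

    // Serialize to a byte array.
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    original.write(new DataOutputStream(bytes));

    // Deserialize into a fresh instance created via the default constructor.
    HLogEdit copy = new HLogEdit();
    copy.readFields(new DataInputStream(
        new ByteArrayInputStream(bytes.toByteArray())));

    System.out.println(copy); // column, timestamp and value survive the trip
  }
}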
@@ -30,35 +30,68 @@ import java.io.IOException;
  * tables.
  */
 public interface HMasterInterface extends VersionedProtocol {
-  public static final long versionID = 1L; // initial version
-
-  //////////////////////////////////////////////////////////////////////////////
-  // Check to see if master is available
-  //////////////////////////////////////////////////////////////////////////////
+  /** Interface version */
+  public static final long versionID = 1L;
 
+  /** @return true if master is available */
   public boolean isMasterRunning();
 
-  //////////////////////////////////////////////////////////////////////////////
   // Admin tools would use these cmds
-  //////////////////////////////////////////////////////////////////////////////
 
+  /**
+   * Creates a new table
+   * @param desc table descriptor
+   * @throws IOException
+   */
   public void createTable(HTableDescriptor desc) throws IOException;
 
+  /**
+   * Deletes a table
+   * @param tableName
+   * @throws IOException
+   */
   public void deleteTable(Text tableName) throws IOException;
 
+  /**
+   * Adds a column to the specified table
+   * @param tableName
+   * @param column column descriptor
+   * @throws IOException
+   */
   public void addColumn(Text tableName, HColumnDescriptor column) throws IOException;
 
+  /**
+   * Deletes a column from the specified table
+   * @param tableName
+   * @param columnName
+   * @throws IOException
+   */
   public void deleteColumn(Text tableName, Text columnName) throws IOException;
 
+  /**
+   * Puts the table on-line (only needed if table has been previously taken offline)
+   * @param tableName
+   * @throws IOException
+   */
   public void enableTable(Text tableName) throws IOException;
 
+  /**
+   * Take table offline
+   *
+   * @param tableName
+   * @throws IOException
+   */
   public void disableTable(Text tableName) throws IOException;
 
+  /**
+   * Shutdown an HBase cluster.
+   * @throws IOException
+   */
   public void shutdown() throws IOException;
 
-  //////////////////////////////////////////////////////////////////////////////
   // These are the method calls of last resort when trying to find an HRegion
-  //////////////////////////////////////////////////////////////////////////////
 
+  /**
+   * Get the location of the root region
+   * @return address of server that serves the root region
+   */
   public HServerAddress findRootRegion();
 }
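The admin half of the interface implies a simple lifecycle: create a table, take it offline before altering its schema, then bring it back online. A hedged sketch against the declarations above; obtaining the master proxy over RPC is elided, and both the disable-before-alter ordering and the HTableDescriptor/HColumnDescriptor string constructors are assumptions rather than facts from this diff.

import java.io.IOException;
import org.apache.hadoop.io.Text;

public class AdminFlow {
  static void reshapeTable(HMasterInterface master) throws IOException {
    Text table = new Text("webtable");

    HTableDescriptor desc = new HTableDescriptor(table.toString());
    desc.addFamily(new HColumnDescriptor("anchor:"));
    master.createTable(desc);

    master.disableTable(table);                       // take the table offline
    master.addColumn(table, new HColumnDescriptor("mime:"));
    master.enableTable(table);                        // bring it back online
  }
}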
@@ -22,13 +22,30 @@ package org.apache.hadoop.hbase;
 import java.io.IOException;
 import org.apache.hadoop.ipc.VersionedProtocol;
 
-/*******************************************************************************
+/**
  * HRegionServers interact with the HMasterRegionInterface to report on local
  * goings-on and to obtain data-handling instructions from the HMaster.
- *********************************************/
+ */
 public interface HMasterRegionInterface extends VersionedProtocol {
+  /** Interface version number */
   public static final long versionID = 1L;
 
+  /**
+   * Called when a region server first starts
+   * @param info
+   * @throws IOException
+   */
   public void regionServerStartup(HServerInfo info) throws IOException;
 
+  /**
+   * Called to renew lease, tell master what the region server is doing and to
+   * receive new instructions from the master
+   *
+   * @param info server's address and start code
+   * @param msgs things the region server wants to tell the master
+   * @return instructions from the master to the region server
+   * @throws IOException
+   */
   public HMsg[] regionServerReport(HServerInfo info, HMsg msgs[])
     throws IOException;
 }
@@ -24,11 +24,24 @@ import org.apache.hadoop.io.*;
 import java.io.*;
 import java.util.*;
 
-/*******************************************************************************
- * HScannerInterface iterates through a set of rows. It's implemented by several classes.
- ******************************************************************************/
+/**
+ * HScannerInterface iterates through a set of rows. It's implemented by
+ * several classes.
+ */
 public interface HScannerInterface {
+  /**
+   * Get the next set of values
+   * @param key will contain the row and timestamp upon return
+   * @param results will contain an entry for each column family member and its value
+   * @return true if data was returned
+   * @throws IOException
+   */
   public boolean next(HStoreKey key, TreeMap<Text, byte[]> results)
     throws IOException;
 
+  /**
+   * Closes a scanner and releases any resources it has allocated
+   * @throws IOException
+   */
   public void close() throws IOException;
 }
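The javadoc above implies the canonical consumption pattern: loop on next() until it returns false, and always close the scanner in a finally block. A minimal sketch (how the scanner instance is obtained is elided, since that depends on the caller):

import java.io.IOException;
import java.util.TreeMap;
import org.apache.hadoop.io.Text;

public class ScanDemo {
  static void dump(HScannerInterface scanner) throws IOException {
    HStoreKey key = new HStoreKey();
    TreeMap<Text, byte[]> results = new TreeMap<Text, byte[]>();
    try {
      while (scanner.next(key, results)) {
        for (Text column : results.keySet()) {
          System.out.println(key.getRow() + "/" + column);
        }
        results.clear(); // next() refills the same map on each call
      }
    } finally {
      scanner.close(); // releases any resources the scanner holds
    }
  }
}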
@@ -23,51 +23,67 @@ import org.apache.hadoop.io.*;
 
 import java.io.*;
 
-/*******************************************************************************
- * HRSInfo contains metainfo about an HRegionServer, including details about the
- * source machine and load statistics.
- ******************************************************************************/
+/**
+ * HServerInfo contains metainfo about an HRegionServer. Currently it only
+ * contains the server start code.
+ *
+ * In the future it will contain information about the source machine and
+ * load statistics.
+ */
 public class HServerInfo implements Writable {
   private HServerAddress serverAddress;
   private long startCode;
 
+  /** default constructor - used by Writable */
   public HServerInfo() {
     this.serverAddress = new HServerAddress();
     this.startCode = 0;
   }
 
+  /**
+   * Constructs a fully initialized object
+   * @param serverAddress
+   * @param startCode
+   */
   public HServerInfo(HServerAddress serverAddress, long startCode) {
     this.serverAddress = new HServerAddress(serverAddress);
     this.startCode = startCode;
   }
 
+  /**
+   * Construct a new object using another as input (like a copy constructor)
+   * @param other
+   */
   public HServerInfo(HServerInfo other) {
     this.serverAddress = new HServerAddress(other.getServerAddress());
     this.startCode = other.getStartCode();
   }
 
+  /** @return the server address */
   public HServerAddress getServerAddress() {
     return serverAddress;
   }
 
+  /** @return the server start code */
   public long getStartCode() {
     return startCode;
  }
 
+  /** {@inheritDoc} */
   @Override
   public String toString() {
     return "address: " + this.serverAddress + ", startcode: " + this.startCode;
   }
 
-  //////////////////////////////////////////////////////////////////////////////
   // Writable
-  //////////////////////////////////////////////////////////////////////////////
 
+  /** {@inheritDoc} */
   public void readFields(DataInput in) throws IOException {
     this.serverAddress.readFields(in);
     this.startCode = in.readLong();
   }
 
+  /** {@inheritDoc} */
   public void write(DataOutput out) throws IOException {
     this.serverAddress.write(out);
     out.writeLong(this.startCode);
@@ -29,14 +29,21 @@ import org.apache.hadoop.conf.*;
 import java.io.*;
 import java.util.*;
 
-/*******************************************************************************
+/**
  * Each HStore maintains a bunch of different data files.
  *
+ * An HStoreFile tracks 4 things: its parent dir, the region identifier, the
+ * column family, and the file identifier. If you know those four things, you
+ * know how to obtain the right HStoreFile.
+ *
+ * When merging or splitting HRegions, we might want to modify one of the
+ * params for an HStoreFile (effectively moving it elsewhere).
+ *
  * The filename is a mix of the parent dir, the region name, the column name,
  * and the file identifier.
  *
  * This class handles all that path-building stuff for you.
- ******************************************************************************/
+ */
 public class HStoreFile implements HConstants, WritableComparable {
   private static final Log LOG = LogFactory.getLog(HStoreFile.class.getName());
   static final byte INFO_SEQ_NUM = 0;
@@ -53,14 +60,7 @@ public class HStoreFile implements HConstants, WritableComparable {
   long fileId;
   Configuration conf;
 
-  /**
-   * An HStoreFile tracks 4 things: its parent dir, the region identifier, the
-   * column family, and the file identifier. If you know those four things, you
-   * know how to obtain the right HStoreFile.
-   *
-   * When merging or splitting HRegions, we might want to modify one of the
-   * params for an HStoreFile (effectively moving it elsewhere).
-   */
+  /** Constructor used by Writable */
   HStoreFile(Configuration conf) {
     this.conf = conf;
     this.dir = new Path(Path.CUR_DIR);
@@ -69,6 +69,14 @@ public class HStoreFile implements HConstants, WritableComparable {
     this.fileId = 0;
   }
 
+  /**
+   * Constructor that fully initializes the object
+   * @param conf Configuration object
+   * @param dir directory path
+   * @param regionName name of the region
+   * @param colFamily name of the column family
+   * @param fileId file identifier
+   */
   HStoreFile(Configuration conf, Path dir, Text regionName,
       Text colFamily, long fileId) {
 
@@ -79,31 +87,35 @@ public class HStoreFile implements HConstants, WritableComparable {
     this.fileId = fileId;
   }
 
-  // Get the individual components
-
+  /** @return the directory path */
   Path getDir() {
     return dir;
   }
 
+  /** @return the region name */
   Text getRegionName() {
     return regionName;
   }
 
+  /** @return the column family */
   Text getColFamily() {
     return colFamily;
   }
 
+  /** @return the file identifier */
   long fileId() {
     return fileId;
   }
 
   // Build full filenames from those components
 
+  /** @return path for MapFile */
   Path getMapFilePath() {
     return new Path(HStoreFile.getMapDir(dir, regionName, colFamily),
       HSTORE_DATFILE_PREFIX + fileId);
   }
 
+  /** @return path for info file */
   Path getInfoFilePath() {
     return new Path(HStoreFile.getInfoDir(dir, regionName, colFamily),
       HSTORE_INFOFILE_PREFIX + fileId);
@@ -111,34 +123,41 @@ public class HStoreFile implements HConstants, WritableComparable {
 
   // Static methods to build partial paths to internal directories. Useful for
   // HStore construction and log-rebuilding.
 
+  /** @return the map directory path */
   static Path getMapDir(Path dir, Text regionName, Text colFamily) {
     return new Path(dir, new Path(HREGIONDIR_PREFIX + regionName,
       new Path(colFamily.toString(), HSTORE_DATFILE_DIR)));
   }
 
+  /** @return the info directory path */
   static Path getInfoDir(Path dir, Text regionName, Text colFamily) {
     return new Path(dir, new Path(HREGIONDIR_PREFIX + regionName,
       new Path(colFamily.toString(), HSTORE_INFO_DIR)));
   }
 
+  /** @return the bloom filter directory path */
   static Path getFilterDir(Path dir, Text regionName, Text colFamily) {
     return new Path(dir, new Path(HREGIONDIR_PREFIX + regionName,
       new Path(colFamily.toString(), HSTORE_FILTER_DIR)));
   }
 
+  /** @return the HStore directory path */
   static Path getHStoreDir(Path dir, Text regionName, Text colFamily) {
     return new Path(dir, new Path(HREGIONDIR_PREFIX + regionName,
       colFamily.toString()));
   }
 
+  /** @return the HRegion directory path */
   static Path getHRegionDir(Path dir, Text regionName) {
     return new Path(dir, new Path(HREGIONDIR_PREFIX + regionName));
   }
 
   /**
-   * Obtain a brand-new randomly-named HStoreFile. Checks the existing
-   * filesystem if the file already exists.
+   * @return a brand-new randomly-named HStoreFile.
+   *
+   * Checks the filesystem to determine if the file already exists. If so, it
+   * will keep generating names until it generates a name that does not exist.
    */
   static HStoreFile obtainNewHStoreFile(Configuration conf, Path dir,
       Text regionName, Text colFamily, FileSystem fs) throws IOException {
@@ -157,10 +176,18 @@ public class HStoreFile implements HConstants, WritableComparable {
   }
 
   /**
-   * Create a series of HStoreFiles loaded from the given directory.
+   * Creates a series of HStoreFiles loaded from the given directory.
    *
    * There must be a matching 'mapdir' and 'loginfo' pair of files.
    * If only one exists, we'll delete it.
+   *
+   * @param conf Configuration object
+   * @param dir directory path
+   * @param regionName region name
+   * @param colFamily column family
+   * @param fs file system
+   * @return Vector of HStoreFiles
+   * @throws IOException
    */
   static Vector<HStoreFile> loadHStoreFiles(Configuration conf, Path dir,
       Text regionName, Text colFamily, FileSystem fs) throws IOException {
@@ -173,8 +200,12 @@ public class HStoreFile implements HConstants, WritableComparable {
       String name = datfiles[i].getName();
 
       if(name.startsWith(HSTORE_DATFILE_PREFIX)) {
-        Long fileId = Long.parseLong(name.substring(HSTORE_DATFILE_PREFIX.length()));
-        HStoreFile curfile = new HStoreFile(conf, dir, regionName, colFamily, fileId);
+        Long fileId =
+          Long.parseLong(name.substring(HSTORE_DATFILE_PREFIX.length()));
+
+        HStoreFile curfile =
+          new HStoreFile(conf, dir, regionName, colFamily, fileId);
+
         Path mapfile = curfile.getMapFilePath();
         Path infofile = curfile.getInfoFilePath();
 
@@ -193,8 +224,12 @@ public class HStoreFile implements HConstants, WritableComparable {
       String name = infofiles[i].getName();
 
       if(name.startsWith(HSTORE_INFOFILE_PREFIX)) {
-        long fileId = Long.parseLong(name.substring(HSTORE_INFOFILE_PREFIX.length()));
-        HStoreFile curfile = new HStoreFile(conf, dir, regionName, colFamily, fileId);
+        long fileId =
+          Long.parseLong(name.substring(HSTORE_INFOFILE_PREFIX.length()));
+
+        HStoreFile curfile =
+          new HStoreFile(conf, dir, regionName, colFamily, fileId);
+
         Path mapfile = curfile.getMapFilePath();
 
         if(! fs.exists(mapfile)) {
@@ -205,31 +240,40 @@ public class HStoreFile implements HConstants, WritableComparable {
     return results;
   }
 
   //////////////////////////////////////////////////////////////////////////////
   // File handling
   //////////////////////////////////////////////////////////////////////////////
 
   /**
    * Break this HStoreFile file into two new parts, which live in different
    * brand-new HRegions.
+   *
+   * @param midKey the key which will be the starting key of the second region
+   * @param dstA the file which will contain keys from the start of the source
+   * @param dstB the file which will contain keys from midKey to end of source
+   * @param fs file system
+   * @param c configuration
+   * @throws IOException
    */
   void splitStoreFile(Text midKey, HStoreFile dstA, HStoreFile dstB,
-      FileSystem fs, Configuration c)
-  throws IOException {
+      FileSystem fs, Configuration c) throws IOException {
 
     // Copy the appropriate tuples to one MapFile or the other.
 
     MapFile.Reader in = new MapFile.Reader(fs, getMapFilePath().toString(), c);
     try {
       MapFile.Writer outA = new MapFile.Writer(c, fs,
         dstA.getMapFilePath().toString(), HStoreKey.class,
         ImmutableBytesWritable.class);
 
       try {
         MapFile.Writer outB = new MapFile.Writer(c, fs,
          dstB.getMapFilePath().toString(), HStoreKey.class,
          ImmutableBytesWritable.class);
 
        try {
          long count = 0;
          HStoreKey readkey = new HStoreKey();
          ImmutableBytesWritable readval = new ImmutableBytesWritable();
 
          while(in.next(readkey, readval)) {
            if(readkey.getRow().compareTo(midKey) < 0) {
              outA.append(readkey, readval);
@@ -243,12 +287,15 @@ public class HStoreFile implements HConstants, WritableComparable {
            }
          }
        }
+
      } finally {
        outB.close();
      }
+
    } finally {
      outA.close();
    }
+
  } finally {
    in.close();
  }
@@ -260,8 +307,12 @@ public class HStoreFile implements HConstants, WritableComparable {
   }
 
   /**
-   * Write to this HStoreFile with all the contents of the given source HStoreFiles.
-   * We are merging multiple regions into a single new one.
+   * Merges the contents of the given source HStoreFiles into a single new one.
+   *
+   * @param srcFiles files to be merged
+   * @param fs file system
+   * @param conf configuration object
+   * @throws IOException
    */
   void mergeStoreFiles(Vector<HStoreFile> srcFiles, FileSystem fs,
       Configuration conf) throws IOException {
@@ -273,9 +324,9 @@ public class HStoreFile implements HConstants, WritableComparable {
       HStoreKey.class, ImmutableBytesWritable.class);
 
     try {
-      for(Iterator<HStoreFile> it = srcFiles.iterator(); it.hasNext(); ) {
-        HStoreFile src = it.next();
-        MapFile.Reader in = new MapFile.Reader(fs, src.getMapFilePath().toString(), conf);
+      for(HStoreFile src: srcFiles) {
+        MapFile.Reader in =
+          new MapFile.Reader(fs, src.getMapFilePath().toString(), conf);
 
         try {
           HStoreKey readkey = new HStoreKey();
@@ -283,6 +334,7 @@ public class HStoreFile implements HConstants, WritableComparable {
           while(in.next(readkey, readval)) {
             out.append(readkey, readval);
           }
+
         } finally {
           in.close();
         }
@@ -293,6 +345,7 @@ public class HStoreFile implements HConstants, WritableComparable {
     }
 
     // Build a unified InfoFile from the source InfoFiles.
+
     long unifiedSeqId = -1;
     for(Iterator<HStoreFile> it = srcFiles.iterator(); it.hasNext(); ) {
       HStoreFile hsf = it.next();
@@ -304,7 +357,13 @@ public class HStoreFile implements HConstants, WritableComparable {
     writeInfo(fs, unifiedSeqId);
   }
 
-  /** Read in an info file, give it a unique ID. */
+  /**
+   * Reads in an info file, and gives it a unique ID.
+   *
+   * @param fs file system
+   * @return new unique id
+   * @throws IOException
+   */
   long loadInfo(FileSystem fs) throws IOException {
     Path p = getInfoFilePath();
     DataInputStream in = new DataInputStream(fs.open(p));
@@ -319,7 +378,13 @@ public class HStoreFile implements HConstants, WritableComparable {
     }
   }
 
-  /** Write the file-identifier to disk */
+  /**
+   * Writes the file-identifier to disk
+   *
+   * @param fs file system
+   * @param infonum file id
+   * @throws IOException
+   */
   void writeInfo(FileSystem fs, long infonum) throws IOException {
     Path p = getInfoFilePath();
     DataOutputStream out = new DataOutputStream(fs.create(p));
@@ -333,11 +398,13 @@ public class HStoreFile implements HConstants, WritableComparable {
     }
   }
 
+  /** {@inheritDoc} */
   @Override
   public boolean equals(Object o) {
     return this.compareTo(o) == 0;
   }
 
+  /** {@inheritDoc} */
   @Override
   public int hashCode() {
     int result = this.dir.hashCode();
@@ -347,13 +414,9 @@ public class HStoreFile implements HConstants, WritableComparable {
     return result;
   }
 
-  //////////////////////////////////////////////////////////////////////////////
   // Writable
-  //////////////////////////////////////////////////////////////////////////////
 
-  /* (non-Javadoc)
-   * @see org.apache.hadoop.io.Writable#write(java.io.DataOutput)
-   */
+  /** {@inheritDoc} */
   public void write(DataOutput out) throws IOException {
     out.writeUTF(dir.toString());
     regionName.write(out);
@@ -361,9 +424,7 @@ public class HStoreFile implements HConstants, WritableComparable {
     out.writeLong(fileId);
   }
 
-  /* (non-Javadoc)
-   * @see org.apache.hadoop.io.Writable#readFields(java.io.DataInput)
-   */
+  /** {@inheritDoc} */
   public void readFields(DataInput in) throws IOException {
     this.dir = new Path(in.readUTF());
     this.regionName.readFields(in);
@@ -371,13 +432,9 @@ public class HStoreFile implements HConstants, WritableComparable {
     this.fileId = in.readLong();
   }
 
-  //////////////////////////////////////////////////////////////////////////////
   // Comparable
-  //////////////////////////////////////////////////////////////////////////////
 
-  /* (non-Javadoc)
-   * @see java.lang.Comparable#compareTo(java.lang.Object)
-   */
+  /** {@inheritDoc} */
  public int compareTo(Object o) {
     HStoreFile other = (HStoreFile) o;
     int result = this.dir.compareTo(other.dir);
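The class comment says an HStoreFile is identified by parent dir, region name, column family, and file id, and the static helpers above compose the on-disk layout from those four parts. A plain-string sketch of the same composition; the literal path segments are illustrative stand-ins, not the real HConstants values:

public class StorePathDemo {
  public static void main(String[] args) {
    String dir = "/hbase";
    String region = "webtable_1234";
    String family = "anchor";
    long fileId = 42L;

    // Mirrors getMapDir(...) plus the HSTORE_DATFILE_PREFIX + fileId filename;
    // "hregion_", "mapfiles" and "map_" are assumed names.
    String mapFile = dir + "/hregion_" + region + "/" + family
        + "/mapfiles/map_" + fileId;
    System.out.println(mapFile);
  }
}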
@@ -147,16 +147,14 @@ public class HStoreKey implements WritableComparable {
     this.timestamp = timestamp;
   }
 
-  /**
-   * @return Approximate size in bytes of this key.
-   */
+  /** @return Approximate size in bytes of this key. */
   public long getSize() {
     return this.row.getLength() + this.column.getLength() +
       8 /* There is no sizeof in java. Presume long is 8 (64bit machine)*/;
   }
 
   /**
-   * Construct a new HStoreKey from another
+   * Constructs a new HStoreKey from another
    *
    * @param other the source key
    */
@@ -218,6 +216,7 @@ public class HStoreKey implements WritableComparable {
   }
 
   /**
+   * Compares the row and column of two keys
    * @param other Key to compare against. Compares row and column.
    * @return True if same row and column.
   * @see #matchesWithoutColumn(HStoreKey)
@@ -229,6 +228,8 @@ public class HStoreKey implements WritableComparable {
   }
 
   /**
+   * Compares the row and timestamp of two keys
+   *
    * @param other Key to compare against. Compares row and timestamp.
    *
    * @return True if same row and timestamp is greater than <code>other</code>
@@ -241,6 +242,8 @@ public class HStoreKey implements WritableComparable {
   }
 
   /**
+   * Compares the row and column family of two keys
+   *
    * @param that Key to compare against. Compares row and column family
    *
    * @return true if same row and column family
@@ -255,16 +258,19 @@ public class HStoreKey implements WritableComparable {
       compareTo(extractFamily(that.getColumn())) == 0;
   }
 
+  /** {@inheritDoc} */
   @Override
   public String toString() {
     return row.toString() + "/" + column.toString() + "/" + timestamp;
   }
 
+  /** {@inheritDoc} */
   @Override
   public boolean equals(Object obj) {
     return compareTo(obj) == 0;
   }
 
+  /** {@inheritDoc} */
   @Override
   public int hashCode() {
     int result = this.row.hashCode();
@@ -273,13 +279,9 @@ public class HStoreKey implements WritableComparable {
     return result;
   }
 
-  //////////////////////////////////////////////////////////////////////////////
   // Comparable
-  //////////////////////////////////////////////////////////////////////////////
 
-  /* (non-Javadoc)
-   * @see java.lang.Comparable#compareTo(java.lang.Object)
-   */
+  /** {@inheritDoc} */
   public int compareTo(Object o) {
     HStoreKey other = (HStoreKey) o;
     int result = this.row.compareTo(other.row);
@@ -296,22 +298,16 @@ public class HStoreKey implements WritableComparable {
     return result;
   }
 
-  //////////////////////////////////////////////////////////////////////////////
   // Writable
-  //////////////////////////////////////////////////////////////////////////////
 
-  /* (non-Javadoc)
-   * @see org.apache.hadoop.io.Writable#write(java.io.DataOutput)
-   */
+  /** {@inheritDoc} */
   public void write(DataOutput out) throws IOException {
     row.write(out);
     column.write(out);
     out.writeLong(timestamp);
   }
 
-  /* (non-Javadoc)
-   * @see org.apache.hadoop.io.Writable#readFields(java.io.DataInput)
-   */
+  /** {@inheritDoc} */
   public void readFields(DataInput in) throws IOException {
     row.readFields(in);
     column.readFields(in);
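compareTo orders keys by row first and consults the other fields only on ties, which is what lets the matches* helpers above stop comparing early. A small sketch of the resulting sort; the three-argument constructor is inferred from the field assignments shown, and the detail that newer timestamps sort ahead of older ones is an assumption about the tie-break direction, not something visible in this hunk:

import java.util.TreeSet;
import org.apache.hadoop.io.Text;

public class KeyOrderDemo {
  public static void main(String[] args) {
    TreeSet<HStoreKey> keys = new TreeSet<HStoreKey>();
    keys.add(new HStoreKey(new Text("row1"), new Text("info:a"), 100L));
    keys.add(new HStoreKey(new Text("row1"), new Text("info:a"), 200L));
    keys.add(new HStoreKey(new Text("row2"), new Text("info:a"), 100L));

    for (HStoreKey k : keys) {
      System.out.println(k); // row1 entries first; ties broken by timestamp
    }
  }
}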
@@ -78,7 +78,7 @@ public class HTableDescriptor implements WritableComparable {
   }
 
   /**
-   * Add a column family.
+   * Adds a column family.
    * @param family HColumnDescriptor of family to add.
    */
   public void addFamily(HColumnDescriptor family) {
@@ -95,7 +95,8 @@ public class HTableDescriptor implements WritableComparable {
     return families.containsKey(family);
   }
 
-  /** All the column families in this table.
+  /**
+   * All the column families in this table.
    *
    * TODO: What is this used for? Seems dangerous to let people play with our
    * private members.
@@ -106,16 +107,19 @@ public class HTableDescriptor implements WritableComparable {
     return families;
   }
 
+  /** {@inheritDoc} */
   @Override
   public String toString() {
     return "name: " + this.name.toString() + ", families: " + this.families;
   }
 
+  /** {@inheritDoc} */
   @Override
   public boolean equals(Object obj) {
     return compareTo(obj) == 0;
   }
 
+  /** {@inheritDoc} */
   @Override
   public int hashCode() {
     // TODO: Cache.
@@ -128,10 +132,9 @@ public class HTableDescriptor implements WritableComparable {
     return result;
   }
 
-  //////////////////////////////////////////////////////////////////////////////
   // Writable
-  //////////////////////////////////////////////////////////////////////////////
 
+  /** {@inheritDoc} */
   public void write(DataOutput out) throws IOException {
     name.write(out);
     out.writeInt(families.size());
@@ -141,6 +144,7 @@ public class HTableDescriptor implements WritableComparable {
     }
   }
 
+  /** {@inheritDoc} */
   public void readFields(DataInput in) throws IOException {
     this.name.readFields(in);
     int numCols = in.readInt();
@@ -152,10 +156,9 @@ public class HTableDescriptor implements WritableComparable {
     }
   }
 
-  //////////////////////////////////////////////////////////////////////////////
   // Comparable
-  //////////////////////////////////////////////////////////////////////////////
 
+  /** {@inheritDoc} */
   public int compareTo(Object o) {
     HTableDescriptor other = (HTableDescriptor) o;
     int result = name.compareTo(other.name);
@@ -21,12 +21,20 @@ package org.apache.hadoop.hbase;
 
 import java.io.IOException;
 
+/**
+ * Thrown when an invalid column name is encountered
+ */
 public class InvalidColumnNameException extends IOException {
   private static final long serialVersionUID = 1L << 29 - 1L;
+  /** default constructor */
   public InvalidColumnNameException() {
     super();
   }
 
+  /**
+   * Constructor
+   * @param s message
+   */
   public InvalidColumnNameException(String s) {
     super(s);
   }
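One detail worth noting in these serialVersionUID expressions: Java's shift operator binds more loosely than subtraction, so 1L << 29 - 1L parses as 1L << 28 rather than (1L << 29) - 1. Any constant is a legal serial version ID, so nothing is broken; the snippet below just demonstrates the parse:

public class ShiftPrecedence {
  public static void main(String[] args) {
    System.out.println(1L << 29 - 1L);   // 268435456, i.e. 1L << 28
    System.out.println((1L << 29) - 1L); // 536870911
  }
}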
@@ -21,12 +21,20 @@ package org.apache.hadoop.hbase;
 
 import java.io.IOException;
 
+/**
+ * Thrown when a locking error is encountered
+ */
 public class LockException extends IOException {
   private static final long serialVersionUID = 1L << 13 - 1L;
+  /** default constructor */
   public LockException() {
     super();
   }
 
+  /**
+   * Constructor
+   * @param s message
+   */
   public LockException(String s) {
     super(s);
   }
@@ -21,12 +21,20 @@ package org.apache.hadoop.hbase;
 
 import java.io.IOException;
 
+/**
+ * Thrown if the master is not running
+ */
 public class MasterNotRunningException extends IOException {
   private static final long serialVersionUID = 1L << 23 - 1L;
+  /** default constructor */
   public MasterNotRunningException() {
     super();
   }
 
+  /**
+   * Constructor
+   * @param s message
+   */
   public MasterNotRunningException(String s) {
     super(s);
   }
@@ -21,13 +21,21 @@ package org.apache.hadoop.hbase;
 
 import java.io.IOException;
 
+/**
+ * Thrown when no region server can be found for a region
+ */
 public class NoServerForRegionException extends IOException {
   private static final long serialVersionUID = 1L << 11 - 1L;
 
+  /** default constructor */
   public NoServerForRegionException() {
     super();
   }
 
+  /**
+   * Constructor
+   * @param s message
+   */
   public NoServerForRegionException(String s) {
     super(s);
   }
@@ -21,12 +21,22 @@ package org.apache.hadoop.hbase;
 
 import java.io.IOException;
 
+/**
+ * Thrown by a region server if it is sent a request for a region it is not
+ * serving.
+ */
 public class NotServingRegionException extends IOException {
   private static final long serialVersionUID = 1L << 17 - 1L;
+
+  /** default constructor */
   public NotServingRegionException() {
     super();
   }
 
+  /**
+   * Constructor
+   * @param s message
+   */
   public NotServingRegionException(String s) {
     super(s);
   }
@@ -17,11 +17,21 @@ package org.apache.hadoop.hbase;
 
 import java.io.IOException;
 
+/**
+ * Thrown when a table exists but should not
+ */
 public class TableExistsException extends IOException {
   private static final long serialVersionUID = 1L << 7 - 1L;
+  /** default constructor */
   public TableExistsException() {
     super();
   }
 
+  /**
+   * Constructor
+   *
+   * @param s message
+   */
   public TableExistsException(String s) {
     super(s);
   }
@@ -21,12 +21,20 @@ package org.apache.hadoop.hbase;
 
 import java.io.IOException;
 
+/**
+ * Thrown if a table should be offline but is not
+ */
 public class TableNotDisabledException extends IOException {
   private static final long serialVersionUID = 1L << 19 - 1L;
+  /** default constructor */
   public TableNotDisabledException() {
     super();
   }
 
+  /**
+   * Constructor
+   * @param s message
+   */
   public TableNotDisabledException(String s) {
     super(s);
   }
@@ -21,13 +21,21 @@ package org.apache.hadoop.hbase;
 
 import java.io.IOException;
 
+/**
+ * Thrown if a region server is passed an unknown scanner id
+ */
 public class UnknownScannerException extends IOException {
   private static final long serialVersionUID = 993179627856392526L;
 
+  /** constructor */
   public UnknownScannerException() {
     super();
   }
 
+  /**
+   * Constructor
+   * @param s message
+   */
   public UnknownScannerException(String s) {
     super(s);
   }
@@ -21,13 +21,21 @@ package org.apache.hadoop.hbase;
 
 import java.io.IOException;
 
+/**
+ * Thrown when a request contains a key which is not part of this region
+ */
 public class WrongRegionException extends IOException {
   private static final long serialVersionUID = 993179627856392526L;
 
+  /** constructor */
   public WrongRegionException() {
     super();
   }
 
+  /**
+   * Constructor
+   * @param s message
+   */
   public WrongRegionException(String s) {
     super(s);
   }
@@ -25,10 +25,16 @@ package org.apache.hadoop.hbase.filter;
 public class InvalidRowFilterException extends RuntimeException {
   private static final long serialVersionUID = 2667894046345657865L;
 
+  /** constructor */
   public InvalidRowFilterException() {
     super();
   }
 
+  /**
+   * constructor
+   * @param s message
+   */
   public InvalidRowFilterException(String s) {
     super(s);
   }
@@ -81,11 +81,9 @@ public class PageRowFilter implements RowFilterInterface {
     rowsAccepted = 0;
   }
 
-  /**
-   *
-   * {@inheritDoc}
-   */
-  public void rowProcessed(boolean filtered, Text rowKey) {
+  /** {@inheritDoc} */
+  public void rowProcessed(boolean filtered,
+      @SuppressWarnings("unused") Text rowKey) {
     if (!filtered) {
       this.rowsAccepted++;
       if (LOG.isDebugEnabled()) {
@@ -80,18 +80,13 @@ public class RegExpRowFilter implements RowFilterInterface {
     this.setColumnFilters(columnFilter);
   }
 
-  /**
-   *
-   * {@inheritDoc}
-   */
+  /** {@inheritDoc} */
+  @SuppressWarnings("unused")
   public void rowProcessed(boolean filtered, Text rowKey) {
     //doesn't care
   }
 
-  /**
-   *
-   * {@inheritDoc}
-   */
+  /** {@inheritDoc} */
   public boolean processAlways() {
     return false;
   }
@@ -45,6 +45,7 @@ public interface RowFilterInterface extends Writable {
    * RowFilterSet with an OR operator.
    *
    * @see RowFilterSet
+   * @param filtered
   * @param key
    */
   void rowProcessed(boolean filtered, Text key);
@@ -38,8 +38,12 @@ import org.apache.hadoop.io.Text;
  */
 public class RowFilterSet implements RowFilterInterface {
 
+  /** set operator */
   public static enum Operator {
-    MUST_PASS_ALL, MUST_PASS_ONE
+    /** !AND */
+    MUST_PASS_ALL,
+    /** !OR */
+    MUST_PASS_ONE
   }
 
   private Operator operator = Operator.MUST_PASS_ALL;
@@ -77,10 +81,7 @@ public class RowFilterSet implements RowFilterInterface {
     this.operator = operator;
   }
 
-  /**
-   *
-   * {@inheritDoc}
-   */
+  /** {@inheritDoc} */
   public void validate(final Text[] columns) {
     for (RowFilterInterface filter : filters) {
       filter.validate(columns);
@@ -91,10 +92,7 @@ public class RowFilterSet implements RowFilterInterface {
     }
   }
 
-  /**
-   *
-   * {@inheritDoc}
-   */
+  /** {@inheritDoc} */
   public void reset() {
     for (RowFilterInterface filter : filters) {
       filter.reset();
@@ -105,10 +103,7 @@ public class RowFilterSet implements RowFilterInterface {
     }
   }
 
-  /**
-   *
-   * {@inheritDoc}
-   */
+  /** {@inheritDoc} */
   public void rowProcessed(boolean filtered, Text rowKey) {
     for (RowFilterInterface filter : filters) {
       filter.rowProcessed(filtered, rowKey);
@@ -119,10 +114,7 @@ public class RowFilterSet implements RowFilterInterface {
     }
   }
 
-  /**
-   *
-   * {@inheritDoc}
-   */
+  /** {@inheritDoc} */
   public boolean processAlways() {
     for (RowFilterInterface filter : filters) {
       if (filter.processAlways()) {
@@ -136,10 +128,7 @@ public class RowFilterSet implements RowFilterInterface {
     return false;
   }
 
-  /**
-   *
-   * {@inheritDoc}
-   */
+  /** {@inheritDoc} */
   public boolean filterAllRemaining() {
     boolean result = operator == Operator.MUST_PASS_ONE;
     for (RowFilterInterface filter : filters) {
@@ -167,10 +156,7 @@ public class RowFilterSet implements RowFilterInterface {
     return result;
   }
 
-  /**
-   *
-   * {@inheritDoc}
-   */
+  /** {@inheritDoc} */
   public boolean filter(final Text rowKey) {
     boolean resultFound = false;
     boolean result = operator == Operator.MUST_PASS_ONE;
@@ -205,10 +191,7 @@ public class RowFilterSet implements RowFilterInterface {
     return result;
   }
 
-  /**
-   *
-   * {@inheritDoc}
-   */
+  /** {@inheritDoc} */
   public boolean filter(final Text rowKey, final Text colKey,
       final byte[] data) {
     boolean resultFound = false;
@@ -248,10 +231,7 @@ public class RowFilterSet implements RowFilterInterface {
     return result;
   }
 
-  /**
-   *
-   * {@inheritDoc}
-   */
+  /** {@inheritDoc} */
   public boolean filterNotNull(final TreeMap<Text, byte[]> columns) {
     boolean resultFound = false;
     boolean result = operator == Operator.MUST_PASS_ONE;
@@ -286,10 +266,7 @@ public class RowFilterSet implements RowFilterInterface {
     return result;
   }
 
-  /**
-   *
-   * {@inheritDoc}
-   */
+  /** {@inheritDoc} */
   public void readFields(final DataInput in) throws IOException {
     byte opByte = in.readByte();
     operator = Operator.values()[opByte];
@@ -323,10 +300,7 @@ public class RowFilterSet implements RowFilterInterface {
 
   }
 
-  /**
-   *
-   * {@inheritDoc}
-   */
+  /** {@inheritDoc} */
   public void write(final DataOutput out) throws IOException {
     out.writeByte(operator.ordinal());
     out.writeInt(filters.size());
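MUST_PASS_ALL and MUST_PASS_ONE give AND/OR composition over the member filters. A hedged sketch of combining two filters; the RowFilterSet (Operator, Set) constructor and the PageRowFilter/StopRowFilter constructor arguments are assumptions based on the fields shown here, not confirmed signatures:

import java.util.HashSet;
import java.util.Set;
import org.apache.hadoop.io.Text;

public class FilterComboDemo {
  static RowFilterInterface firstPageBeforeStop() {
    Set<RowFilterInterface> filters = new HashSet<RowFilterInterface>();
    filters.add(new PageRowFilter(100));                 // at most 100 rows...
    filters.add(new StopRowFilter(new Text("row_999"))); // ...and none at or past the stop row

    // MUST_PASS_ALL: a row survives only if every member filter accepts it.
    return new RowFilterSet(RowFilterSet.Operator.MUST_PASS_ALL, filters);
  }
}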
@@ -80,34 +80,23 @@ public class StopRowFilter implements RowFilterInterface {
     // Nothing to reset
   }
 
-  /**
-   *
-   * {@inheritDoc}
-   */
+  /** {@inheritDoc} */
+  @SuppressWarnings("unused")
   public void rowProcessed(boolean filtered, Text rowKey) {
     // Doesn't care
   }
 
-  /**
-   *
-   * {@inheritDoc}
-   */
+  /** {@inheritDoc} */
   public boolean processAlways() {
     return false;
   }
 
-  /**
-   *
-   * {@inheritDoc}
-   */
+  /** {@inheritDoc} */
   public boolean filterAllRemaining() {
     return false;
   }
 
-  /**
-   *
-   * {@inheritDoc}
-   */
+  /** {@inheritDoc} */
   public boolean filter(final Text rowKey) {
     boolean result = this.stopRowKey.compareTo(rowKey) <= 0;
     if (LOG.isDebugEnabled()) {
@@ -118,6 +107,8 @@ public class StopRowFilter implements RowFilterInterface {
   }
 
   /**
+   * {@inheritDoc}
+   *
    * Because StopRowFilter does not examine column information, this method
    * defaults to calling the rowKey-only version of filter.
   */
@@ -127,7 +118,8 @@ public class StopRowFilter implements RowFilterInterface {
     return filter(rowKey);
   }
 
-  /**
+  /** {@inheritDoc}
+   *
    * Because StopRowFilter does not examine column information, this method
    * defaults to calling filterAllRemaining().
    *
@@ -138,18 +130,12 @@ public class StopRowFilter implements RowFilterInterface {
     return filterAllRemaining();
   }
 
-  /**
-   *
-   * {@inheritDoc}
-   */
+  /** {@inheritDoc} */
   public void readFields(DataInput in) throws IOException {
     stopRowKey = new Text(in.readUTF());
   }
 
-  /**
-   *
-   * {@inheritDoc}
-   */
+  /** {@inheritDoc} */
   public void write(DataOutput out) throws IOException {
     out.writeUTF(stopRowKey.toString());
   }
@@ -50,6 +50,10 @@ public class WhileMatchRowFilter implements RowFilterInterface {
     super();
   }
 
+  /**
+   * Constructor
+   * @param filter
+   */
   public WhileMatchRowFilter(RowFilterInterface filter) {
     this.filter = filter;
   }
@@ -63,10 +67,7 @@ public class WhileMatchRowFilter implements RowFilterInterface {
     return this.filter;
   }
 
-  /**
-   *
-   * {@inheritDoc}
-   */
+  /** {@inheritDoc} */
   public void reset() {
     if (LOG.isDebugEnabled()) {
       LOG.debug("Resetting.");
@@ -75,10 +76,7 @@ public class WhileMatchRowFilter implements RowFilterInterface {
     this.filter.reset();
   }
 
-  /**
-   *
-   * {@inheritDoc}
-   */
+  /** {@inheritDoc} */
   public boolean processAlways() {
     return true;
   }
@@ -94,10 +92,7 @@ public class WhileMatchRowFilter implements RowFilterInterface {
     return this.filterAllRemaining || this.filter.filterAllRemaining();
   }
 
-  /**
-   *
-   * {@inheritDoc}
-   */
+  /** {@inheritDoc} */
   public boolean filter(final Text rowKey) {
     changeFAR(this.filter.filter(rowKey));
     boolean result = filterAllRemaining();
@@ -107,10 +102,7 @@ public class WhileMatchRowFilter implements RowFilterInterface {
     return result;
   }
 
-  /**
-   *
-   * {@inheritDoc}
-   */
+  /** {@inheritDoc} */
   public boolean filter(final Text rowKey, final Text colKey,
       final byte[] data) {
     changeFAR(this.filter.filter(rowKey, colKey, data));
@@ -122,10 +114,7 @@ public class WhileMatchRowFilter implements RowFilterInterface {
     return result;
   }
 
-  /**
-   *
-   * {@inheritDoc}
-   */
+  /** {@inheritDoc} */
   public boolean filterNotNull(final TreeMap<Text, byte[]> columns) {
     changeFAR(this.filter.filterNotNull(columns));
     boolean result = filterAllRemaining();
@@ -150,26 +139,17 @@ public class WhileMatchRowFilter implements RowFilterInterface {
     }
   }
 
-  /**
-   *
-   * {@inheritDoc}
-   */
+  /** {@inheritDoc} */
   public void rowProcessed(boolean filtered, Text rowKey) {
     this.filter.rowProcessed(filtered, rowKey);
   }
 
-  /**
-   *
-   * {@inheritDoc}
-   */
+  /** {@inheritDoc} */
   public void validate(Text[] columns) {
     this.filter.validate(columns);
   }
 
-  /**
-   *
-   * {@inheritDoc}
-   */
+  /** {@inheritDoc} */
   public void readFields(DataInput in) throws IOException {
     String className = in.readUTF();
 
@@ -193,10 +173,7 @@ public class WhileMatchRowFilter implements RowFilterInterface {
   }
   }
 
-  /**
-   *
-   * {@inheritDoc}
-   */
+  /** {@inheritDoc} */
   public void write(DataOutput out) throws IOException {
     out.writeUTF(this.filter.getClass().getName());
     this.filter.write(out);
@@ -88,7 +88,7 @@ public class ImmutableBytesWritable implements WritableComparable {
}

/**
 * Get the current size of the buffer.
 * @return the current size of the buffer.
 */
public int getSize() {
if (this.bytes == null) {

@@ -99,13 +99,13 @@ public class ImmutableBytesWritable implements WritableComparable {
}

// inherit javadoc
/** {@inheritDoc} */
public void readFields(final DataInput in) throws IOException {
this.bytes = new byte[in.readInt()];
in.readFully(this.bytes, 0, this.bytes.length);
}

// inherit javadoc
/** {@inheritDoc} */
public void write(final DataOutput out) throws IOException {
out.writeInt(this.bytes.length);
out.write(this.bytes, 0, this.bytes.length);

@@ -113,6 +113,8 @@ public class ImmutableBytesWritable implements WritableComparable {

// Below methods copied from BytesWritable

/** {@inheritDoc} */
@Override
public int hashCode() {
return WritableComparator.hashBytes(bytes, this.bytes.length);
}

@@ -127,6 +129,12 @@ public class ImmutableBytesWritable implements WritableComparable {
return compareTo(((ImmutableBytesWritable)right_obj).get());
}

/**
 * Compares the bytes in this object to the specified byte array
 * @param that
 * @return Positive if left is bigger than right, 0 if they are equal, and
 * negative if left is smaller than right.
 */
public int compareTo(final byte [] that) {
int diff = this.bytes.length - that.length;
return (diff != 0)?

@@ -135,9 +143,8 @@ public class ImmutableBytesWritable implements WritableComparable {
0, that.length);
}

/**
 * Are the two byte sequences equal?
 */
/** {@inheritDoc} */
@Override
public boolean equals(Object right_obj) {
if (right_obj instanceof ImmutableBytesWritable) {
return compareTo(right_obj) == 0;

@@ -145,9 +152,8 @@ public class ImmutableBytesWritable implements WritableComparable {
return false;
}

/**
 * Generate the stream of bytes as hex pairs separated by ' '.
 */
/** {@inheritDoc} */
@Override
public String toString() {
StringBuffer sb = new StringBuffer(3*this.bytes.length);
for (int idx = 0; idx < this.bytes.length; idx++) {

@@ -170,14 +176,14 @@ public class ImmutableBytesWritable implements WritableComparable {
public static class Comparator extends WritableComparator {
private BytesWritable.Comparator comparator =
new BytesWritable.Comparator();

/** constructor */
public Comparator() {
super(ImmutableBytesWritable.class);
}

/**
 * Compare the buffers in serialized form.
 */
/** {@inheritDoc} */
@Override
public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
return comparator.compare(b1, s1, l1, b2, s2, l2);
}
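Editor's note: the readFields()/write() pair above uses a length-prefixed encoding, an int length followed by the raw bytes. A quick round trip using only java.io shows that wire format (a sketch for illustration, not the project's test code):

```java
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;

public class LengthPrefixedDemo {
  public static void main(String[] args) throws IOException {
    byte[] original = "row-key".getBytes("UTF-8");

    // write(): an int length, then the raw bytes
    ByteArrayOutputStream buf = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(buf);
    out.writeInt(original.length);
    out.write(original, 0, original.length);

    // readFields(): read the length, allocate, then readFully
    DataInputStream in =
        new DataInputStream(new ByteArrayInputStream(buf.toByteArray()));
    byte[] copy = new byte[in.readInt()];
    in.readFully(copy, 0, copy.length);

    System.out.println(new String(copy, "UTF-8")); // prints "row-key"
  }
}
```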
@@ -56,22 +56,16 @@ public class KeyedData implements Writable {
return data;
}

//////////////////////////////////////////////////////////////////////////////
// Writable
//////////////////////////////////////////////////////////////////////////////

/* (non-Javadoc)
 * @see org.apache.hadoop.io.Writable#write(java.io.DataOutput)
 */
/** {@inheritDoc} */
public void write(DataOutput out) throws IOException {
key.write(out);
out.writeInt(this.data.length);
out.write(this.data);
}

/* (non-Javadoc)
 * @see org.apache.hadoop.io.Writable#readFields(java.io.DataInput)
 */
/** {@inheritDoc} */
public void readFields(DataInput in) throws IOException {
key.readFields(in);
this.data = new byte[in.readInt()];
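Editor's note: KeyedData above shows the substitution this commit applies throughout. Eclipse-generated `/* (non-Javadoc) ... @see ... */` block comments are plain comments that the javadoc tool ignores; they are replaced with `/** {@inheritDoc} */`, a real doc comment that tells javadoc to copy the description from the overridden or implemented method. In short:

```java
// Before: a plain block comment, invisible to javadoc
/* (non-Javadoc)
 * @see org.apache.hadoop.io.Writable#write(java.io.DataOutput)
 */

// After: a doc comment that inherits the interface method's description
/** {@inheritDoc} */
```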
@@ -61,6 +61,7 @@ public class KeyedDataArrayWritable implements Writable {

// Writable

/** {@inheritDoc} */
public void readFields(DataInput in) throws IOException {
int len = in.readInt();
m_data = new KeyedData[len];

@@ -70,6 +71,7 @@ public class KeyedDataArrayWritable implements Writable {
}
}

/** {@inheritDoc} */
public void write(DataOutput out) throws IOException {
int len = m_data.length;
out.writeInt(len);
@@ -66,9 +66,7 @@ public class GroupingTableMap extends TableMap {
job.set(GROUP_COLUMNS, groupColumns);
}

/* (non-Javadoc)
 * @see org.apache.hadoop.hbase.mapred.TableMap#configure(org.apache.hadoop.mapred.JobConf)
 */
/** {@inheritDoc} */
@Override
public void configure(JobConf job) {
super.configure(job);
@@ -70,9 +70,7 @@ public abstract class TableMap extends MapReduceBase implements Mapper {
job.set(TableInputFormat.COLUMN_LIST, columns);
}

/* (non-Javadoc)
 * @see org.apache.hadoop.mapred.MapReduceBase#configure(org.apache.hadoop.mapred.JobConf)
 */
/** {@inheritDoc} */
@Override
public void configure(JobConf job) {
super.configure(job);
@@ -69,40 +69,33 @@ public class TableSplit implements InputSplit {
return m_endRow;
}

/* (non-Javadoc)
 * @see org.apache.hadoop.mapred.InputSplit#getLength()
 */
/** {@inheritDoc} */
public long getLength() {
// Not clear how to obtain this... seems to be used only for sorting splits
return 0;
}

/* (non-Javadoc)
 * @see org.apache.hadoop.mapred.InputSplit#getLocations()
 */
/** {@inheritDoc} */
public String[] getLocations() {
// Return a random node from the cluster for now
return new String[] { };
}

/* (non-Javadoc)
 * @see org.apache.hadoop.io.Writable#readFields(java.io.DataInput)
 */
/** {@inheritDoc} */
public void readFields(DataInput in) throws IOException {
m_tableName.readFields(in);
m_startRow.readFields(in);
m_endRow.readFields(in);
}

/* (non-Javadoc)
 * @see org.apache.hadoop.io.Writable#write(java.io.DataOutput)
 */
/** {@inheritDoc} */
public void write(DataOutput out) throws IOException {
m_tableName.write(out);
m_startRow.write(out);
m_endRow.write(out);
}

/** {@inheritDoc} */
@Override
public String toString() {
return m_tableName +"," + m_startRow + "," + m_endRow;
@@ -89,6 +89,7 @@ public class BloomFilter extends Filter {
vector = new boolean[this.vectorSize];
}//end constructor

/** {@inheritDoc} */
@Override
public void add(Key key) {
if(key == null) {

@@ -103,6 +104,7 @@ public class BloomFilter extends Filter {
}
}//end add()

/** {@inheritDoc} */
@Override
public void and(Filter filter){
if(filter == null

@@ -119,6 +121,7 @@ public class BloomFilter extends Filter {
}
}//end and()

/** {@inheritDoc} */
@Override
public boolean membershipTest(Key key){
if(key == null) {

@@ -135,6 +138,7 @@ public class BloomFilter extends Filter {
return true;
}//end membershipTest()

/** {@inheritDoc} */
@Override
public void not(){
for(int i = 0; i < vectorSize; i++) {

@@ -142,6 +146,7 @@ public class BloomFilter extends Filter {
}
}//end not()

/** {@inheritDoc} */
@Override
public void or(Filter filter){
if(filter == null

@@ -158,6 +163,7 @@ public class BloomFilter extends Filter {
}
}//end or()

/** {@inheritDoc} */
@Override
public void xor(Filter filter){
if(filter == null

@@ -175,7 +181,7 @@ public class BloomFilter extends Filter {
}
}//end xor()

/** Returns a String representation of <i>this</i> Bloom filter. */
/** {@inheritDoc} */
@Override
public String toString(){
StringBuilder res = new StringBuilder();

@@ -186,7 +192,7 @@ public class BloomFilter extends Filter {
return res.toString();
}//end toString()

/** Returns a shallow copy of <i>this</i> Bloom filter. */
/** {@inheritDoc} */
@Override
public Object clone(){
BloomFilter bf = new BloomFilter(vectorSize, nbHash);

@@ -194,11 +200,13 @@ public class BloomFilter extends Filter {
return bf;
}//end clone()

/** {@inheritDoc} */
@Override
public boolean equals(Object o) {
return this.compareTo(o) == 0;
}

/** {@inheritDoc} */
@Override
public int hashCode() {
int result = super.hashCode();

@@ -210,6 +218,7 @@ public class BloomFilter extends Filter {

// Writable

/** {@inheritDoc} */
@Override
public void write(DataOutput out) throws IOException {
super.write(out);

@@ -218,6 +227,7 @@ public class BloomFilter extends Filter {
}
}

/** {@inheritDoc} */
@Override
public void readFields(DataInput in) throws IOException {
super.readFields(in);

@@ -229,6 +239,7 @@ public class BloomFilter extends Filter {

// Comparable

/** {@inheritDoc} */
@Override
public int compareTo(Object o) {
int result = super.compareTo(o);
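Editor's note: add() and membershipTest() above operate on a boolean vector indexed by nbHash hash functions. A compact illustration of the underlying idea; the double-hashing scheme below is an assumed simplification, not the HashFunction the Hadoop implementation actually uses:

```java
import java.util.Arrays;
import java.util.BitSet;

// Simplified Bloom filter: add() sets k bits, membershipTest() checks them.
class SimpleBloomFilter {
  private final BitSet vector;
  private final int vectorSize;
  private final int nbHash;

  SimpleBloomFilter(int vectorSize, int nbHash) {
    this.vectorSize = vectorSize;
    this.nbHash = nbHash;
    this.vector = new BitSet(vectorSize);
  }

  // Illustrative double hashing: bit i = (h1 + i * h2) mod vectorSize.
  private int bit(byte[] key, int i) {
    int h1 = Arrays.hashCode(key);
    int h2 = h1 * 31 + 17;
    return Math.floorMod(h1 + i * h2, vectorSize);
  }

  void add(byte[] key) {
    for (int i = 0; i < nbHash; i++) vector.set(bit(key, i));
  }

  // May return a false positive, never a false negative.
  boolean membershipTest(byte[] key) {
    for (int i = 0; i < nbHash; i++)
      if (!vector.get(bit(key, i))) return false;
    return true;
  }
}
```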
@@ -84,6 +84,7 @@ public final class CountingBloomFilter extends Filter {
vector = new byte[vectorSize];
}//end constructor

/** {@inheritDoc} */
@Override
public void add(Key key) {
if(key == null) {

@@ -122,6 +123,7 @@ public final class CountingBloomFilter extends Filter {
}
}//end delete

/** {@inheritDoc} */
@Override
public void and(Filter filter){
if(filter == null

@@ -137,6 +139,7 @@ public final class CountingBloomFilter extends Filter {
}
}//end and()

/** {@inheritDoc} */
@Override
public boolean membershipTest(Key key){
if(key == null) {

@@ -155,12 +158,14 @@ public final class CountingBloomFilter extends Filter {
return true;
}//end membershipTest()

/** {@inheritDoc} */
@Override
public void not(){
throw new UnsupportedOperationException("not() is undefined for "
+ this.getClass().getName());
}//end not()

/** {@inheritDoc} */
@Override
public void or(Filter filter){
if(filter == null

@@ -177,6 +182,7 @@ public final class CountingBloomFilter extends Filter {
}
}//end or()

/** {@inheritDoc} */
@Override
@SuppressWarnings("unused")
public void xor(Filter filter){

@@ -184,6 +190,7 @@ public final class CountingBloomFilter extends Filter {
+ this.getClass().getName());
}//end xor()

/** {@inheritDoc} */
@Override
public String toString(){
StringBuilder res = new StringBuilder();

@@ -198,6 +205,7 @@ public final class CountingBloomFilter extends Filter {
return res.toString();
}//end toString()

/** {@inheritDoc} */
@Override
public Object clone(){
CountingBloomFilter cbf = new CountingBloomFilter(vectorSize, nbHash);

@@ -205,11 +213,13 @@ public final class CountingBloomFilter extends Filter {
return cbf;
}//end clone()

/** {@inheritDoc} */
@Override
public boolean equals(Object o) {
return this.compareTo(o) == 0;
}

/** {@inheritDoc} */
@Override
public int hashCode() {
int result = super.hashCode();

@@ -221,6 +231,7 @@ public final class CountingBloomFilter extends Filter {

// Writable

/** {@inheritDoc} */
@Override
public void write(DataOutput out) throws IOException {
super.write(out);

@@ -229,6 +240,7 @@ public final class CountingBloomFilter extends Filter {
}
}

/** {@inheritDoc} */
@Override
public void readFields(DataInput in) throws IOException {
super.readFields(in);

@@ -240,6 +252,7 @@ public final class CountingBloomFilter extends Filter {

// Comparable

/** {@inheritDoc} */
@Override
public int compareTo(Object o) {
int result = super.compareTo(o);
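Editor's note: CountingBloomFilter swaps the boolean vector for a byte[] of counters, which is what makes deletion possible, hence the `}//end delete` above. A sketch of that mechanism, again with assumed simplified hashing rather than the Hadoop HashFunction:

```java
import java.util.Arrays;

// Counter-based variant: deleting decrements counters instead of clearing
// bits, so removing one key cannot erase the evidence left by another.
class SimpleCountingBloomFilter {
  private final byte[] vector;
  private final int vectorSize, nbHash;

  SimpleCountingBloomFilter(int vectorSize, int nbHash) {
    this.vectorSize = vectorSize;
    this.nbHash = nbHash;
    this.vector = new byte[vectorSize];
  }

  private int bit(byte[] key, int i) {
    int h1 = Arrays.hashCode(key), h2 = h1 * 31 + 17;
    return Math.floorMod(h1 + i * h2, vectorSize);
  }

  void add(byte[] key)    { for (int i = 0; i < nbHash; i++) vector[bit(key, i)]++; }
  void delete(byte[] key) { for (int i = 0; i < nbHash; i++) vector[bit(key, i)]--; }

  boolean membershipTest(byte[] key) {
    for (int i = 0; i < nbHash; i++)
      if (vector[bit(key, i)] == 0) return false;
    return true;
  }
}
```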
@@ -118,6 +118,7 @@ public class DynamicBloomFilter extends Filter {
matrix[0] = new BloomFilter(this.vectorSize, this.nbHash);
}//end constructor

/** {@inheritDoc} */
@Override
public void add(Key key){
if(key == null) {

@@ -137,6 +138,7 @@ public class DynamicBloomFilter extends Filter {
currentNbRecord++;
}//end add()

/** {@inheritDoc} */
@Override
public void and(Filter filter) {
if(filter == null

@@ -157,6 +159,7 @@ public class DynamicBloomFilter extends Filter {
}
}//end and()

/** {@inheritDoc} */
@Override
public boolean membershipTest(Key key){
if(key == null) {

@@ -172,6 +175,7 @@ public class DynamicBloomFilter extends Filter {
return false;
}//end membershipTest()

/** {@inheritDoc} */
@Override
public void not(){
for(int i = 0; i < matrix.length; i++) {

@@ -179,6 +183,7 @@ public class DynamicBloomFilter extends Filter {
}
}//end not()

/** {@inheritDoc} */
@Override
public void or(Filter filter){
if(filter == null

@@ -198,6 +203,7 @@ public class DynamicBloomFilter extends Filter {
}
}//end or()

/** {@inheritDoc} */
@Override
public void xor(Filter filter){
if(filter == null

@@ -217,6 +223,7 @@ public class DynamicBloomFilter extends Filter {
}
}//end xor()

/** {@inheritDoc} */
@Override
public String toString(){
StringBuilder res = new StringBuilder();

@@ -228,6 +235,7 @@ public class DynamicBloomFilter extends Filter {
return res.toString();
}//end toString()

/** {@inheritDoc} */
@Override
public Object clone(){
DynamicBloomFilter dbf = new DynamicBloomFilter(vectorSize, nbHash, nr);

@@ -239,11 +247,13 @@ public class DynamicBloomFilter extends Filter {
return dbf;
}//end clone()

/** {@inheritDoc} */
@Override
public boolean equals(Object o) {
return this.compareTo(o) == 0;
}

/** {@inheritDoc} */
@Override
public int hashCode() {
int result = super.hashCode();

@@ -255,6 +265,7 @@ public class DynamicBloomFilter extends Filter {

// Writable

/** {@inheritDoc} */
@Override
public void write(DataOutput out) throws IOException {
super.write(out);

@@ -263,6 +274,7 @@ public class DynamicBloomFilter extends Filter {
}
}

/** {@inheritDoc} */
@Override
public void readFields(DataInput in) throws IOException {
super.readFields(in);

@@ -274,6 +286,7 @@ public class DynamicBloomFilter extends Filter {

// Comparable

/** {@inheritDoc} */
@Override
public int compareTo(Object o) {
int result = super.compareTo(o);
@@ -182,6 +182,7 @@ public abstract class Filter implements WritableComparable {
}
}//end add()

/** {@inheritDoc} */
@Override
public int hashCode() {
int result = Integer.valueOf(this.nbHash).hashCode();

@@ -191,17 +192,13 @@ public abstract class Filter implements WritableComparable {

// Writable interface

/* (non-Javadoc)
 * @see org.apache.hadoop.io.Writable#write(java.io.DataOutput)
 */
/** {@inheritDoc} */
public void write(DataOutput out) throws IOException {
out.writeInt(this.nbHash);
out.writeInt(this.vectorSize);
}

/* (non-Javadoc)
 * @see org.apache.hadoop.io.Writable#readFields(java.io.DataInput)
 */
/** {@inheritDoc} */
public void readFields(DataInput in) throws IOException {
this.nbHash = in.readInt();
this.vectorSize = in.readInt();

@@ -210,9 +207,7 @@ public abstract class Filter implements WritableComparable {

// Comparable interface

/* (non-Javadoc)
 * @see java.lang.Comparable#compareTo(java.lang.Object)
 */
/** {@inheritDoc} */
public int compareTo(Object o) {
Filter other = (Filter)o;
int result = this.vectorSize - other.vectorSize;
@@ -121,11 +121,13 @@ public class Key implements WritableComparable {
this.weight++;
}//end incrementWeight()

/** {@inheritDoc} */
@Override
public boolean equals(Object o) {
return this.compareTo(o) == 0;
}

/** {@inheritDoc} */
@Override
public int hashCode() {
int result = 0;

@@ -138,18 +140,14 @@ public class Key implements WritableComparable {

// Writable

/* (non-Javadoc)
 * @see org.apache.hadoop.io.Writable#write(java.io.DataOutput)
 */
/** {@inheritDoc} */
public void write(DataOutput out) throws IOException {
out.writeInt(bytes.length);
out.write(bytes);
out.writeDouble(weight);
}

/* (non-Javadoc)
 * @see org.apache.hadoop.io.Writable#readFields(java.io.DataInput)
 */
/** {@inheritDoc} */
public void readFields(DataInput in) throws IOException {
this.bytes = new byte[in.readInt()];
in.readFully(this.bytes);

@@ -158,9 +156,7 @@ public class Key implements WritableComparable {

// Comparable

/* (non-Javadoc)
 * @see java.lang.Comparable#compareTo(java.lang.Object)
 */
/** {@inheritDoc} */
public int compareTo(Object o) {
Key other = (Key)o;
@@ -105,6 +105,7 @@ implements RemoveScheme {
createVector();
}//end constructor

/** {@inheritDoc} */
@Override
public void add(Key key){
if(key == null) {

@@ -392,11 +393,13 @@ implements RemoveScheme {
}//end for -i
}//end createVector()

/** {@inheritDoc} */
@Override
public boolean equals(Object o) {
return this.compareTo(o) == 0;
}

/** {@inheritDoc} */
@Override
public int hashCode() {
int result = super.hashCode();

@@ -414,6 +417,7 @@ implements RemoveScheme {

// Writable

/** {@inheritDoc} */
@Override
public void write(DataOutput out) throws IOException {
super.write(out);

@@ -436,6 +440,7 @@ implements RemoveScheme {
}
}

/** {@inheritDoc} */
@Override
public void readFields(DataInput in) throws IOException {
super.readFields(in);

@@ -465,6 +470,7 @@ implements RemoveScheme {

// Comparable

/** {@inheritDoc} */
@Override
public int compareTo(Object o) {
int result = super.compareTo(o);
@@ -29,12 +29,26 @@ import org.apache.log4j.Level;
import org.apache.log4j.Logger;
import org.apache.log4j.PatternLayout;

/**
 * Initializes test environment
 */
public class StaticTestEnvironment {
private StaticTestEnvironment() {}; // Not instantiable
private StaticTestEnvironment() {} // Not instantiable

/** configuration parameter name for test directory */
public static final String TEST_DIRECTORY_KEY = "test.build.data";

/** set to true if "DEBUGGING" is set in the environment */
public static boolean debugging = false;

/**
 * Initializes parameters used in the test environment:
 *
 * Sets the configuration parameter TEST_DIRECTORY_KEY if not already set.
 * Sets the boolean debugging if "DEBUGGING" is set in the environment.
 * If debugging is enabled, reconfigures logging so that the root log level is
 * set to WARN and the logging level for the package is set to DEBUG.
 */
@SuppressWarnings("unchecked")
public static void initialize() {
String value = null;
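Editor's note: the initialize() javadoc above describes the debugging branch: root logger to WARN, the package logger to DEBUG. Using the log4j 1.x types imported in this hunk, that reconfiguration typically looks like the following; this is a sketch of the documented behavior, not the method body from the commit:

```java
import org.apache.log4j.Level;
import org.apache.log4j.Logger;

// Quiet everything, then turn the package of interest up to DEBUG.
public class LoggingSetupSketch {
  public static void main(String[] args) {
    Logger.getRootLogger().setLevel(Level.WARN);
    Logger.getLogger("org.apache.hadoop.hbase").setLevel(Level.DEBUG);
  }
}
```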
@@ -26,6 +26,7 @@ import junit.framework.TestCase;
 * Test comparing HBase objects.
 */
public class TestCompare extends TestCase {
/** test case */
public void testHRegionInfo() {
HRegionInfo a = new HRegionInfo(1, new HTableDescriptor("a"), null, null);
HRegionInfo b = new HRegionInfo(2, new HTableDescriptor("b"), null, null);
@@ -31,6 +31,7 @@ import org.apache.hadoop.io.SequenceFile.Reader;
/** JUnit test case for HLog */
public class TestHLog extends HBaseTestCase implements HConstants {

/** {@inheritDoc} */
@Override
public void setUp() throws Exception {
super.setUp();

@@ -103,6 +104,7 @@ public class TestHLog extends HBaseTestCase implements HConstants {
}
}

/** {@inheritDoc} */
@Override
public void tearDown() throws Exception {
super.tearDown();
@@ -22,6 +22,9 @@ package org.apache.hadoop.hbase;
/** Tests region merging */
public class TestMergeMeta extends AbstractMergeTestBase {

/**
 * test case
 */
public void testMergeMeta() {
try {
HMerge.merge(conf, fs, HConstants.META_TABLE_NAME);
@@ -21,8 +21,15 @@ package org.apache.hadoop.hbase;

import java.io.IOException;

/**
 * Tests merging a normal table's regions
 */
public class TestMergeTable extends AbstractMergeTestBase {

/**
 * Test case
 * @throws IOException
 */
public void testMergeTable() throws IOException {
MiniHBaseCluster hCluster = new MiniHBaseCluster(conf, 1, dfsCluster);
try {
@@ -22,7 +22,14 @@ import org.apache.hadoop.io.Text;

import junit.framework.TestCase;

/**
 * Tests toString methods.
 */
public class TestToString extends TestCase {
/**
 * tests toString methods on HServerAddress, HServerInfo
 * @throws Exception
 */
public void testServerInfo() throws Exception {
final String hostport = "127.0.0.1:9999";
HServerAddress address = new HServerAddress(hostport);

@@ -32,6 +39,10 @@ public class TestToString extends TestCase {
"address: " + hostport + ", startcode: " + -1);
}

/**
 * Tests toString method on HRegionInfo
 * @throws Exception
 */
public void testHRegionInfo() throws Exception {
HTableDescriptor htd = new HTableDescriptor("hank");
htd.addFamily(new HColumnDescriptor("hankfamily:"));
@@ -28,20 +28,33 @@ import org.apache.hadoop.io.Text;

import junit.framework.TestCase;

/**
 * Tests for the page row filter
 */
public class TestPageRowFilter extends TestCase {

RowFilterInterface mainFilter;
final int ROW_LIMIT = 3;

/** {@inheritDoc} */
@Override
protected void setUp() throws Exception {
super.setUp();
mainFilter = new PageRowFilter(ROW_LIMIT);
}

/**
 * test page size filter
 * @throws Exception
 */
public void testPageSize() throws Exception {
pageSizeTests(mainFilter);
}

/**
 * Test filter serialization
 * @throws Exception
 */
public void testSerialization() throws Exception {
// Decompose mainFilter to bytes.
ByteArrayOutputStream stream = new ByteArrayOutputStream();
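Editor's note: the filter tests in this and the following sections share the shape hinted at by the "Decompose mainFilter to bytes" comment: write the filter to a byte array, read it back into a fresh instance, and re-run the behavioral checks on the copy. A generic version of that round trip for any Writable; the helper name and structure here are illustrative, not the project's test utility:

```java
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import org.apache.hadoop.io.Writable;

public final class WritableRoundTrip {
  // Serialize `original`, deserialize into `empty`, and return it so the
  // caller can assert the copy behaves like the original.
  static <T extends Writable> T roundTrip(T original, T empty)
      throws IOException {
    ByteArrayOutputStream buf = new ByteArrayOutputStream();
    original.write(new DataOutputStream(buf));         // decompose to bytes
    empty.readFields(new DataInputStream(
        new ByteArrayInputStream(buf.toByteArray()))); // reconstitute
    return empty;
  }
}
```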
@@ -31,6 +31,9 @@ import junit.framework.TestCase;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.io.Text;

/**
 * Tests for regular expression row filter
 */
public class TestRegExpRowFilter extends TestCase {
TreeMap<Text, byte []> colvalues;
RowFilterInterface mainFilter;

@@ -39,6 +42,7 @@ public class TestRegExpRowFilter extends TestCase {
byte [] GOOD_BYTES = "abc".getBytes();
final String HOST_PREFIX = "org.apache.site-";

/** {@inheritDoc} */
@Override
protected void setUp() throws Exception {
super.setUp();

@@ -49,18 +53,34 @@ public class TestRegExpRowFilter extends TestCase {
this.mainFilter = new RegExpRowFilter(HOST_PREFIX + ".*", colvalues);
}

/**
 * Tests filtering using a regex on the row key
 * @throws Exception
 */
public void testRegexOnRow() throws Exception {
regexRowTests(mainFilter);
}

/**
 * Tests filtering using a regex on row and column
 * @throws Exception
 */
public void testRegexOnRowAndColumn() throws Exception {
regexRowColumnTests(mainFilter);
}

/**
 * Only return values that are not null
 * @throws Exception
 */
public void testFilterNotNull() throws Exception {
filterNotNullTests(mainFilter);
}

/**
 * Test serialization
 * @throws Exception
 */
public void testSerialization() throws Exception {
// Decompose mainFilter to bytes.
ByteArrayOutputStream stream = new ByteArrayOutputStream();
@@ -32,6 +32,9 @@ import org.apache.hadoop.io.Text;

import junit.framework.TestCase;

/**
 * Tests filter sets
 */
public class TestRowFilterSet extends TestCase {

RowFilterInterface filterMPALL;

@@ -42,7 +45,9 @@ public class TestRowFilterSet extends TestCase {
final byte[] GOOD_BYTES = "abc".getBytes();
final byte[] BAD_BYTES = "def".getBytes();
TreeMap<Text, byte[]> colvalues;

/** {@inheritDoc} */
@Override
protected void setUp() throws Exception {
super.setUp();

@@ -62,14 +67,26 @@ public class TestRowFilterSet extends TestCase {
filters);
}

/**
 * Test "must pass one"
 * @throws Exception
 */
public void testMPONE() throws Exception {
MPONETests(filterMPONE);
}

/**
 * Test "must pass all"
 * @throws Exception
 */
public void testMPALL() throws Exception {
MPALLTests(filterMPALL);
}

/**
 * Test serialization
 * @throws Exception
 */
public void testSerialization() throws Exception {
// Decompose filterMPALL to bytes.
ByteArrayOutputStream stream = new ByteArrayOutputStream();
@@ -28,22 +28,35 @@ import org.apache.hadoop.io.Text;

import junit.framework.TestCase;

/**
 * Tests the stop row filter
 */
public class TestStopRowFilter extends TestCase {
private final Text STOP_ROW = new Text("stop_row");
private final Text GOOD_ROW = new Text("good_row");
private final Text PAST_STOP_ROW = new Text("zzzzzz");

RowFilterInterface mainFilter;

/** {@inheritDoc} */
@Override
protected void setUp() throws Exception {
super.setUp();
mainFilter = new StopRowFilter(STOP_ROW);
}

/**
 * Tests identification of the stop row
 * @throws Exception
 */
public void testStopRowIdentification() throws Exception {
stopRowTests(mainFilter);
}

/**
 * Tests serialization
 * @throws Exception
 */
public void testSerialization() throws Exception {
// Decompose mainFilter to bytes.
ByteArrayOutputStream stream = new ByteArrayOutputStream();
@@ -28,11 +28,16 @@ import junit.framework.TestCase;

import org.apache.hadoop.io.Text;

/**
 * Tests for the while-match filter
 */
public class TestWhileMatchRowFilter extends TestCase {

WhileMatchRowFilter wmStopRowFilter;
WhileMatchRowFilter wmRegExpRowFilter;

/** {@inheritDoc} */
@Override
protected void setUp() throws Exception {
super.setUp();
wmStopRowFilter = new WhileMatchRowFilter(new StopRowFilter(

@@ -41,14 +46,26 @@ public class TestWhileMatchRowFilter extends TestCase {
".*regex.*"));
}

/**
 * Tests while match stop row
 * @throws Exception
 */
public void testWhileMatchStopRow() throws Exception {
whileMatchStopRowTests(wmStopRowFilter);
}

/**
 * Tests while match regex
 * @throws Exception
 */
public void testWhileMatchRegExp() throws Exception {
whileMatchRegExpTests(wmRegExpRowFilter);
}

/**
 * Tests serialization
 * @throws Exception
 */
public void testSerialization() throws Exception {
// Decompose wmRegExpRowFilter to bytes.
ByteArrayOutputStream stream = new ByteArrayOutputStream();
@@ -21,13 +21,13 @@ package org.apache.hadoop.hbase.shell;

import junit.framework.TestCase;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.HClient;
import org.apache.hadoop.hbase.shell.generated.ParseException;
import org.apache.hadoop.hbase.shell.generated.Parser;

/**
 * Tests for HBase shell
 */
public class TestHBaseShell extends TestCase {
/** test parsing */
public void testParse() {
String queryString1 = "SELECT test_table WHERE row='row_key' and " +
"column='column_key';";
@@ -21,16 +21,27 @@ package org.apache.hadoop.hbase.util;

import junit.framework.TestCase;

/**
 * Tests url transformations
 */
public class TestKeying extends TestCase {

/** {@inheritDoc} */
@Override
protected void setUp() throws Exception {
super.setUp();
}

/** {@inheritDoc} */
@Override
protected void tearDown() throws Exception {
super.tearDown();
}

/**
 * Test url transformations
 * @throws Exception
 */
public void testURI() throws Exception {
checkTransform("http://abc:bcd@www.example.com/index.html" +
"?query=something#middle");