HADOOP-1913 Build a Lucene index on an HBase table
git-svn-id: https://svn.apache.org/repos/asf/lucene/hadoop/trunk/src/contrib/hbase@578987 13f79535-47bb-0310-9956-ffa450edef68
commit 8f3b423b73
parent 6aab1c914f
@@ -20,6 +20,8 @@ Trunk (unreleased changes)
     set of operators, for creating, altering, dropping, inserting,
     deleting, and selecting, etc., data in hbase.
     (Inchul Song and Edward Yoon via Stack)
+    HADOOP-1913 Build a Lucene index on an HBase table
+    (Ning Li via Stack)
 
   OPTIMIZATIONS
 
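The Lucene indexing code that HADOOP-1913 contributes is not among the hunks extracted below; only supporting changes to the client, the mini cluster, and the tests are shown. As a rough, hypothetical sketch of what building a Lucene index over an HBase table involves, the class below scans a table with the client API that appears later in this diff and feeds one Lucene document per row to a stock IndexWriter, assuming Lucene 2.x-era APIs. TableIndexerSketch, its index() method, and the field layout are invented for illustration; only obtainScanner(), HStoreKey, HConstants, and the Lucene calls are existing APIs.

// Hypothetical sketch, not code from this commit: one Lucene document per
// HBase row, with the row key stored and each column value tokenized.
import java.util.Map;
import java.util.TreeMap;

import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HScannerInterface;
import org.apache.hadoop.hbase.HStoreKey;
import org.apache.hadoop.hbase.HTable;
import org.apache.hadoop.io.Text;
import org.apache.lucene.analysis.standard.StandardAnalyzer;
import org.apache.lucene.document.Document;
import org.apache.lucene.document.Field;
import org.apache.lucene.index.IndexWriter;

public class TableIndexerSketch {
  /** Scans every row of the table and adds one document per row to indexDir. */
  public static void index(HTable table, String indexDir) throws Exception {
    IndexWriter writer = new IndexWriter(indexDir, new StandardAnalyzer(), true);
    HScannerInterface scanner =
      table.obtainScanner(HConstants.COLUMN_FAMILY_ARRAY, new Text());
    try {
      HStoreKey key = new HStoreKey();
      TreeMap<Text, byte[]> results = new TreeMap<Text, byte[]>();
      while (scanner.next(key, results)) {
        Document doc = new Document();
        // Store the row key so a search hit can be mapped back to its row.
        doc.add(new Field("row", key.getRow().toString(),
            Field.Store.YES, Field.Index.UN_TOKENIZED));
        for (Map.Entry<Text, byte[]> e : results.entrySet()) {
          // Index each column value under a field named after the column.
          doc.add(new Field(e.getKey().toString(),
              new String(e.getValue(), HConstants.UTF8_ENCODING),
              Field.Store.NO, Field.Index.TOKENIZED));
        }
        writer.addDocument(doc);
        results.clear();
      }
    } finally {
      scanner.close();
      writer.close();
    }
  }
}

The contrib itself presumably does this work in a MapReduce job so the indexing parallelizes across regions, but that code is not visible in this extract.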
@@ -92,7 +92,8 @@ public class HConnectionManager implements HConstants {
 
   /* encapsulates finding the servers for an HBase instance */
   private static class TableServers implements HConnection, HConstants {
-    private final Log LOG = LogFactory.getLog(this.getClass().getName());
+    private static final Log LOG = LogFactory.getLog(TableServers.class.
+      getName());
     private final Class<? extends HRegionInterface> serverInterfaceClass;
     private final long threadWakeFrequency;
     private final long pause;
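The logger change above swaps a per-instance lookup for a single per-class logger. As a generic illustration of the pattern (the Example class is made up, not part of the patch), a static commons-logging logger is resolved once when the class loads and is usable from static code as well:

import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;

public class Example {
  // Resolved once per class; shared by every instance and by static methods.
  private static final Log LOG = LogFactory.getLog(Example.class.getName());

  public void doWork() {
    LOG.info("doing work");
  }
}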
@@ -27,9 +27,12 @@ import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.io.Text;
+import org.apache.log4j.Logger;
 
 /** Abstract base class for merge tests */
 public abstract class AbstractMergeTestBase extends HBaseTestCase {
+  static final Logger LOG =
+    Logger.getLogger(AbstractMergeTestBase.class.getName());
   protected static final Text COLUMN_NAME = new Text("contents:");
   protected final Random rand = new Random();
   protected HTableDescriptor desc;
@@ -119,8 +122,12 @@ public abstract class AbstractMergeTestBase extends HBaseTestCase {
     if (dfsCluster != null) {
       dfsCluster.shutdown();
     }
-    if (fs != null) {
-      fs.close();
+    if (this.fs != null) {
+      try {
+        this.fs.close();
+      } catch (IOException e) {
+        LOG.info("During tear down got a " + e.getMessage());
+      }
     }
   }
 
@@ -151,5 +158,4 @@ public abstract class AbstractMergeTestBase extends HBaseTestCase {
     region.getRegionInfo().offLine = true;
     return region;
   }
-
 }
@@ -97,7 +97,7 @@ public class MiniHBaseCluster implements HConstants {
 
     this.conf = conf;
     this.fs = dfsCluster.getFileSystem();
-    this.cluster = null;
+    this.cluster = dfsCluster;
     init(nRegionNodes);
   }
 
@@ -390,7 +390,7 @@ public class MiniHBaseCluster implements HConstants {
       regionServerThreads.size() + " region server(s)");
   }
 
-  void shutdown() {
+  public void shutdown() {
     MiniHBaseCluster.shutdown(this.masterThread, this.regionThreads);
 
     try {
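Making shutdown() public means callers outside the org.apache.hadoop.hbase package, presumably including the new contrib's tests, can stop a mini cluster they are handed. A minimal hypothetical helper (ClusterStopper and its stop() method are invented; only the now-public shutdown() call comes from the hunk above):

import org.apache.hadoop.hbase.MiniHBaseCluster;

public class ClusterStopper {
  /** Stops a mini cluster created elsewhere by the test harness. */
  public static void stop(MiniHBaseCluster cluster) {
    if (cluster != null) {
      // Callable from any package now that shutdown() is public.
      cluster.shutdown();
    }
  }
}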
@@ -22,6 +22,8 @@ package org.apache.hadoop.hbase;
 import java.io.IOException;
 import java.util.TreeMap;
 
+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.io.Text;
 import org.apache.log4j.Level;
 import org.apache.log4j.Logger;
@@ -30,6 +32,7 @@ import org.apache.log4j.Logger;
  * Tests region server failover when a region server exits.
  */
 public class TestRegionServerAbort extends HBaseClusterTestCase {
+  private final Log LOG = LogFactory.getLog(this.getClass().getName());
   private HTable table;
 
   /** constructor */
@@ -39,7 +42,8 @@ public class TestRegionServerAbort extends HBaseClusterTestCase {
     conf.setInt("ipc.client.connect.max.retries", 5); // and number of retries
     conf.setInt("hbase.client.retries.number", 5); // reduce HBase retries
     Logger.getRootLogger().setLevel(Level.WARN);
-    Logger.getLogger(this.getClass().getPackage().getName()).setLevel(Level.DEBUG);
+    Logger.getLogger(this.getClass().getPackage().getName()).
+      setLevel(Level.DEBUG);
   }
 
   /**
@@ -68,14 +72,14 @@ public class TestRegionServerAbort extends HBaseClusterTestCase {
     this.cluster.startRegionServer();
     // Now shutdown the region server and wait for it to go down.
     this.cluster.abortRegionServer(0);
-    this.cluster.waitOnRegionServer(0);
-    // Verify that the client can find the data after the region has been moved
-    // to a different server
-    HScannerInterface scanner =
-      table.obtainScanner(HConstants.COLUMN_FAMILY_ARRAY, new Text());
+    LOG.info(this.cluster.waitOnRegionServer(0) + " has been shutdown");
+    HScannerInterface scanner = null;
 
     try {
+      // Verify that the client can find the data after the region has moved
+      // to a different server
+      scanner =
+        table.obtainScanner(HConstants.COLUMN_FAMILY_ARRAY, new Text());
+      LOG.info("Obtained scanner " + scanner);
       HStoreKey key = new HStoreKey();
       TreeMap<Text, byte[]> results = new TreeMap<Text, byte[]>();
       while (scanner.next(key, results)) {
@@ -83,11 +87,13 @@ public class TestRegionServerAbort extends HBaseClusterTestCase {
         assertEquals(1, results.size());
         byte[] bytes = results.get(HConstants.COLUMN_FAMILY);
         assertNotNull(bytes);
-        assertTrue(tableName.equals(new String(bytes, HConstants.UTF8_ENCODING)));
+        assertTrue(tableName.equals(new String(bytes,
+          HConstants.UTF8_ENCODING)));
       }
-      System.out.println("Success!");
+      LOG.info("Success!");
     } finally {
+      LOG.info("Closing scanner " + scanner);
      scanner.close();
     }
   }
 }
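One detail of the reworked test worth noting, though the patch leaves it as is: scanner starts out null and is only assigned inside the try block, so if obtainScanner() threw, the unconditional scanner.close() in the finally block would hit a NullPointerException that masks the original failure. A null-guarded close along these lines would avoid that (ScannerUtil and closeIfOpen() are hypothetical names, not part of the patch):

import java.io.IOException;

import org.apache.commons.logging.Log;
import org.apache.hadoop.hbase.HScannerInterface;

public class ScannerUtil {
  /** Closes the scanner only if it was actually opened. */
  static void closeIfOpen(HScannerInterface scanner, Log log) throws IOException {
    if (scanner != null) {
      log.info("Closing scanner " + scanner);
      scanner.close();
    }
  }
}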
@@ -118,7 +118,11 @@ public class TestTableMapReduce extends MultiRegionTable {
     }
 
     if (fs != null) {
-      fs.close();
+      try {
+        fs.close();
+      } catch (IOException e) {
+        LOG.info("During tear down got a " + e.getMessage());
+      }
     }
   }
 