HBASE-9558 PerformanceEvaluation is in hbase-server, and creates a dependency to MiniDFSCluster
git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1524985 13f79535-47bb-0310-9956-ffa450edef68
parent 1bd014c106
commit 1eb261ab42
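With the --miniCluster mode removed below, PerformanceEvaluation only ever runs against a cluster that is already up. Going by the usage string patched further down, an invocation now looks roughly like the following line; the row count, command and client count are illustrative values, not taken from this commit:

  java org.apache.hadoop.hbase.PerformanceEvaluation --nomapred --rows=100000 randomWrite 1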
@@ -157,13 +157,6 @@ limitations under the License.
       <groupId>com.yammer.metrics</groupId>
       <artifactId>metrics-core</artifactId>
     </dependency>
-    <!-- This was marked as test dep in earlier pom, but was scoped compile. Where
-      do we actually need it? -->
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-minicluster</artifactId>
-      <version>${hadoop-two.version}</version>
-    </dependency>
     <dependency>
       <groupId>commons-lang</groupId>
       <artifactId>commons-lang</artifactId>
@@ -347,10 +347,6 @@
       <groupId>com.github.stephenc.high-scale-lib</groupId>
       <artifactId>high-scale-lib</artifactId>
     </dependency>
-    <dependency>
-      <groupId>commons-codec</groupId>
-      <artifactId>commons-codec</artifactId>
-    </dependency>
     <dependency>
       <groupId>commons-io</groupId>
       <artifactId>commons-io</artifactId>
@@ -423,10 +419,6 @@
       <groupId>org.codehaus.jackson</groupId>
       <artifactId>jackson-jaxrs</artifactId>
     </dependency>
-    <dependency>
-      <groupId>org.codehaus.jackson</groupId>
-      <artifactId>jackson-xc</artifactId>
-    </dependency>
     <dependency>
       <groupId>tomcat</groupId>
       <artifactId>jasper-compiler</artifactId>
@@ -465,11 +457,6 @@
       <artifactId>stax-api</artifactId>
     </dependency>
     <!-- Test Dependencies -->
-    <dependency>
-      <groupId>org.codehaus.jettison</groupId>
-      <artifactId>jettison</artifactId>
-      <scope>test</scope>
-    </dependency>
     <dependency>
       <groupId>org.cloudera.htrace</groupId>
       <artifactId>htrace-core</artifactId>
@@ -625,18 +612,10 @@
       <artifactId>hadoop-hdfs</artifactId>
       <type>test-jar</type>
     </dependency>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-client</artifactId>
-    </dependency>
     <dependency>
       <groupId>org.apache.hadoop</groupId>
       <artifactId>hadoop-annotations</artifactId>
     </dependency>
-    <dependency>
-      <groupId>org.apache.hadoop</groupId>
-      <artifactId>hadoop-minicluster</artifactId>
-    </dependency>
   </dependencies>
   <build>
     <plugins>
@@ -46,7 +46,6 @@ import org.apache.hadoop.hbase.client.Get;
 import org.apache.hadoop.hbase.client.HBaseAdmin;
 import org.apache.hadoop.hbase.client.HConnection;
 import org.apache.hadoop.hbase.client.HConnectionManager;
-import org.apache.hadoop.hbase.client.HTable;
 import org.apache.hadoop.hbase.client.HTableInterface;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
@@ -63,12 +62,9 @@ import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
 import org.apache.hadoop.hbase.io.compress.Compression;
 import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.util.Bytes;
-import org.apache.hadoop.hbase.util.FSUtils;
 import org.apache.hadoop.hbase.util.Hash;
 import org.apache.hadoop.hbase.util.MurmurHash;
 import org.apache.hadoop.hbase.util.Pair;
-import org.apache.hadoop.hbase.zookeeper.MiniZooKeeperCluster;
-import org.apache.hadoop.hdfs.MiniDFSCluster;
 import org.apache.hadoop.conf.Configured;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.NullWritable;
@@ -122,7 +118,6 @@ public class PerformanceEvaluation extends Configured implements Tool {

   protected Map<String, CmdDescriptor> commands = new TreeMap<String, CmdDescriptor>();

-  private boolean miniCluster = false;
   private boolean nomapred = false;
   private int rowPrefixLength = DEFAULT_ROW_PREFIX_LENGTH;
   private int N = 1;
@@ -1247,28 +1242,6 @@ public class PerformanceEvaluation extends Configured implements Tool {

   private void runTest(final Class<? extends Test> cmd) throws IOException,
       InterruptedException, ClassNotFoundException {
-    MiniHBaseCluster hbaseMiniCluster = null;
-    MiniDFSCluster dfsCluster = null;
-    MiniZooKeeperCluster zooKeeperCluster = null;
-    Configuration conf = getConf();
-    if (this.miniCluster) {
-      dfsCluster = new MiniDFSCluster(conf, 2, true, (String[])null);
-      zooKeeperCluster = new MiniZooKeeperCluster(conf);
-      int zooKeeperPort = zooKeeperCluster.startup(new File(System.getProperty("java.io.tmpdir")));
-
-      // mangle the conf so that the fs parameter points to the minidfs we
-      // just started up
-      FileSystem fs = dfsCluster.getFileSystem();
-      FSUtils.setFsDefault(conf, new Path(fs.getUri()));
-      conf.set(HConstants.ZOOKEEPER_CLIENT_PORT, Integer.toString(zooKeeperPort));
-      Path parentdir = fs.getHomeDirectory();
-      FSUtils.setRootDir(conf, parentdir);
-      fs.mkdirs(parentdir);
-      FSUtils.setVersion(fs, parentdir);
-      hbaseMiniCluster = new MiniHBaseCluster(conf, N);
-    }
-
-    try {
     if (N == 1) {
       // If there is only one client and one HRegionServer, we assume nothing
       // has been set up at all.
@@ -1277,13 +1250,6 @@ public class PerformanceEvaluation extends Configured implements Tool {
       // Else, run
       runNIsMoreThanOne(cmd);
     }
-    } finally {
-      if(this.miniCluster) {
-        if (hbaseMiniCluster != null) hbaseMiniCluster.shutdown();
-        if (zooKeeperCluster != null) zooKeeperCluster.shutdown();
-        HBaseTestCase.shutdownDfs(dfsCluster);
-      }
-    }
   }

   protected void printUsage() {
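The cluster bootstrap deleted in the two hunks above (a hand-wired MiniDFSCluster, MiniZooKeeperCluster and MiniHBaseCluster, plus the matching shutdown block) is what pulled the hadoop-minicluster artifact into the pom. For reference, a minimal sketch of how test code normally gets the same in-process cluster through HBaseTestingUtility, which wraps all three mini clusters behind one call; this is standard HBase test-utility API of the period, not code from this commit:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseTestingUtility;

public class MiniClusterSketch {
  public static void main(String[] args) throws Exception {
    HBaseTestingUtility util = new HBaseTestingUtility();
    // Starts an in-process DFS, ZooKeeper and HBase cluster with one region server,
    // replacing the manual MiniDFSCluster/MiniZooKeeperCluster/MiniHBaseCluster wiring.
    util.startMiniCluster(1);
    try {
      // This configuration already points at the mini DFS and the ZooKeeper client port.
      Configuration conf = util.getConfiguration();
      // ... run whatever needs a live cluster against 'conf' ...
    } finally {
      // Tears down HBase, ZooKeeper and DFS again.
      util.shutdownMiniCluster();
    }
  }
}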
@@ -1295,11 +1261,10 @@ public class PerformanceEvaluation extends Configured implements Tool {
       System.err.println(message);
     }
     System.err.println("Usage: java " + this.getClass().getName() + " \\");
-    System.err.println("  [--miniCluster] [--nomapred] [--rows=ROWS] [--table=NAME] \\");
+    System.err.println("  [--nomapred] [--rows=ROWS] [--table=NAME] \\");
     System.err.println("  [--compress=TYPE] [--blockEncoding=TYPE] [-D<property=value>]* <command> <nclients>");
     System.err.println();
     System.err.println("Options:");
-    System.err.println(" miniCluster     Run the test on an HBaseMiniCluster");
     System.err.println(" nomapred        Run multiple clients using threads " +
       "(rather than use mapreduce)");
     System.err.println(" rows            Rows each client runs. Default: One million");
@@ -1362,12 +1327,6 @@ public class PerformanceEvaluation extends Configured implements Tool {
         break;
       }

-      final String miniClusterArgKey = "--miniCluster";
-      if (cmd.startsWith(miniClusterArgKey)) {
-        this.miniCluster = true;
-        continue;
-      }
-
       final String nmr = "--nomapred";
       if (cmd.startsWith(nmr)) {
         this.nomapred = true;