REVERT wrong commit (r1330105), sorry

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1330347 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
larsh 2012-04-25 15:34:30 +00:00
parent 33e6443704
commit 13f1e3acff
2 changed files with 2 additions and 38 deletions

View File

@@ -66,7 +66,6 @@ import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
 import org.apache.hadoop.hbase.io.hfile.ChecksumUtil;
 import org.apache.hadoop.hbase.io.hfile.Compression;
 import org.apache.hadoop.hbase.io.hfile.Compression.Algorithm;
-import org.apache.hadoop.hbase.mapreduce.MapreduceTestingShim;
 import org.apache.hadoop.hbase.master.HMaster;
 import org.apache.hadoop.hbase.regionserver.HRegion;
 import org.apache.hadoop.hbase.regionserver.HRegionServer;
@@ -88,7 +87,6 @@ import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
 import org.apache.hadoop.hdfs.DFSClient;
 import org.apache.hadoop.hdfs.DistributedFileSystem;
 import org.apache.hadoop.hdfs.MiniDFSCluster;
-import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.MiniMRCluster;
 import org.apache.hadoop.mapred.TaskLog;
 import org.apache.zookeeper.KeeperException;
@@ -1332,11 +1330,8 @@ public class HBaseTestingUtility {
     // Allow the user to override FS URI for this map-reduce cluster to use.
     mrCluster = new MiniMRCluster(servers,
       FS_URI != null ? FS_URI : FileSystem.get(conf).getUri().toString(), 1);
-    JobConf jobConf = MapreduceTestingShim.getJobConf(mrCluster);
-    if (jobConf != null) {
-      jobConf.set("mapred.local.dir",
-        conf.get("mapred.local.dir")); //Hadoop MiniMR overwrites this while it should not
-    }
+    mrCluster.getJobTrackerRunner().getJobTracker().getConf().set("mapred.local.dir",
+        conf.get("mapred.local.dir")); //Hadoop MiniMR overwrites this while it should not
     LOG.info("Mini mapreduce cluster started");
     conf.set("mapred.job.tracker",
       mrCluster.createJobConf().get("mapred.job.tracker"));

View File

@@ -19,12 +19,9 @@ package org.apache.hadoop.hbase.mapreduce;
 import java.io.IOException;
 import java.lang.reflect.Constructor;
-import java.lang.reflect.InvocationTargetException;
 import java.lang.reflect.Method;
 import org.apache.hadoop.conf.Configuration;
-import org.apache.hadoop.mapred.JobConf;
-import org.apache.hadoop.mapred.MiniMRCluster;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hadoop.mapreduce.JobID;
@@ -37,7 +34,6 @@ import org.apache.hadoop.mapreduce.JobID;
  */
 abstract public class MapreduceTestingShim {
   private static MapreduceTestingShim instance;
-  private static Class[] emptyParam = new Class[] {};
   static {
     try {
@@ -52,17 +48,11 @@ abstract public class MapreduceTestingShim {
   abstract public JobContext newJobContext(Configuration jobConf)
       throws IOException;
-  abstract public JobConf obtainJobConf(MiniMRCluster cluster);
   public static JobContext createJobContext(Configuration jobConf)
       throws IOException {
     return instance.newJobContext(jobConf);
   }
-  public static JobConf getJobConf(MiniMRCluster cluster) {
-    return instance.obtainJobConf(cluster);
-  }
   private static class MapreduceV1Shim extends MapreduceTestingShim {
     public JobContext newJobContext(Configuration jobConf) throws IOException {
@@ -78,23 +68,6 @@ abstract public class MapreduceTestingShim {
           "Failed to instantiate new JobContext(jobConf, new JobID())", e);
       }
     }
-    public JobConf obtainJobConf(MiniMRCluster cluster) {
-      if (cluster == null) return null;
-      try {
-        Object runner = cluster.getJobTrackerRunner();
-        Method meth = runner.getClass().getDeclaredMethod("getJobTracker", emptyParam);
-        Object tracker = meth.invoke(runner, new Object []{});
-        Method m = tracker.getClass().getDeclaredMethod("getConf", emptyParam);
-        return (JobConf) m.invoke(tracker, new Object []{});
-      } catch (NoSuchMethodException nsme) {
-        return null;
-      } catch (InvocationTargetException ite) {
-        return null;
-      } catch (IllegalAccessException iae) {
-        return null;
-      }
-    }
   };
   private static class MapreduceV2Shim extends MapreduceTestingShim {
@@ -110,10 +83,6 @@ abstract public class MapreduceTestingShim {
           "Failed to return from Job.getInstance(jobConf)");
       }
     }
-    public JobConf obtainJobConf(MiniMRCluster cluster) {
-      return null;
-    }
   };
 }