HBASE-5870 Hadoop 23 compilation broken because JobTrackerRunner#getJobTracker() method is not found

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1330563 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Zhihong Yu 2012-04-25 21:19:36 +00:00
parent 0be3011b5f
commit 8964098cd8
2 changed files with 47 additions and 2 deletions

View File

@ -66,6 +66,7 @@ import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.io.hfile.ChecksumUtil; import org.apache.hadoop.hbase.io.hfile.ChecksumUtil;
import org.apache.hadoop.hbase.io.hfile.Compression; import org.apache.hadoop.hbase.io.hfile.Compression;
import org.apache.hadoop.hbase.io.hfile.Compression.Algorithm; import org.apache.hadoop.hbase.io.hfile.Compression.Algorithm;
import org.apache.hadoop.hbase.mapreduce.MapreduceTestingShim;
import org.apache.hadoop.hbase.master.HMaster; import org.apache.hadoop.hbase.master.HMaster;
import org.apache.hadoop.hbase.regionserver.HRegion; import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.HRegionServer; import org.apache.hadoop.hbase.regionserver.HRegionServer;
@ -87,6 +88,7 @@ import org.apache.hadoop.hbase.zookeeper.ZooKeeperWatcher;
import org.apache.hadoop.hdfs.DFSClient; import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.hdfs.DistributedFileSystem; import org.apache.hadoop.hdfs.DistributedFileSystem;
import org.apache.hadoop.hdfs.MiniDFSCluster; import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MiniMRCluster; import org.apache.hadoop.mapred.MiniMRCluster;
import org.apache.hadoop.mapred.TaskLog; import org.apache.hadoop.mapred.TaskLog;
import org.apache.zookeeper.KeeperException; import org.apache.zookeeper.KeeperException;
@ -1330,8 +1332,11 @@ public class HBaseTestingUtility {
// Allow the user to override FS URI for this map-reduce cluster to use. // Allow the user to override FS URI for this map-reduce cluster to use.
mrCluster = new MiniMRCluster(servers, mrCluster = new MiniMRCluster(servers,
FS_URI != null ? FS_URI : FileSystem.get(conf).getUri().toString(), 1); FS_URI != null ? FS_URI : FileSystem.get(conf).getUri().toString(), 1);
mrCluster.getJobTrackerRunner().getJobTracker().getConf().set("mapred.local.dir", JobConf jobConf = MapreduceTestingShim.getJobConf(mrCluster);
if (jobConf != null) {
jobConf.set("mapred.local.dir",
conf.get("mapred.local.dir")); //Hadoop MiniMR overwrites this while it should not conf.get("mapred.local.dir")); //Hadoop MiniMR overwrites this while it should not
}
LOG.info("Mini mapreduce cluster started"); LOG.info("Mini mapreduce cluster started");
conf.set("mapred.job.tracker", conf.set("mapred.job.tracker",
mrCluster.createJobConf().get("mapred.job.tracker")); mrCluster.createJobConf().get("mapred.job.tracker"));

View File

@ -19,9 +19,12 @@ package org.apache.hadoop.hbase.mapreduce;
import java.io.IOException; import java.io.IOException;
import java.lang.reflect.Constructor; import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method; import java.lang.reflect.Method;
import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MiniMRCluster;
import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobContext; import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.JobID; import org.apache.hadoop.mapreduce.JobID;
@ -34,6 +37,7 @@ import org.apache.hadoop.mapreduce.JobID;
*/ */
abstract public class MapreduceTestingShim { abstract public class MapreduceTestingShim {
private static MapreduceTestingShim instance; private static MapreduceTestingShim instance;
private static Class[] emptyParam = new Class[] {};
static { static {
try { try {
@ -49,11 +53,17 @@ abstract public class MapreduceTestingShim {
abstract public JobContext newJobContext(Configuration jobConf) abstract public JobContext newJobContext(Configuration jobConf)
throws IOException; throws IOException;
abstract public JobConf obtainJobConf(MiniMRCluster cluster);
public static JobContext createJobContext(Configuration jobConf) public static JobContext createJobContext(Configuration jobConf)
throws IOException { throws IOException {
return instance.newJobContext(jobConf); return instance.newJobContext(jobConf);
} }
public static JobConf getJobConf(MiniMRCluster cluster) {
return instance.obtainJobConf(cluster);
}
private static class MapreduceV1Shim extends MapreduceTestingShim { private static class MapreduceV1Shim extends MapreduceTestingShim {
public JobContext newJobContext(Configuration jobConf) throws IOException { public JobContext newJobContext(Configuration jobConf) throws IOException {
// Implementing: // Implementing:
@ -68,6 +78,23 @@ abstract public class MapreduceTestingShim {
"Failed to instantiate new JobContext(jobConf, new JobID())", e); "Failed to instantiate new JobContext(jobConf, new JobID())", e);
} }
} }
/**
 * Returns the JobConf of the mini cluster's JobTracker via reflection, so this
 * compiles against Hadoop versions where JobTrackerRunner#getJobTracker() does
 * not exist (HBASE-5870). Returns null if the cluster is null or the
 * reflective lookup/invocation fails for any reason (best-effort shim).
 */
public JobConf obtainJobConf(MiniMRCluster cluster) {
  if (cluster == null) return null;
  try {
    Object runner = cluster.getJobTrackerRunner();
    // Use getMethod, not getDeclaredMethod: it resolves public methods
    // inherited from superclasses as well, which getDeclaredMethod misses.
    Method meth = runner.getClass().getMethod("getJobTracker", emptyParam);
    Object tracker = meth.invoke(runner, new Object []{});
    Method m = tracker.getClass().getMethod("getConf", emptyParam);
    return (JobConf) m.invoke(tracker, new Object []{});
  } catch (NoSuchMethodException nsme) {
    // Method absent on this Hadoop version; caller treats null as "no conf".
    return null;
  } catch (InvocationTargetException ite) {
    return null;
  } catch (IllegalAccessException iae) {
    return null;
  }
}
}; };
private static class MapreduceV2Shim extends MapreduceTestingShim { private static class MapreduceV2Shim extends MapreduceTestingShim {
@ -83,6 +110,19 @@ abstract public class MapreduceTestingShim {
"Failed to return from Job.getInstance(jobConf)"); "Failed to return from Job.getInstance(jobConf)");
} }
} }
/**
 * Returns the JobTracker's JobConf via the Hadoop-2 style
 * MiniMRCluster#getJobTrackerConf() accessor, looked up reflectively so this
 * compiles against Hadoop versions lacking that method (HBASE-5870).
 * Returns null if the cluster is null or the reflective call fails.
 */
public JobConf obtainJobConf(MiniMRCluster cluster) {
  // Guard against a null cluster: Method.invoke(null, ...) on an instance
  // method would throw an uncaught NullPointerException. The V1 shim and the
  // caller in HBaseTestingUtility both expect a null return instead.
  if (cluster == null) return null;
  try {
    Method meth = MiniMRCluster.class.getMethod("getJobTrackerConf", emptyParam);
    return (JobConf) meth.invoke(cluster, new Object []{});
  } catch (NoSuchMethodException nsme) {
    // Accessor absent on this Hadoop version; caller treats null as "no conf".
    return null;
  } catch (InvocationTargetException ite) {
    return null;
  } catch (IllegalAccessException iae) {
    return null;
  }
}
}; };
} }