Revert HBASE-6330 - the patch fixed the hadoop 2.0 build but broke the hadoop 1.0 build.

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1357844 13f79535-47bb-0310-9956-ffa450edef68
Author: Jonathan Hsieh
Date:   2012-07-05 19:56:17 +00:00
parent 5b717d965b
commit 0fc4929d66
3 changed files with 77 additions and 15 deletions

HBaseTestingUtility.java

@@ -330,6 +330,21 @@ public class HBaseTestingUtility {
createSubDirAndSystemProperty(
"hadoop.log.dir",
testPath, "hadoop-log-dir");
// This is defaulted in core-default.xml to /tmp/hadoop-${user.name}, but
// we want our own value to ensure uniqueness on the same machine
createSubDirAndSystemProperty(
"hadoop.tmp.dir",
testPath, "hadoop-tmp-dir");
// Read and modified in org.apache.hadoop.mapred.MiniMRCluster
createSubDir(
"mapred.local.dir",
testPath, "mapred-local-dir");
createSubDirAndSystemProperty(
"mapred.working.dir",
testPath, "mapred-working-dir");
}
private void createSubDir(String propertyName, Path parent, String subDirName){
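For context: the hunk cuts off createSubDir at its signature, and createSubDirAndSystemProperty is not shown at all. A hedged sketch of plausible bodies, assuming the utility's conf field; illustrative, not the committed HBase code:

  private void createSubDir(String propertyName, Path parent, String subDirName) {
    // Put a unique directory for this key under the per-test parent path.
    File newDir = new File(new Path(parent, subDirName).toString()).getAbsoluteFile();
    newDir.deleteOnExit(); // best-effort cleanup when the JVM exits
    conf.set(propertyName, newDir.getAbsolutePath()); // mini-clusters read the conf key
  }

  private void createSubDirAndSystemProperty(String propertyName, Path parent, String subDirName) {
    createSubDir(propertyName, parent, subDirName);
    // Some Hadoop code reads System.getProperty(...) instead of the Configuration,
    // so mirror the value as a JVM system property.
    System.setProperty(propertyName, conf.get(propertyName));
  }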
@@ -422,7 +437,7 @@ public class HBaseTestingUtility {
return this.dfsCluster;
}
public MiniDFSCluster startMiniDFSClusterForTestHLog(int namenodePort) throws IOException {
createDirsAndSetProperties();
dfsCluster = new MiniDFSCluster(namenodePort, conf, 5, false, true, true, null,
@@ -430,14 +445,17 @@ public class HBaseTestingUtility {
return dfsCluster;
}
/** This is used before starting HDFS and map-reduce mini-clusters */
private void createDirsAndSetProperties() {
setupClusterTestDir();
System.setProperty(TEST_DIRECTORY_KEY, clusterTestDir.getPath());
createDirAndSetProperty("cache_data", "test.cache.data");
createDirAndSetProperty("hadoop_tmp", "hadoop.tmp.dir");
hadoopLogDir = createDirAndSetProperty("hadoop_logs", "hadoop.log.dir");
createDirAndSetProperty("mapred_output", MapreduceTestingShim.getMROutputDirProp());
createDirAndSetProperty("mapred_local", "mapred.local.dir");
createDirAndSetProperty("mapred_system", "mapred.system.dir");
createDirAndSetProperty("mapred_temp", "mapred.temp.dir");
}
private String createDirAndSetProperty(final String relPath, String property) {
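createDirAndSetProperty is likewise cut off at its signature; a minimal sketch under the same assumptions (clusterTestDir and conf belong to the utility), matching the String return type used for hadoopLogDir above:

  private String createDirAndSetProperty(final String relPath, final String property) {
    String path = clusterTestDir.getPath() + "/" + relPath;
    System.setProperty(property, path); // for code that reads system properties
    conf.set(property, path);           // for code that reads the Configuration
    new File(path).mkdirs();            // ensure the directory exists up front
    return path;
  }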
@@ -1315,12 +1333,16 @@ public class HBaseTestingUtility {
// Allow the user to override FS URI for this map-reduce cluster to use.
mrCluster = new MiniMRCluster(servers,
FS_URI != null ? FS_URI : FileSystem.get(conf).getUri().toString(), 1);
JobConf jobConf = mrCluster.createJobConf();
JobConf jobConf = MapreduceTestingShim.getJobConf(mrCluster);
if (jobConf == null) {
jobConf = mrCluster.createJobConf();
}
jobConf.set("mapred.local.dir",
conf.get("mapred.local.dir")); //Hadoop MiniMR overwrites this while it should not
LOG.info("Mini mapreduce cluster started");
// This fixes TestImportTsv but breaks TestImportExport tests
conf.set("mapred.job.tracker",
mrCluster.createJobConf().get("mapred.job.tracker"));
// Needed for TestImportTsv.
conf.set("mapred.job.tracker", jobConf.get("mapred.job.tracker"));
// This is for mrv2 support; mr1 ignores this.
conf.set("mapreduce.framework.name", "yarn");
String rmAdress = jobConf.get("yarn.resourcemanager.address");

MapreduceTestingShim.java

@@ -19,9 +19,12 @@ package org.apache.hadoop.hbase.mapreduce;
import java.io.IOException;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MiniMRCluster;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.JobID;
@@ -34,6 +37,7 @@ import org.apache.hadoop.mapreduce.JobID;
*/
abstract public class MapreduceTestingShim {
private static MapreduceTestingShim instance;
private static Class[] emptyParam = new Class[] {};
static {
try {
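The static initializer is truncated here. Presumably it probes the classpath for something only one Hadoop line provides and picks the matching shim for the instance field; a hedged sketch of that shape, with a hypothetical probe class and hypothetical subclass names:

  static {
    try {
      // Probe: JobContextImpl exists on the Hadoop 2 / MRv2 line only (assumption).
      Class.forName("org.apache.hadoop.mapreduce.task.JobContextImpl");
      instance = new MapreduceV2Shim(); // hypothetical subclass name
    } catch (ClassNotFoundException e) {
      instance = new MapreduceV1Shim(); // hypothetical subclass name
    }
  }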
@@ -48,6 +52,8 @@ abstract public class MapreduceTestingShim {
abstract public JobContext newJobContext(Configuration jobConf)
throws IOException;
abstract public JobConf obtainJobConf(MiniMRCluster cluster);
abstract public String obtainMROutputDirProp();
@@ -55,6 +61,10 @@
throws IOException {
return instance.newJobContext(jobConf);
}
public static JobConf getJobConf(MiniMRCluster cluster) {
return instance.obtainJobConf(cluster);
}
public static String getMROutputDirProp() {
return instance.obtainMROutputDirProp();
@@ -74,6 +84,23 @@ abstract public class MapreduceTestingShim {
"Failed to instantiate new JobContext(jobConf, new JobID())", e);
}
}
public JobConf obtainJobConf(MiniMRCluster cluster) {
if (cluster == null) return null;
try {
Object runner = cluster.getJobTrackerRunner();
Method meth = runner.getClass().getDeclaredMethod("getJobTracker", emptyParam);
Object tracker = meth.invoke(runner, new Object []{});
Method m = tracker.getClass().getDeclaredMethod("getConf", emptyParam);
return (JobConf) m.invoke(tracker, new Object []{});
} catch (NoSuchMethodException nsme) {
return null;
} catch (InvocationTargetException ite) {
return null;
} catch (IllegalAccessException iae) {
return null;
}
}
@Override
public String obtainMROutputDirProp() {
@@ -94,6 +121,19 @@ abstract public class MapreduceTestingShim {
"Failed to return from Job.getInstance(jobConf)");
}
}
public JobConf obtainJobConf(MiniMRCluster cluster) {
try {
Method meth = MiniMRCluster.class.getMethod("getJobTrackerConf", emptyParam);
return (JobConf) meth.invoke(cluster, new Object []{});
} catch (NoSuchMethodException nsme) {
return null;
} catch (InvocationTargetException ite) {
return null;
} catch (IllegalAccessException iae) {
return null;
}
}
@Override
public String obtainMROutputDirProp() {
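Both obtainJobConf overrides above use the same reflection idiom: look up a method that only one Hadoop line provides, and signal "unsupported here" by returning null instead of throwing, so the caller can fall back (as startMiniMapReduceCluster does with mrCluster.createJobConf()). A self-contained sketch of the idiom, with illustrative class and method names:

  import java.lang.reflect.InvocationTargetException;
  import java.lang.reflect.Method;

  final class ReflectionProbe {
    // Invoke target.methodName() if present; null means the API is absent here.
    static Object invokeIfPresent(Object target, String methodName) {
      try {
        Method m = target.getClass().getMethod(methodName);
        return m.invoke(target);
      } catch (NoSuchMethodException e) {
        return null; // caller falls back to a version-neutral path
      } catch (InvocationTargetException e) {
        return null; // the probed API failed; treat as unsupported
      } catch (IllegalAccessException e) {
        return null; // not invocable from here; treat as unsupported
      }
    }
  }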

TestImportExport.java

@@ -17,7 +17,6 @@
*/
package org.apache.hadoop.hbase.mapreduce;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
import org.apache.hadoop.conf.Configuration;
@@ -28,6 +27,7 @@ import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.MediumTests;
import org.apache.hadoop.hbase.MiniHBaseCluster;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTable;
@@ -36,7 +36,6 @@ import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.HBaseFsck;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.util.GenericOptionsParser;
import org.junit.After;
@@ -45,6 +44,7 @@ import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import static org.junit.Assert.assertEquals;
@Category(MediumTests.class)
public class TestImportExport {
@@ -58,11 +58,12 @@ public class TestImportExport {
private static final byte[] QUAL = Bytes.toBytes("q");
private static final String OUTPUT_DIR = "outputdir";
private static MiniHBaseCluster cluster;
private static long now = System.currentTimeMillis();
@BeforeClass
public static void beforeClass() throws Exception {
UTIL.startMiniCluster();
cluster = UTIL.startMiniCluster();
UTIL.startMiniMapReduceCluster();
}
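Keeping the MiniHBaseCluster handle lets the tests below build each job's Configuration from the live cluster conf instead of UTIL.getConfiguration(); the assumption is that the live conf carries the settings applied while the mini MR cluster started, such as mapred.job.tracker. A sketch of that flow, with the assumption restated in the comment:

  // Assumption: cluster.getConfiguration() reflects keys set during
  // startMiniMapReduceCluster() (e.g. mapred.job.tracker), so a job built
  // from a copy of it submits to the mini cluster, not the local runner.
  Configuration base = cluster.getConfiguration();
  Configuration jobConf = new Configuration(base); // copy; don't mutate the live conf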
@@ -104,16 +105,16 @@ public class TestImportExport {
"1000"
};
GenericOptionsParser opts = new GenericOptionsParser(new Configuration(UTIL.getConfiguration()), args);
GenericOptionsParser opts = new GenericOptionsParser(new Configuration(cluster.getConfiguration()), args);
Configuration conf = opts.getConfiguration();
args = opts.getRemainingArgs();
Job job = Export.createSubmittableJob(conf, args);
job.getConfiguration().set("mapreduce.framework.name", "yarn");
job.waitForCompletion(false);
HBaseFsck.debugLsr(conf, new Path("."));
assertTrue(job.isSuccessful());
String IMPORT_TABLE = "importTableSimpleCase";
t = UTIL.createTable(Bytes.toBytes(IMPORT_TABLE), FAMILYB);
args = new String[] {
@@ -122,14 +123,13 @@ public class TestImportExport {
OUTPUT_DIR
};
opts = new GenericOptionsParser(new Configuration(UTIL.getConfiguration()), args);
opts = new GenericOptionsParser(new Configuration(cluster.getConfiguration()), args);
conf = opts.getConfiguration();
args = opts.getRemainingArgs();
job = Import.createSubmittableJob(conf, args);
job.getConfiguration().set("mapreduce.framework.name", "yarn");
job.waitForCompletion(false);
HBaseFsck.debugLsr(conf, new Path("."));
assertTrue(job.isSuccessful());
Get g = new Get(ROW1);
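Throughout these tests, GenericOptionsParser strips Hadoop's generic options (-D key=value, -fs, -jt, and so on) out of the argument list, applies them to the Configuration, and returns the tool-specific remainder. A minimal standalone sketch of the pattern:

  import org.apache.hadoop.conf.Configuration;
  import org.apache.hadoop.util.GenericOptionsParser;

  public class ParseSketch {
    public static void main(String[] rawArgs) throws Exception {
      Configuration conf = new Configuration();
      GenericOptionsParser opts = new GenericOptionsParser(conf, rawArgs);
      String[] toolArgs = opts.getRemainingArgs(); // what the tool itself consumes
      for (String arg : toolArgs) {
        System.out.println(arg);
      }
      // conf now includes any -D overrides that were in rawArgs.
    }
  }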
@@ -174,7 +174,7 @@ public class TestImportExport {
"1000"
};
GenericOptionsParser opts = new GenericOptionsParser(new Configuration(UTIL.getConfiguration()), args);
GenericOptionsParser opts = new GenericOptionsParser(new Configuration(cluster.getConfiguration()), args);
Configuration conf = opts.getConfiguration();
args = opts.getRemainingArgs();
@@ -198,7 +198,7 @@ public class TestImportExport {
OUTPUT_DIR
};
opts = new GenericOptionsParser(new Configuration(UTIL.getConfiguration()), args);
opts = new GenericOptionsParser(new Configuration(cluster.getConfiguration()), args);
conf = opts.getConfiguration();
args = opts.getRemainingArgs();