HBASE-6330 TestImportExport has been failing against hadoop 0.23/2.0 profile [part2]

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1357480 13f79535-47bb-0310-9956-ffa450edef68

parent c733938f14
commit af21a030c0

HBaseTestingUtility.java:

@@ -330,21 +330,6 @@ public class HBaseTestingUtility {
    createSubDirAndSystemProperty(
      "hadoop.log.dir",
      testPath, "hadoop-log-dir");

    // This is defaulted in core-default.xml to /tmp/hadoop-${user.name}, but
    // we want our own value to ensure uniqueness on the same machine
    createSubDirAndSystemProperty(
      "hadoop.tmp.dir",
      testPath, "hadoop-tmp-dir");

    // Read and modified in org.apache.hadoop.mapred.MiniMRCluster
    createSubDir(
      "mapred.local.dir",
      testPath, "mapred-local-dir");

    createSubDirAndSystemProperty(
      "mapred.working.dir",
      testPath, "mapred-working-dir");
  }

  private void createSubDir(String propertyName, Path parent, String subDirName){
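Note: createSubDir and createSubDirAndSystemProperty are not shown in full in
this hunk. A minimal sketch of what a createSubDirAndSystemProperty-style
helper does -- create a unique per-test subdirectory and publish it as a
system property so mini-cluster code that reads e.g. "hadoop.log.dir" picks
it up. The class name and the UUID-based uniqueness below are assumptions,
not the committed code:

    import java.io.File;
    import java.util.UUID;

    public class TestDirHelper {
      // Create a unique subdirectory under parent and publish it as a system
      // property; code that later reads "hadoop.log.dir" then sees our directory.
      static File createSubDirAndSystemProperty(String propertyName,
          File parent, String subDirName) {
        File dir = new File(parent, subDirName + "-" + UUID.randomUUID());
        dir.mkdirs(); // best effort; later test steps fail loudly if this didn't work
        System.setProperty(propertyName, dir.getAbsolutePath());
        return dir;
      }

      public static void main(String[] args) {
        File d = createSubDirAndSystemProperty("hadoop.log.dir",
            new File(System.getProperty("java.io.tmpdir")), "hadoop-log-dir");
        System.out.println("hadoop.log.dir -> " + d);
      }
    }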
@@ -437,7 +422,7 @@ public class HBaseTestingUtility {
    return this.dfsCluster;
  }

  public MiniDFSCluster startMiniDFSClusterForTestHLog(int namenodePort) throws IOException {
    createDirsAndSetProperties();
    dfsCluster = new MiniDFSCluster(namenodePort, conf, 5, false, true, true, null,
@@ -445,17 +430,14 @@ public class HBaseTestingUtility {
    return dfsCluster;
  }

  /** This is used before starting HDFS and map-reduce mini-clusters */
  private void createDirsAndSetProperties() {
    setupClusterTestDir();
    System.setProperty(TEST_DIRECTORY_KEY, clusterTestDir.getPath());
    createDirAndSetProperty("cache_data", "test.cache.data");
    createDirAndSetProperty("hadoop_tmp", "hadoop.tmp.dir");
    hadoopLogDir = createDirAndSetProperty("hadoop_logs", "hadoop.log.dir");
    createDirAndSetProperty("mapred_output", MapreduceTestingShim.getMROutputDirProp());
    createDirAndSetProperty("mapred_local", "mapred.local.dir");
    createDirAndSetProperty("mapred_system", "mapred.system.dir");
    createDirAndSetProperty("mapred_temp", "mapred.temp.dir");
  }

  private String createDirAndSetProperty(final String relPath, String property) {
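Note: createDirsAndSetProperties() funnels every Hadoop scratch location
through one helper so each test run gets its own directories. The body of
createDirAndSetProperty is cut off by the hunk; a sketch of what such a
helper plausibly does, assuming the clusterTestDir and conf fields that
HBaseTestingUtility already holds:

    import java.io.File;

    import org.apache.hadoop.conf.Configuration;

    public class DirPropertyHelper {
      private final File clusterTestDir; // per-test root directory
      private final Configuration conf;  // conf handed to the mini clusters

      DirPropertyHelper(File clusterTestDir, Configuration conf) {
        this.clusterTestDir = clusterTestDir;
        this.conf = conf;
      }

      // Derive a per-test path and publish it on both channels Hadoop code
      // reads from: the Configuration and JVM system properties.
      String createDirAndSetProperty(String relPath, String property) {
        String path = new File(clusterTestDir, relPath).getAbsolutePath();
        System.setProperty(property, path);
        conf.set(property, path);
        new File(path).mkdirs();
        return path;
      }
    }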
@@ -1333,16 +1315,12 @@ public class HBaseTestingUtility {
    // Allow the user to override FS URI for this map-reduce cluster to use.
    mrCluster = new MiniMRCluster(servers,
      FS_URI != null ? FS_URI : FileSystem.get(conf).getUri().toString(), 1);
    JobConf jobConf = MapreduceTestingShim.getJobConf(mrCluster);
    if (jobConf == null) {
      jobConf = mrCluster.createJobConf();
    }
    jobConf.set("mapred.local.dir",
      conf.get("mapred.local.dir")); // Hadoop MiniMR overwrites this while it should not
    JobConf jobConf = mrCluster.createJobConf();
    LOG.info("Mini mapreduce cluster started");

    // Needed for TestImportTsv.
    conf.set("mapred.job.tracker", jobConf.get("mapred.job.tracker"));
    // This fixes TestImportTsv but breaks TestImportExport tests
    conf.set("mapred.job.tracker",
      mrCluster.createJobConf().get("mapred.job.tracker"));
    // this for mrv2 support; mr1 ignores this
    conf.set("mapreduce.framework.name", "yarn");
    String rmAdress = jobConf.get("yarn.resourcemanager.address");
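Note: the conf.set calls at the end of this hunk are what let client-side
jobs find the mini MR cluster. A sketch of the resulting client-side
configuration, with an assumed placeholder address rather than the one the
mini cluster actually hands back through its JobConf:

    import org.apache.hadoop.conf.Configuration;

    public class MiniMrClientConf {
      // Build the conf a job client needs: where the job tracker lives
      // (the MRv1 submission path reads "mapred.job.tracker") and which
      // framework to use ("mapreduce.framework.name" drives MRv2/YARN;
      // MRv1 simply ignores it).
      static Configuration clientConf(Configuration base, String jobTrackerAddress) {
        Configuration conf = new Configuration(base);
        conf.set("mapred.job.tracker", jobTrackerAddress);
        conf.set("mapreduce.framework.name", "yarn");
        return conf;
      }

      public static void main(String[] args) {
        Configuration c = clientConf(new Configuration(), "localhost:12345");
        System.out.println(c.get("mapred.job.tracker"));
      }
    }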
MapreduceTestingShim.java:

@@ -19,12 +19,9 @@ package org.apache.hadoop.hbase.mapreduce;
import java.io.IOException;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MiniMRCluster;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.JobID;
@@ -37,7 +34,6 @@ import org.apache.hadoop.mapreduce.JobID;
 */
abstract public class MapreduceTestingShim {
  private static MapreduceTestingShim instance;
  private static Class[] emptyParam = new Class[] {};

  static {
    try {
@@ -52,8 +48,6 @@ abstract public class MapreduceTestingShim {

  abstract public JobContext newJobContext(Configuration jobConf)
    throws IOException;

  abstract public JobConf obtainJobConf(MiniMRCluster cluster);

  abstract public String obtainMROutputDirProp();
@@ -61,10 +55,6 @@ abstract public class MapreduceTestingShim {
      throws IOException {
    return instance.newJobContext(jobConf);
  }

  public static JobConf getJobConf(MiniMRCluster cluster) {
    return instance.obtainJobConf(cluster);
  }

  public static String getMROutputDirProp() {
    return instance.obtainMROutputDirProp();
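Note: the static block near the top of this class (cut off after "try {") is
what picks an MRv1 or MRv2 implementation once, at class-load time. The usual
shape of this shim pattern is to probe for a class that exists in only one
Hadoop generation; the probed class name below is an assumption, not taken
from this diff:

    public abstract class VersionShim {
      private static final VersionShim INSTANCE;

      static {
        VersionShim chosen;
        try {
          // Present in Hadoop 0.23/2.x, absent in 1.x (assumed probe).
          Class.forName("org.apache.hadoop.mapreduce.task.JobContextImpl");
          chosen = new V2Shim();
        } catch (ClassNotFoundException e) {
          chosen = new V1Shim();
        }
        INSTANCE = chosen;
      }

      abstract String describe();

      public static String detect() { return INSTANCE.describe(); }

      private static class V1Shim extends VersionShim {
        String describe() { return "MRv1-style APIs"; }
      }

      private static class V2Shim extends VersionShim {
        String describe() { return "MRv2/YARN-style APIs"; }
      }

      public static void main(String[] args) {
        // Prints the V1 branch when Hadoop 2 is not on the classpath.
        System.out.println(VersionShim.detect());
      }
    }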
@@ -84,23 +74,6 @@ abstract public class MapreduceTestingShim {
          "Failed to instantiate new JobContext(jobConf, new JobID())", e);
    }
  }

  public JobConf obtainJobConf(MiniMRCluster cluster) {
    if (cluster == null) return null;
    try {
      Object runner = cluster.getJobTrackerRunner();
      Method meth = runner.getClass().getDeclaredMethod("getJobTracker", emptyParam);
      Object tracker = meth.invoke(runner, new Object []{});
      Method m = tracker.getClass().getDeclaredMethod("getConf", emptyParam);
      return (JobConf) m.invoke(tracker, new Object []{});
    } catch (NoSuchMethodException nsme) {
      return null;
    } catch (InvocationTargetException ite) {
      return null;
    } catch (IllegalAccessException iae) {
      return null;
    }
  }

  @Override
  public String obtainMROutputDirProp() {
@@ -121,19 +94,6 @@ abstract public class MapreduceTestingShim {
          "Failed to return from Job.getInstance(jobConf)");
    }
  }

  public JobConf obtainJobConf(MiniMRCluster cluster) {
    try {
      Method meth = MiniMRCluster.class.getMethod("getJobTrackerConf", emptyParam);
      return (JobConf) meth.invoke(cluster, new Object []{});
    } catch (NoSuchMethodException nsme) {
      return null;
    } catch (InvocationTargetException ite) {
      return null;
    } catch (IllegalAccessException iae) {
      return null;
    }
  }

  @Override
  public String obtainMROutputDirProp() {
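Note: both obtainJobConf variants use the same defensive pattern -- resolve a
method by name at runtime and translate every reflection failure into null,
so callers can fall back (as startMiniMapReduceCluster does with its
"if (jobConf == null)" branch). A self-contained illustration of the pattern,
with a hypothetical helper name:

    import java.lang.reflect.InvocationTargetException;
    import java.lang.reflect.Method;

    public class ReflectiveProbe {
      // Look up a no-arg method by name and call it; any reflective failure
      // means "not available in this version" and comes back as null.
      static Object callIfPresent(Object target, String methodName) {
        try {
          Method m = target.getClass().getMethod(methodName);
          return m.invoke(target);
        } catch (NoSuchMethodException e) {
          return null; // method doesn't exist here
        } catch (InvocationTargetException e) {
          return null; // method exists but threw
        } catch (IllegalAccessException e) {
          return null; // not callable from this context
        }
      }

      public static void main(String[] args) {
        System.out.println(callIfPresent("hello", "toUpperCase"));  // HELLO
        System.out.println(callIfPresent("hello", "noSuchMethod")); // null
      }
    }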
TestImportExport.java:

@@ -17,6 +17,7 @@
 */
package org.apache.hadoop.hbase.mapreduce;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import org.apache.hadoop.conf.Configuration;
@@ -27,7 +28,6 @@ import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.MediumTests;
import org.apache.hadoop.hbase.MiniHBaseCluster;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTable;
@@ -36,6 +36,7 @@ import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.HBaseFsck;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.util.GenericOptionsParser;
import org.junit.After;
@@ -44,7 +45,6 @@ import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import static org.junit.Assert.assertEquals;

@Category(MediumTests.class)
public class TestImportExport {
@@ -58,12 +58,11 @@ public class TestImportExport {
  private static final byte[] QUAL = Bytes.toBytes("q");
  private static final String OUTPUT_DIR = "outputdir";

  private static MiniHBaseCluster cluster;
  private static long now = System.currentTimeMillis();

  @BeforeClass
  public static void beforeClass() throws Exception {
    cluster = UTIL.startMiniCluster();
    UTIL.startMiniCluster();
    UTIL.startMiniMapReduceCluster();
  }

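Note: the test now goes through the HBaseTestingUtility instance (UTIL) for
all cluster state instead of keeping its own MiniHBaseCluster reference. A
minimal sketch of that lifecycle; the @AfterClass teardown is assumed here,
since this diff does not show it:

    import org.apache.hadoop.hbase.HBaseTestingUtility;
    import org.junit.AfterClass;
    import org.junit.BeforeClass;

    public class MiniClusterLifecycleSketch {
      private static final HBaseTestingUtility UTIL = new HBaseTestingUtility();

      @BeforeClass
      public static void beforeClass() throws Exception {
        UTIL.startMiniCluster();          // ZK + HDFS + HBase
        UTIL.startMiniMapReduceCluster(); // MR on top, sharing UTIL's conf
      }

      @AfterClass
      public static void afterClass() throws Exception {
        UTIL.shutdownMiniMapReduceCluster();
        UTIL.shutdownMiniCluster();
      }
    }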
@@ -105,16 +104,16 @@ public class TestImportExport {
      "1000"
    };

    GenericOptionsParser opts = new GenericOptionsParser(new Configuration(cluster.getConfiguration()), args);
    GenericOptionsParser opts = new GenericOptionsParser(new Configuration(UTIL.getConfiguration()), args);
    Configuration conf = opts.getConfiguration();
    args = opts.getRemainingArgs();

    Job job = Export.createSubmittableJob(conf, args);
    job.getConfiguration().set("mapreduce.framework.name", "yarn");
    job.waitForCompletion(false);
    HBaseFsck.debugLsr(conf, new Path("."));
    assertTrue(job.isSuccessful());

    String IMPORT_TABLE = "importTableSimpleCase";
    t = UTIL.createTable(Bytes.toBytes(IMPORT_TABLE), FAMILYB);
    args = new String[] {
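Note: the GenericOptionsParser dance repeated through this test splits the
command line into Hadoop-generic options (folded into the Configuration) and
tool arguments (returned by getRemainingArgs). A self-contained example of
that mechanism, with made-up arguments:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.util.GenericOptionsParser;

    public class OptsParsingExample {
      public static void main(String[] ignored) throws Exception {
        // "-D key=value" is consumed into the Configuration; the trailing
        // tool arguments (table name, output dir) are handed back untouched.
        String[] args = { "-D", "mapred.map.tasks=1", "exportTable", "outputdir" };
        GenericOptionsParser opts = new GenericOptionsParser(new Configuration(), args);
        Configuration conf = opts.getConfiguration();
        String[] remaining = opts.getRemainingArgs();
        System.out.println(conf.get("mapred.map.tasks"));  // 1
        System.out.println(String.join(" ", remaining));   // exportTable outputdir
      }
    }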
@@ -123,13 +122,14 @@ public class TestImportExport {
      OUTPUT_DIR
    };

    opts = new GenericOptionsParser(new Configuration(cluster.getConfiguration()), args);
    opts = new GenericOptionsParser(new Configuration(UTIL.getConfiguration()), args);
    conf = opts.getConfiguration();
    args = opts.getRemainingArgs();

    job = Import.createSubmittableJob(conf, args);
    job.getConfiguration().set("mapreduce.framework.name", "yarn");
    job.waitForCompletion(false);
    HBaseFsck.debugLsr(conf, new Path("."));
    assertTrue(job.isSuccessful());

    Get g = new Get(ROW1);
@@ -174,7 +174,7 @@ public class TestImportExport {
      "1000"
    };

    GenericOptionsParser opts = new GenericOptionsParser(new Configuration(cluster.getConfiguration()), args);
    GenericOptionsParser opts = new GenericOptionsParser(new Configuration(UTIL.getConfiguration()), args);
    Configuration conf = opts.getConfiguration();
    args = opts.getRemainingArgs();
@@ -198,7 +198,7 @@ public class TestImportExport {
      OUTPUT_DIR
    };

    opts = new GenericOptionsParser(new Configuration(cluster.getConfiguration()), args);
    opts = new GenericOptionsParser(new Configuration(UTIL.getConfiguration()), args);
    conf = opts.getConfiguration();
    args = opts.getRemainingArgs();