HBASE-7904 Make mapreduce jobs pass based on 2.0.4-alpha

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1462686 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
Zhihong Yu 2013-03-30 02:26:30 +00:00
parent e4d696d669
commit 534198cd36
4 changed files with 15 additions and 5 deletions

View File

@@ -484,7 +484,7 @@ public class HRegion implements HeapSize { // , Writable{
       this.rsAccounting = this.rsServices.getRegionServerAccounting();
       // don't initialize coprocessors if not running within a regionserver
       // TODO: revisit if coprocessors should load in other cases
-      this.coprocessorHost = new RegionCoprocessorHost(this, rsServices, conf);
+      this.coprocessorHost = new RegionCoprocessorHost(this, rsServices, baseConf);
       this.metricsRegionWrapper = new MetricsRegionWrapperImpl(this);
       this.metricsRegion = new MetricsRegion(this.metricsRegionWrapper);
     } else {

View File

@@ -1,5 +1,3 @@
 /**
  *
  * Licensed to the Apache Software Foundation (ASF) under one
 (two removed header lines are not visible in this capture)
@@ -38,8 +36,10 @@ import java.util.Arrays;
 import java.util.Collection;
 import java.util.Collections;
 import java.util.HashSet;
+import java.util.Iterator;
 import java.util.List;
 import java.util.Map;
+import java.util.Map.Entry;
 import java.util.NavigableSet;
 import java.util.Random;
 import java.util.Set;
@@ -1656,6 +1656,7 @@ public class HBaseTestingUtility extends HBaseCommonTestingUtility {
     if (jobConf == null) {
       jobConf = mrCluster.createJobConf();
     }
+    HBaseConfiguration.merge(this.conf, jobConf);
     jobConf.set("mapred.local.dir",
       conf.get("mapred.local.dir")); //Hadoop MiniMR overwrites this while it should not
     LOG.info("Mini mapreduce cluster started");

View File

@@ -228,8 +228,11 @@ public class TestImportExport {
       OUTPUT_DIR
     };
-    GenericOptionsParser opts = new GenericOptionsParser(new Configuration(cluster.getConfiguration()), args);
+    GenericOptionsParser opts = new GenericOptionsParser(new Configuration(
+        cluster.getConfiguration()), args);
     Configuration conf = opts.getConfiguration();
+    // copy or add the necessary configuration values from the map reduce config to the hbase config
+    copyConfigurationValues(UTIL.getConfiguration(), conf);
     args = opts.getRemainingArgs();
     assertEquals(conf.get(Export.EXPORT_BATCHING), EXPORT_BATCH_SIZE);
@@ -350,6 +353,8 @@ public class TestImportExport {
     GenericOptionsParser opts = new GenericOptionsParser(new Configuration(
         cluster.getConfiguration()), args);
     Configuration conf = opts.getConfiguration();
+    // copy or add the necessary configuration values from the map reduce config to the hbase config
+    copyConfigurationValues(UTIL.getConfiguration(), conf);
     args = opts.getRemainingArgs();

     Job job = Export.createSubmittableJob(conf, args);
@@ -369,6 +374,8 @@ public class TestImportExport {
     opts = new GenericOptionsParser(new Configuration(cluster.getConfiguration()), args);
     conf = opts.getConfiguration();
+    // copy or add the necessary configuration values from the map reduce config to the hbase config
+    copyConfigurationValues(UTIL.getConfiguration(), conf);
     args = opts.getRemainingArgs();

     job = Import.createSubmittableJob(conf, args);
@@ -392,6 +399,8 @@ public class TestImportExport {
     opts = new GenericOptionsParser(new Configuration(cluster.getConfiguration()), args);
     conf = opts.getConfiguration();
+    // copy or add the necessary configuration values from the map reduce config to the hbase config
+    copyConfigurationValues(UTIL.getConfiguration(), conf);
     args = opts.getRemainingArgs();

     job = Import.createSubmittableJob(conf, args);

View File

@@ -732,7 +732,7 @@
     <buildDate>${maven.build.timestamp}</buildDate>
     <compileSource>1.6</compileSource>
     <!-- Dependencies -->
-    <hadoop-two.version>2.0.2-alpha</hadoop-two.version>
+    <hadoop-two.version>2.0.4-SNAPSHOT</hadoop-two.version>
    <hadoop-one.version>1.1.2</hadoop-one.version>
     <commons-cli.version>1.2</commons-cli.version>
     <commons-codec.version>1.7</commons-codec.version>