HBASE-8077 Configure the job name in ImportTsv

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1457632 13f79535-47bb-0310-9956-ffa450edef68
Michael Stack 2013-03-18 04:04:17 +00:00
parent 54c5d944ae
commit 5d1e0a7055
1 changed file with 4 additions and 1 deletion

ImportTsv.java

@@ -73,6 +73,7 @@ public class ImportTsv extends Configured implements Tool {
   public final static String MAPPER_CONF_KEY = "importtsv.mapper.class";
   public final static String BULK_OUTPUT_CONF_KEY = "importtsv.bulk.output";
   public final static String TIMESTAMP_CONF_KEY = "importtsv.timestamp";
+  public final static String JOB_NAME_CONF_KEY = "mapred.job.name";
   // TODO: the rest of these configs are used exclusively by TsvImporterMapper.
   // Move them out of the tool and let the mapper handle its own validation.
   public final static String SKIP_LINES_CONF_KEY = "importtsv.skip.bad.lines";
@@ -279,7 +280,8 @@ public class ImportTsv extends Configured implements Tool {
 
     String tableName = args[0];
     Path inputDir = new Path(args[1]);
-    Job job = new Job(conf, NAME + "_" + tableName);
+    String jobName = conf.get(JOB_NAME_CONF_KEY,NAME + "_" + tableName);
+    Job job = new Job(conf, jobName);
     job.setJarByClass(mapperClass);
     FileInputFormat.setInputPaths(job, inputDir);
     job.setInputFormatClass(TextInputFormat.class);
@@ -370,6 +372,7 @@ public class ImportTsv extends Configured implements Tool {
       "  -D" + TIMESTAMP_CONF_KEY + "=currentTimeAsLong - use the specified timestamp for the import\n" +
       "  -D" + MAPPER_CONF_KEY + "=my.Mapper - A user-defined Mapper to use instead of " +
       DEFAULT_MAPPER.getName() + "\n" +
+      "  -D" + JOB_NAME_CONF_KEY + "=jobName - use the specified mapreduce job name for the import\n" +
       "For performance consider the following options:\n" +
       "  -Dmapred.map.tasks.speculative.execution=false\n" +
       "  -Dmapred.reduce.tasks.speculative.execution=false";