HBASE-2086 Job(configuration,String) deprecated

git-svn-id: https://svn.apache.org/repos/asf/hadoop/hbase/trunk@895515 13f79535-47bb-0310-9956-ffa450edef68
Michael Stack 2010-01-04 00:23:11 +00:00
parent 70312ea625
commit f4c8d1a06c
7 changed files with 20 additions and 7 deletions
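
Every mapreduce change below applies the same substitution: the Job(Configuration, String) constructor is deprecated in Hadoop's new mapreduce API, so each caller now builds a Cluster from its Configuration, obtains the Job through Job.getInstance(Cluster, Configuration), and sets the job name in a separate call. A minimal sketch of the pattern, with an illustrative wrapper class and method that are not part of this commit:

    import java.io.IOException;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.Cluster;
    import org.apache.hadoop.mapreduce.Job;

    public class JobSetupSketch {
      public static Job newNamedJob(Configuration conf, String name)
          throws IOException {
        // Deprecated: Job job = new Job(conf, name);
        Cluster mrCluster = new Cluster(conf);      // cluster handle built from the conf
        Job job = Job.getInstance(mrCluster, conf); // replacement factory method
        job.setJobName(name);                       // the name is now set explicitly
        return job;
      }
    }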

@@ -279,6 +279,7 @@ Release 0.21.0 - Unreleased
    HBASE-2085 StringBuffer -> StringBuilder - conversion of references as necessary
               (Kay Kay via Stack)
    HBASE-2052 Upper bound of outstanding WALs can be overrun
+   HBASE-2086 Job(configuration,String) deprecated (Kay Kay via Stack)
 
  NEW FEATURES
    HBASE-1901 "General" partitioner for "hbase-48" bulk (behind the api, write

@@ -47,7 +47,7 @@ public class HBaseConfiguration extends Configuration {
   }
 
   /**
-   * Instantinating HBaseConfiguration() is deprecated. Please use
+   * Instantiating HBaseConfiguration() is deprecated. Please use
    * HBaseConfiguration#create(conf) to construct a plain Configuration
    */
   @Deprecated
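
The factory the corrected javadoc points at returns a plain Configuration rather than an HBaseConfiguration subclass, which is also how the HLog hunk at the end of this commit uses it. A sketch of the recommended call, assuming only the HBaseConfiguration#create(conf) method named in the comment (the wrapper class and method are illustrative):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.HBaseConfiguration;

    public class ConfSketch {
      public static Configuration hbaseConf(Configuration existing) {
        // Deprecated: new HBaseConfiguration(existing)
        // create(conf) yields a plain Configuration carrying the HBase
        // resource files plus the settings from the passed-in conf.
        return HBaseConfiguration.create(existing);
      }
    }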

@@ -29,6 +29,7 @@ import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.HConstants;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.mapreduce.Cluster;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.apache.hadoop.util.GenericOptionsParser;
@@ -123,8 +124,9 @@ public class BuildTableIndex {
       iconf.addFromXML(content);
       conf.set("hbase.index.conf", content);
     }
-    Job job = new Job(conf, "build index for table " + tableName);
+    Cluster mrCluster = new Cluster(conf);
+    Job job = Job.getInstance(mrCluster, conf);
+    job.setJobName("build index for table " + tableName);
     // number of indexes to partition into
     job.setNumReduceTasks(numReduceTasks);
     Scan scan = new Scan();

@@ -27,6 +27,7 @@ import org.apache.hadoop.hbase.HBaseConfiguration;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.mapreduce.Cluster;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;
 import org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat;
@@ -78,7 +79,9 @@ public class Export {
       throws IOException {
     String tableName = args[0];
     Path outputDir = new Path(args[1]);
-    Job job = new Job(conf, NAME + "_" + tableName);
+    Cluster mrCluster = new Cluster(conf);
+    Job job = Job.getInstance(mrCluster, conf);
+    job.setJobName(NAME + "_" + tableName);
     job.setJarByClass(Exporter.class);
     // TODO: Allow passing filter and subset of rows/columns.
     Scan s = new Scan();

@@ -28,6 +28,7 @@ import org.apache.hadoop.hbase.KeyValue;
 import org.apache.hadoop.hbase.client.Put;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
+import org.apache.hadoop.mapreduce.Cluster;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
 import org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat;
@@ -85,7 +86,10 @@ public class Import {
       throws IOException {
     String tableName = args[0];
     Path inputDir = new Path(args[1]);
-    Job job = new Job(conf, NAME + "_" + tableName);
+    Cluster mrCluster = new Cluster(conf);
+    Job job = Job.getInstance(mrCluster, conf);
+    job.setJobName(NAME + "_" + tableName);
     job.setJarByClass(Importer.class);
     FileInputFormat.setInputPaths(job, inputDir);
     job.setInputFormatClass(SequenceFileInputFormat.class);

@@ -29,6 +29,7 @@ import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.filter.FirstKeyOnlyFilter;
 import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
 import org.apache.hadoop.hbase.util.Bytes;
+import org.apache.hadoop.mapreduce.Cluster;
 import org.apache.hadoop.mapreduce.Job;
 import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat;
 import org.apache.hadoop.util.GenericOptionsParser;
@@ -85,7 +86,9 @@ public class RowCounter {
   public static Job createSubmittableJob(Configuration conf, String[] args)
       throws IOException {
     String tableName = args[0];
-    Job job = new Job(conf, NAME + "_" + tableName);
+    Cluster mrCluster = new Cluster(conf);
+    Job job = Job.getInstance(mrCluster, conf);
+    job.setJobName(NAME + "_" + tableName);
     job.setJarByClass(RowCounter.class);
     // Columns are space delimited
     StringBuilder sb = new StringBuilder();

@@ -348,7 +348,7 @@ public class HLog implements HConstants, Syncable {
     Path oldFile = cleanupCurrentWriter(this.filenum);
     this.filenum = System.currentTimeMillis();
     Path newPath = computeFilename(this.filenum);
-    this.writer = createWriter(fs, newPath, new HBaseConfiguration(conf));
+    this.writer = createWriter(fs, newPath, HBaseConfiguration.create(conf));
     LOG.info((oldFile != null?
         "Roll " + FSUtils.getPath(oldFile) + ", entries=" +
         this.numEntries.get() +