HBASE-405 TIF and TOF use log4j directly rather than apache commons-logging
git-svn-id: https://svn.apache.org/repos/asf/hadoop/hbase/trunk@653941 13f79535-47bb-0310-9956-ffa450edef68
commit 8df7f79781
parent ab778e22e4
CHANGES.txt
@@ -30,6 +30,7 @@ Hbase Change Log
 HBASE-609 Master doesn't see regionserver edits because of clock skew
 HBASE-607 MultiRegionTable.makeMultiRegionTable is not deterministic enough
           for regression tests
+HBASE-405 TIF and TOF use log4j directly rather than apache commons-logging

 IMPROVEMENTS
 HBASE-559 MR example job to count table rows

src/java/org/apache/hadoop/hbase/mapred/TableOutputFormat.java
@@ -20,12 +20,14 @@
 package org.apache.hadoop.hbase.mapred;

 import java.io.IOException;
 import java.util.Map;

+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.io.MapWritable;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.io.BatchUpdate;
 import org.apache.hadoop.io.Text;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.mapred.FileAlreadyExistsException;
 import org.apache.hadoop.mapred.InvalidJobConfException;
 import org.apache.hadoop.mapred.JobConf;
@@ -34,28 +36,14 @@ import org.apache.hadoop.mapred.RecordWriter;
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.util.Progressable;

-import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-import org.apache.hadoop.hbase.client.HTable;
-import org.apache.hadoop.hbase.io.BatchUpdate;
-
-import org.apache.log4j.Logger;
-
 /**
  * Convert Map/Reduce output and write it to an HBase table
  */
-public class TableOutputFormat
-  extends OutputFormatBase<Text, BatchUpdate> {
+public class TableOutputFormat extends OutputFormatBase<Text, BatchUpdate> {

   /** JobConf parameter that specifies the output table */
   public static final String OUTPUT_TABLE = "hbase.mapred.outputtable";
-
-  static final Logger LOG = Logger.getLogger(TableOutputFormat.class.getName());
-
-  /** constructor */
-  public TableOutputFormat() {
-    super();
-  }
+  private final Log LOG = LogFactory.getLog(TableOutputFormat.class);

   /**
    * Convert Reduce output (key, value) to (HStoreKey, KeyedDataArrayWritable)
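
The idiom the patch adopts is the commons-logging facade in place of a concrete log4j Logger. A minimal sketch of that pattern, assuming a standalone example class (the class name and log messages are illustrative, not part of this commit):

    // Sketch only: the commons-logging idiom TIF and TOF switch to.
    import org.apache.commons.logging.Log;
    import org.apache.commons.logging.LogFactory;

    public class LoggingExample {
      // The Log interface hides the concrete backend; commons-logging can
      // still bind to log4j at runtime, so existing log4j configs keep working.
      private final Log LOG = LogFactory.getLog(LoggingExample.class);

      public void doWork() {
        LOG.info("starting work");
        if (LOG.isDebugEnabled()) {
          LOG.debug("detail message built only when debug logging is enabled");
        }
      }

      public static void main(String[] args) {
        new LoggingExample().doWork();
      }
    }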