HBASE-405 TIF and TOF use log4j directly rather than apache commons-logging

git-svn-id: https://svn.apache.org/repos/asf/hadoop/hbase/trunk@653941 13f79535-47bb-0310-9956-ffa450edef68
Jim Kellerman 2008-05-06 22:18:24 +00:00
parent ab778e22e4
commit 8df7f79781
2 changed files with 110 additions and 121 deletions

CHANGES.txt

@@ -30,6 +30,7 @@ Hbase Change Log
    HBASE-609   Master doesn't see regionserver edits because of clock skew
    HBASE-607   MultiRegionTable.makeMultiRegionTable is not deterministic enough
                for regression tests
+   HBASE-405   TIF and TOF use log4j directly rather than apache commons-logging

 IMPROVEMENTS
    HBASE-559   MR example job to count table rows

org/apache/hadoop/hbase/mapred/TableOutputFormat.java

@@ -1,121 +1,109 @@
 /**
  * Copyright 2007 The Apache Software Foundation
  *
  * Licensed to the Apache Software Foundation (ASF) under one
  * or more contributor license agreements.  See the NOTICE file
  * distributed with this work for additional information
  * regarding copyright ownership.  The ASF licenses this file
  * to you under the Apache License, Version 2.0 (the
  * "License"); you may not use this file except in compliance
  * with the License.  You may obtain a copy of the License at
  *
  *     http://www.apache.org/licenses/LICENSE-2.0
  *
  * Unless required by applicable law or agreed to in writing, software
  * distributed under the License is distributed on an "AS IS" BASIS,
  * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  * See the License for the specific language governing permissions and
  * limitations under the License.
  */
 package org.apache.hadoop.hbase.mapred;

 import java.io.IOException;
-import java.util.Map;

+import org.apache.commons.logging.Log;
+import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.fs.FileSystem;
-import org.apache.hadoop.io.MapWritable;
+import org.apache.hadoop.hbase.HBaseConfiguration;
+import org.apache.hadoop.hbase.client.HTable;
+import org.apache.hadoop.hbase.io.BatchUpdate;
 import org.apache.hadoop.io.Text;
-import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.mapred.FileAlreadyExistsException;
 import org.apache.hadoop.mapred.InvalidJobConfException;
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.OutputFormatBase;
 import org.apache.hadoop.mapred.RecordWriter;
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.util.Progressable;
-
-import org.apache.hadoop.hbase.HBaseConfiguration;
-import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
-import org.apache.hadoop.hbase.client.HTable;
-import org.apache.hadoop.hbase.io.BatchUpdate;
-
-import org.apache.log4j.Logger;

 /**
  * Convert Map/Reduce output and write it to an HBase table
  */
-public class TableOutputFormat
-  extends OutputFormatBase<Text, BatchUpdate> {
+public class TableOutputFormat extends OutputFormatBase<Text, BatchUpdate> {

   /** JobConf parameter that specifies the output table */
   public static final String OUTPUT_TABLE = "hbase.mapred.outputtable";
-
-  static final Logger LOG = Logger.getLogger(TableOutputFormat.class.getName());
-
-  /** constructor */
-  public TableOutputFormat() {
-    super();
-  }
+  private final Log LOG = LogFactory.getLog(TableOutputFormat.class);

   /**
    * Convert Reduce output (key, value) to (HStoreKey, KeyedDataArrayWritable)
    * and write to an HBase table
    */
   protected class TableRecordWriter
     implements RecordWriter<Text, BatchUpdate> {
     private HTable m_table;

     /**
      * Instantiate a TableRecordWriter with the HBase HClient for writing.
      *
      * @param table
      */
     public TableRecordWriter(HTable table) {
       m_table = table;
     }

     /** {@inheritDoc} */
     public void close(@SuppressWarnings("unused") Reporter reporter) {
       // Nothing to do.
     }

     /** {@inheritDoc} */
     public void write(Text key, BatchUpdate value) throws IOException {
       m_table.commit(value);
     }
   }

   /** {@inheritDoc} */
   @Override
   @SuppressWarnings("unchecked")
   public RecordWriter getRecordWriter(
       @SuppressWarnings("unused") FileSystem ignored,
       JobConf job,
       @SuppressWarnings("unused") String name,
       @SuppressWarnings("unused") Progressable progress) throws IOException {

     // expecting exactly one path

     Text tableName = new Text(job.get(OUTPUT_TABLE));
     HTable table = null;
     try {
       table = new HTable(new HBaseConfiguration(job), tableName);
     } catch(IOException e) {
       LOG.error(e);
       throw e;
     }
     return new TableRecordWriter(table);
   }

   /** {@inheritDoc} */
   @Override
   @SuppressWarnings("unused")
   public void checkOutputSpecs(FileSystem ignored, JobConf job)
   throws FileAlreadyExistsException, InvalidJobConfException, IOException {

     String tableName = job.get(OUTPUT_TABLE);
     if(tableName == null) {
       throw new IOException("Must specify table name");
     }
   }
 }
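
For reference, a minimal, hypothetical driver sketch showing how a map/reduce job could be pointed at TableOutputFormat through the old org.apache.hadoop.mapred API; the driver class, job name, and table name below are made up for illustration and are not part of this commit. With the change above, TableOutputFormat's LOG goes through commons-logging, so its messages are routed to whatever logging backend the surrounding Hadoop deployment has configured (typically still log4j underneath).

import org.apache.hadoop.hbase.io.BatchUpdate;
import org.apache.hadoop.hbase.mapred.TableOutputFormat;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;

/** Hypothetical driver: sends reduce output into an HBase table. */
public class ExampleTableLoadDriver {
  public static void main(String[] args) throws Exception {
    JobConf job = new JobConf(ExampleTableLoadDriver.class);
    job.setJobName("example-table-load");                      // made-up job name
    // Route reduce output to HBase rather than to files on the FileSystem.
    job.setOutputFormat(TableOutputFormat.class);
    job.set(TableOutputFormat.OUTPUT_TABLE, "example_table");  // made-up table name
    // Keys are row names (Text) and values are BatchUpdate edits,
    // matching OutputFormatBase<Text, BatchUpdate> in the class above.
    job.setOutputKeyClass(Text.class);
    job.setOutputValueClass(BatchUpdate.class);
    // ... configure mapper, reducer, and input paths as usual ...
    JobClient.runJob(job);
  }
}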