diff --git a/CHANGES.txt b/CHANGES.txt
index 849af93993b..7786a830e0c 100644
--- a/CHANGES.txt
+++ b/CHANGES.txt
@@ -57,6 +57,7 @@ Trunk (unreleased changes)
     HADOOP-1835 Updated Documentation for HBase setup/installation
                 (Izaak Rubin via Stack)
     HADOOP-1868 Make default configuration more responsive
+    HADOOP-1884 Remove useless debugging log messages from hbase.mapred
 
   Below are the list of changes before 2007-08-18
 
diff --git a/src/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java b/src/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java
index 7183b27531c..12e56d0f89e 100644
--- a/src/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java
+++ b/src/java/org/apache/hadoop/hbase/mapred/TableInputFormat.java
@@ -77,18 +77,14 @@ implements InputFormat, JobConfigurable {
    * @throws IOException
    */
   public TableRecordReader(Text startRow, Text endRow) throws IOException {
-    LOG.debug("start construct");
     m_row = new TreeMap();
     m_scanner = m_table.obtainScanner(m_cols, startRow);
     m_endRow = endRow;
-    LOG.debug("end construct");
   }
 
   /** {@inheritDoc} */
   public void close() throws IOException {
-    LOG.debug("start close");
     m_scanner.close();
-    LOG.debug("end close");
   }
 
   /**
@@ -135,7 +131,6 @@ implements InputFormat, JobConfigurable {
    */
   @SuppressWarnings("unchecked")
   public boolean next(HStoreKey key, MapWritable value) throws IOException {
-    LOG.debug("start next");
     m_row.clear();
     HStoreKey tKey = key;
     boolean hasMore = m_scanner.next(tKey, m_row);
@@ -152,7 +147,6 @@ implements InputFormat, JobConfigurable {
         }
       }
     }
-    LOG.debug("end next");
     return hasMore;
   }
 
@@ -175,8 +169,6 @@ implements InputFormat, JobConfigurable {
    */
   @SuppressWarnings("unused")
   public InputSplit[] getSplits(JobConf job, int numSplits) throws IOException {
-    LOG.debug("start getSplits");
-
     Text[] startKeys = m_table.getStartKeys();
     if(startKeys == null || startKeys.length == 0) {
       throw new IOException("Expecting at least one region");
     }
@@ -185,15 +177,15 @@ implements InputFormat, JobConfigurable {
     for(int i = 0; i < startKeys.length; i++) {
       splits[i] = new TableSplit(m_tableName, startKeys[i],
           ((i + 1) < startKeys.length) ? startKeys[i + 1] : new Text());
-      LOG.debug("split: " + i + "->" + splits[i]);
+      if (LOG.isDebugEnabled()) {
+        LOG.debug("split: " + i + "->" + splits[i]);
+      }
     }
-    LOG.debug("end splits");
     return splits;
   }
 
   /** {@inheritDoc} */
   public void configure(JobConf job) {
-    LOG.debug("start configure");
     Path[] tableNames = job.getInputPaths();
     m_tableName = new Text(tableNames[0].getName());
     String colArg = job.get(COLUMN_LIST);
@@ -207,7 +199,6 @@ implements InputFormat, JobConfigurable {
     } catch (Exception e) {
       LOG.error(e);
     }
-    LOG.debug("end configure");
   }
 
   /** {@inheritDoc} */
diff --git a/src/java/org/apache/hadoop/hbase/mapred/TableMap.java b/src/java/org/apache/hadoop/hbase/mapred/TableMap.java
index 65071d0a028..515b49907ec 100644
--- a/src/java/org/apache/hadoop/hbase/mapred/TableMap.java
+++ b/src/java/org/apache/hadoop/hbase/mapred/TableMap.java
@@ -32,7 +32,6 @@ import org.apache.hadoop.mapred.OutputCollector;
 import org.apache.hadoop.mapred.Reporter;
 import org.apache.hadoop.fs.Path;
 import org.apache.hadoop.hbase.HStoreKey;
-import org.apache.log4j.Logger;
 
 /**
  * Scan an HBase table to sort by a specified sort column.
@@ -41,9 +40,6 @@ import org.apache.log4j.Logger;
  */
 @SuppressWarnings("unchecked")
 public abstract class TableMap extends MapReduceBase implements Mapper {
-
-  private static final Logger LOG = Logger.getLogger(TableMap.class.getName());
-
   private TableOutputCollector m_collector;
 
   /** constructor*/
@@ -86,12 +82,10 @@ public abstract class TableMap extends MapReduceBase implements Mapper {
   public void map(WritableComparable key, Writable value,
       OutputCollector output, Reporter reporter) throws IOException {
-    LOG.debug("start map");
     if(m_collector.collector == null) {
       m_collector.collector = output;
     }
     map((HStoreKey)key, (MapWritable)value, m_collector, reporter);
-    LOG.debug("end map");
   }
 
   /**
diff --git a/src/java/org/apache/hadoop/hbase/mapred/TableOutputFormat.java b/src/java/org/apache/hadoop/hbase/mapred/TableOutputFormat.java
index 197370f4e15..88710029e30 100644
--- a/src/java/org/apache/hadoop/hbase/mapred/TableOutputFormat.java
+++ b/src/java/org/apache/hadoop/hbase/mapred/TableOutputFormat.java
@@ -75,22 +75,13 @@ public class TableOutputFormat
 
     /** {@inheritDoc} */
     public void write(Text key, MapWritable value) throws IOException {
-      LOG.debug("start write");
-
-      // start transaction
-
-      long xid = m_table.startUpdate(key);
+      long xid = m_table.startUpdate(key); // start transaction
       for (Map.Entry e: value.entrySet()) {
         m_table.put(xid, (Text)e.getKey(),
             ((ImmutableBytesWritable)e.getValue()).get());
       }
-
-      // end transaction
-
-      m_table.commit(xid);
-
-      LOG.debug("end write");
+      m_table.commit(xid); // end transaction
     }
   }
 
@@ -105,7 +96,6 @@ public class TableOutputFormat
 
     // expecting exactly one path
 
-    LOG.debug("start get writer");
     Text tableName = new Text(job.get(OUTPUT_TABLE));
     HTable table = null;
     try {
@@ -114,7 +104,6 @@ public class TableOutputFormat
       LOG.error(e);
       throw e;
     }
-    LOG.debug("end get writer");
     return new TableRecordWriter(table);
   }
 
diff --git a/src/java/org/apache/hadoop/hbase/mapred/TableReduce.java b/src/java/org/apache/hadoop/hbase/mapred/TableReduce.java
index f099d08b950..4c81d343e2b 100644
--- a/src/java/org/apache/hadoop/hbase/mapred/TableReduce.java
+++ b/src/java/org/apache/hadoop/hbase/mapred/TableReduce.java
@@ -29,16 +29,12 @@ import org.apache.hadoop.mapred.MapReduceBase;
 import org.apache.hadoop.mapred.OutputCollector;
 import org.apache.hadoop.mapred.Reducer;
 import org.apache.hadoop.mapred.Reporter;
-import org.apache.log4j.Logger;
 
 /**
  * Write a table, sorting by the input key
 */
 @SuppressWarnings("unchecked")
 public abstract class TableReduce extends MapReduceBase implements Reducer {
-  private static final Logger LOG =
-    Logger.getLogger(TableReduce.class.getName());
-
   TableOutputCollector m_collector;
 
   /** Constructor */
@@ -71,12 +67,11 @@ public abstract class TableReduce extends MapReduceBase implements Reducer {
   @SuppressWarnings("unchecked")
   public void reduce(WritableComparable key, Iterator values,
       OutputCollector output, Reporter reporter) throws IOException {
-    LOG.debug("start reduce");
+
     if(m_collector.collector == null) {
       m_collector.collector = output;
     }
     reduce((Text)key, values, m_collector, reporter);
-    LOG.debug("end reduce");
   }
 
   /**
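The one debug call that survives, in getSplits(), is wrapped in a LOG.isDebugEnabled()
guard so that the per-split message string is only concatenated when debug output is
actually enabled; the removed start/end trace calls carried no information and are
dropped rather than guarded. A minimal sketch of the guard pattern, assuming a
commons-logging Log as used elsewhere in Hadoop (the class and method names here are
illustrative only, not part of the patch):

  import org.apache.commons.logging.Log;
  import org.apache.commons.logging.LogFactory;

  public class DebugGuardSketch {
    private static final Log LOG = LogFactory.getLog(DebugGuardSketch.class);

    void logSplit(int i, Object split) {
      // Build the "split: i->split" string only when DEBUG is enabled,
      // keeping the concatenation cost off the normal code path.
      if (LOG.isDebugEnabled()) {
        LOG.debug("split: " + i + "->" + split);
      }
    }
  }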