From e6905a14446e9d795a797e091d5b35a75fe370d6 Mon Sep 17 00:00:00 2001
From: Jerry He <jerryjch@apache.org>
Date: Tue, 29 Sep 2015 19:48:02 -0700
Subject: [PATCH] HBASE-14394 Properly close the connection after reading
 records from table: addendum

---
 .../mapreduce/MultiTableInputFormatBase.java  | 53 +++++++++++++++----
 .../hbase/mapreduce/TableRecordReader.java    |  9 +---
 2 files changed, 45 insertions(+), 17 deletions(-)

diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java
index 83dca4bc340..ff690c83bd5 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultiTableInputFormatBase.java
@@ -29,7 +29,6 @@ import org.apache.hadoop.hbase.classification.InterfaceStability;
 import org.apache.hadoop.hbase.HRegionInfo;
 import org.apache.hadoop.hbase.HRegionLocation;
 import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.TableName;
 import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.ConnectionFactory;
 import org.apache.hadoop.hbase.client.RegionLocator;
@@ -92,29 +91,65 @@ public abstract class MultiTableInputFormatBase extends
         + " previous error. Please look at the previous logs lines from"
         + " the task's full log for more details.");
     }
-    Connection connection = ConnectionFactory.createConnection(context.getConfiguration());
+    final Connection connection = ConnectionFactory.createConnection(context.getConfiguration());
     Table table = connection.getTable(tSplit.getTable());
-    TableRecordReader trr = this.tableRecordReader;
+    if (this.tableRecordReader == null) {
+      this.tableRecordReader = new TableRecordReader();
+    }
+    final TableRecordReader trr = this.tableRecordReader;
     try {
-      // if no table record reader was provided use default
-      if (trr == null) {
-        trr = new TableRecordReader();
-      }
       Scan sc = tSplit.getScan();
       sc.setStartRow(tSplit.getStartRow());
       sc.setStopRow(tSplit.getEndRow());
       trr.setScan(sc);
       trr.setTable(table);
-      trr.setConnection(connection);
+      return new RecordReader<ImmutableBytesWritable, Result>() {
+
+        @Override
+        public void close() throws IOException {
+          trr.close();
+          if (connection != null) {
+            connection.close();
+          }
+        }
+
+        @Override
+        public ImmutableBytesWritable getCurrentKey() throws IOException, InterruptedException {
+          return trr.getCurrentKey();
+        }
+
+        @Override
+        public Result getCurrentValue() throws IOException, InterruptedException {
+          return trr.getCurrentValue();
+        }
+
+        @Override
+        public float getProgress() throws IOException, InterruptedException {
+          return trr.getProgress();
+        }
+
+        @Override
+        public void initialize(InputSplit inputsplit, TaskAttemptContext context)
+            throws IOException, InterruptedException {
+          trr.initialize(inputsplit, context);
+        }
+
+        @Override
+        public boolean nextKeyValue() throws IOException, InterruptedException {
+          return trr.nextKeyValue();
+        }
+      };
     } catch (IOException ioe) {
       // If there is an exception make sure that all
       // resources are closed and released.
       trr.close();
+      if (connection != null) {
+        connection.close();
+      }
       throw ioe;
     }
-    return trr;
   }
 
   /**
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReader.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReader.java
index 9ff90e78e8d..21dc21387ff 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReader.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/TableRecordReader.java
@@ -22,7 +22,6 @@ import java.io.IOException;
 
 import org.apache.hadoop.hbase.classification.InterfaceAudience;
 import org.apache.hadoop.hbase.classification.InterfaceStability;
-import org.apache.hadoop.hbase.client.Connection;
 import org.apache.hadoop.hbase.client.Result;
 import org.apache.hadoop.hbase.client.Scan;
 import org.apache.hadoop.hbase.client.Table;
@@ -41,7 +40,6 @@ public class TableRecordReader
 extends RecordReader<ImmutableBytesWritable, Result> {
 
   private TableRecordReaderImpl recordReaderImpl = new TableRecordReaderImpl();
-  private Connection connection = null;
 
   /**
    * Restart from survivable exceptions by creating a new scanner.
@@ -87,10 +85,8 @@ extends RecordReader<ImmutableBytesWritable, Result> {
    * @see org.apache.hadoop.mapreduce.RecordReader#close()
    */
   @Override
-  public void close() throws IOException {
+  public void close() {
     this.recordReaderImpl.close();
-    if (this.connection != null)
-      this.connection.close();
   }
 
   /**
@@ -162,7 +158,4 @@ extends RecordReader<ImmutableBytesWritable, Result> {
     return this.recordReaderImpl.getProgress();
   }
 
-  public void setConnection(Connection connection) {
-    this.connection = connection;
-  }
 }