From 75a96a140a0d8b5ba9636e76751f90194134266a Mon Sep 17 00:00:00 2001
From: Vinod Kumar Vavilapalli
Date: Fri, 31 May 2013 23:57:42 +0000
Subject: [PATCH] MAPREDUCE-5231. Bring back a constructor in mapred's
 DBInputFormat.DBRecordReader for binary compatibility with 1.x mapred APIs.
 Contributed by Zhijie Shen.

svn merge --ignore-ancestry -c 1488436 ../../trunk/

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1488437 13f79535-47bb-0310-9956-ffa450edef68
---
 hadoop-mapreduce-project/CHANGES.txt          |  4 ++++
 .../hadoop/mapred/lib/db/DBInputFormat.java   | 13 ++++++++++-
 .../mapreduce/lib/db/DBInputFormat.java       | 22 +++++++++----------
 3 files changed, 26 insertions(+), 13 deletions(-)

diff --git a/hadoop-mapreduce-project/CHANGES.txt b/hadoop-mapreduce-project/CHANGES.txt
index 0a705e10748..baba647e055 100644
--- a/hadoop-mapreduce-project/CHANGES.txt
+++ b/hadoop-mapreduce-project/CHANGES.txt
@@ -118,6 +118,10 @@ Release 2.0.5-beta - UNRELEASED
     MAPREDUCE-5275. Bring back a couple of APIs in mapreduce.security.TokenCache
     for binary compatibility with 1.x mapreduce APIs. (Mayank Bansal via vinodkv)
 
+    MAPREDUCE-5231. Bring back a constructor in mapred's
+    DBInputFormat.DBRecordReader for binary compatibility with 1.x mapred APIs.
+    (Zhijie Shen via vinodkv)
+
   OPTIMIZATIONS
 
     MAPREDUCE-4974. Optimising the LineRecordReader initialize() method
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/db/DBInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/db/DBInputFormat.java
index fab711e62ca..9b32530558e 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/db/DBInputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapred/lib/db/DBInputFormat.java
@@ -37,6 +37,7 @@ import org.apache.hadoop.mapreduce.Job;
 
 @InterfaceAudience.Public
 @InterfaceStability.Stable
+@SuppressWarnings("deprecation")
 public class DBInputFormat<T extends DBWritable>
     extends org.apache.hadoop.mapreduce.lib.db.DBInputFormat<T>
     implements InputFormat<LongWritable, T>, JobConfigurable {
@@ -48,6 +49,17 @@ public class DBInputFormat
   protected class DBRecordReader extends
       org.apache.hadoop.mapreduce.lib.db.DBRecordReader<T> implements
       RecordReader<LongWritable, T> {
+    /**
+     * The constructor is kept to be compatible with M/R 1.x
+     *
+     * @param split The InputSplit to read data for
+     * @throws SQLException
+     */
+    protected DBRecordReader(DBInputSplit split, Class<T> inputClass,
+        JobConf job) throws SQLException {
+      super(split, inputClass, job, connection, dbConf, conditions, fieldNames, tableName);
+    }
+
     /**
      * @param split The InputSplit to read data for
      * @throws SQLException
      */
@@ -152,7 +164,6 @@ public class DBInputFormat
   }
 
   /** {@inheritDoc} */
-  @SuppressWarnings("unchecked")
   public RecordReader<LongWritable, T> getRecordReader(InputSplit split,
       JobConf job, Reporter reporter) throws IOException {
 
diff --git a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/db/DBInputFormat.java b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/db/DBInputFormat.java
index d1b9d763948..c0530c253a7 100644
--- a/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/db/DBInputFormat.java
+++ b/hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-core/src/main/java/org/apache/hadoop/mapreduce/lib/db/DBInputFormat.java
@@ -32,6 +32,10 @@ import java.util.List;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+import org.apache.hadoop.conf.Configurable;
+import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.io.LongWritable;
 import org.apache.hadoop.io.Writable;
 import org.apache.hadoop.mapreduce.InputFormat;
@@ -41,11 +45,6 @@ import org.apache.hadoop.mapreduce.JobContext;
 import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.RecordReader;
 import org.apache.hadoop.mapreduce.TaskAttemptContext;
-import org.apache.hadoop.util.ReflectionUtils;
-import org.apache.hadoop.classification.InterfaceAudience;
-import org.apache.hadoop.classification.InterfaceStability;
-import org.apache.hadoop.conf.Configurable;
-import org.apache.hadoop.conf.Configuration;
 
 /**
  * A InputFormat that reads input data from an SQL table.
@@ -62,7 +61,7 @@ public class DBInputFormat
 
   private static final Log LOG = LogFactory.getLog(DBInputFormat.class);
 
-  private String dbProductName = "DEFAULT";
+  protected String dbProductName = "DEFAULT";
 
   /**
   * A Class that does nothing, implementing DBWritable
@@ -144,15 +143,15 @@ public class DBInputFormat
     }
   }
 
-  private String conditions;
+  protected String conditions;
 
-  private Connection connection;
+  protected Connection connection;
 
-  private String tableName;
+  protected String tableName;
 
-  private String[] fieldNames;
+  protected String[] fieldNames;
 
-  private DBConfiguration dbConf;
+  protected DBConfiguration dbConf;
 
   /** {@inheritDoc} */
   public void setConf(Configuration conf) {
@@ -230,7 +229,6 @@ public class DBInputFormat
   }
 
   /** {@inheritDoc} */
-  @SuppressWarnings("unchecked")
  public RecordReader<LongWritable, T> createRecordReader(InputSplit split,
      TaskAttemptContext context) throws IOException, InterruptedException {
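
For context on what this change preserves, the sketch below shows the kind of mapred 1.x user code that depends on the restored three-argument DBRecordReader constructor. It is a minimal illustration and not part of the patch: MyRecord, MyDBInputFormat, MyDBRecordReader, and the single "id" column are invented names; only the DBRecordReader(DBInputSplit, Class, JobConf) signature and its delegation to the superclass come from the change above.

// Hypothetical user code written against the mapred 1.x API.
// Only the super(...) call's target constructor is defined by this patch.
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.sql.PreparedStatement;
import java.sql.ResultSet;
import java.sql.SQLException;

import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.lib.db.DBInputFormat;
import org.apache.hadoop.mapred.lib.db.DBWritable;

// Value bean read from the database; implements Writable for the framework
// and DBWritable for JDBC column mapping. Assumes a single numeric "id" column.
class MyRecord implements Writable, DBWritable {
  private long id;

  @Override
  public void write(DataOutput out) throws IOException {
    out.writeLong(id);
  }

  @Override
  public void readFields(DataInput in) throws IOException {
    id = in.readLong();
  }

  @Override
  public void write(PreparedStatement statement) throws SQLException {
    statement.setLong(1, id);
  }

  @Override
  public void readFields(ResultSet resultSet) throws SQLException {
    id = resultSet.getLong(1);
  }
}

public class MyDBInputFormat extends DBInputFormat<MyRecord> {

  // A 1.x-style reader subclass. Its super(...) call resolves to the
  // protected DBRecordReader(DBInputSplit, Class<T>, JobConf) constructor
  // restored by this patch; a class compiled against 1.x that relies on it
  // would otherwise fail at runtime on 2.x with NoSuchMethodError.
  protected class MyDBRecordReader extends DBRecordReader {
    protected MyDBRecordReader(DBInputSplit split, JobConf job)
        throws SQLException {
      super(split, MyRecord.class, job);
    }
  }
}

The restored constructor also explains the second half of the patch: it forwards the enclosing format's connection, dbConf, conditions, fieldNames, and tableName to the eight-argument superclass constructor, which is why those fields in org.apache.hadoop.mapreduce.lib.db.DBInputFormat are widened from private to protected here.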