HBASE-8386 Deprecate TableMapReduceUtil.addDependencyJars(Configuration, Class<?>...)
Signed-off-by: Matteo Bertozzi <matteo.bertozzi@cloudera.com>
commit 431c8c9ad0
parent c59f76485e
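Editor's note on the migration this patch asks for: the Configuration-based
overload ships only the jars of the classes passed explicitly, so callers
could easily miss the job's own mapper, reducer, and key/value classes;
addDependencyJars(Job) reads those off the Job itself. A minimal sketch of
the intended call pattern, assuming an already-configured Job (the
DependencyJarsMigration wrapper is illustrative, not part of this patch):

    import java.io.IOException;
    import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;
    import org.apache.hadoop.mapreduce.Job;

    public class DependencyJarsMigration {
      static void shipDependencies(Job job) throws IOException {
        // Before (deprecated by this patch): ships only the classes named
        // here, so the job's own mapper/reducer/output classes were easy
        // to forget:
        //   TableMapReduceUtil.addDependencyJars(job.getConfiguration(), SomeHelper.class);

        // After: inspects the Job and ships its mapper, reducer, and
        // input/output key/value classes plus the HBase dependency jars.
        TableMapReduceUtil.addDependencyJars(job);
      }
    }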
@@ -785,7 +785,8 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase {
     job.getConfiguration().setBoolean("mapreduce.map.speculative", false);
     TableMapReduceUtil.addDependencyJars(job);
-    TableMapReduceUtil.addDependencyJars(job.getConfiguration(), AbstractHBaseTool.class);
+    TableMapReduceUtil.addDependencyJarsForClasses(job.getConfiguration(),
+        AbstractHBaseTool.class);
     TableMapReduceUtil.initCredentials(job);

     boolean success = jobCompletion(job);
@@ -1296,7 +1297,8 @@ public class IntegrationTestBigLinkedList extends IntegrationTestBase {
     TableMapReduceUtil.initTableMapperJob(getTableName(getConf()).getName(), scan,
         VerifyMapper.class, BytesWritable.class, BytesWritable.class, job);
-    TableMapReduceUtil.addDependencyJars(job.getConfiguration(), AbstractHBaseTool.class);
+    TableMapReduceUtil.addDependencyJarsForClasses(job.getConfiguration(),
+        AbstractHBaseTool.class);

     job.getConfiguration().setBoolean("mapreduce.map.speculative", false);
@@ -348,7 +348,7 @@ public void cleanUpCluster() throws Exception {
     TableMapReduceUtil.addDependencyJars(job);

-    TableMapReduceUtil.addDependencyJars(job.getConfiguration(), AbstractHBaseTool.class);
+    TableMapReduceUtil.addDependencyJarsForClasses(job.getConfiguration(), AbstractHBaseTool.class);
     TableMapReduceUtil.initCredentials(job);
     assertTrue(job.waitForCompletion(true));
     return job;
@@ -372,7 +372,7 @@ public void cleanUpCluster() throws Exception {
     TableMapReduceUtil.initTableMapperJob(
         htd.getTableName().getNameAsString(), scan, VerifyMapper.class,
         BytesWritable.class, BytesWritable.class, job);
-    TableMapReduceUtil.addDependencyJars(job.getConfiguration(), AbstractHBaseTool.class);
+    TableMapReduceUtil.addDependencyJarsForClasses(job.getConfiguration(), AbstractHBaseTool.class);
     int scannerCaching = conf.getInt("verify.scannercaching", SCANNER_CACHING);
     TableMapReduceUtil.setScannerCaching(job, scannerCaching);
@@ -331,7 +331,7 @@ public class IntegrationTestWithCellVisibilityLoadAndVerify extends IntegrationT
     scan.setAuthorizations(new Authorizations(auths));
     TableMapReduceUtil.initTableMapperJob(htd.getTableName().getNameAsString(), scan,
         VerifyMapper.class, NullWritable.class, NullWritable.class, job);
-    TableMapReduceUtil.addDependencyJars(job.getConfiguration(), AbstractHBaseTool.class);
+    TableMapReduceUtil.addDependencyJarsForClasses(job.getConfiguration(), AbstractHBaseTool.class);
     int scannerCaching = conf.getInt("verify.scannercaching", SCANNER_CACHING);
     TableMapReduceUtil.setScannerCaching(job, scannerCaching);
     job.setNumReduceTasks(0);
@@ -362,7 +362,7 @@ public class TableMapReduceUtil {
    */
   public static void addDependencyJars(JobConf job) throws IOException {
     org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil.addHBaseDependencyJars(job);
-    org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil.addDependencyJars(
+    org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil.addDependencyJarsForClasses(
         job,
         // when making changes here, consider also mapreduce.TableMapReduceUtil
         // pull job classes
@@ -616,7 +616,7 @@ public class Import extends Configured implements Tool {
     try {
       Class<? extends Filter> filter = conf.getClass(FILTER_CLASS_CONF_KEY, null, Filter.class);
       if (filter != null) {
-        TableMapReduceUtil.addDependencyJars(conf, filter);
+        TableMapReduceUtil.addDependencyJarsForClasses(conf, filter);
       }
     } catch (Exception e) {
       throw new IOException(e);
@@ -643,7 +643,7 @@ public class Import extends Configured implements Tool {
         fs.deleteOnExit(partitionsPath);
         job.setPartitionerClass(KeyValueWritableComparablePartitioner.class);
         job.setNumReduceTasks(regionLocator.getStartKeys().length);
-        TableMapReduceUtil.addDependencyJars(job.getConfiguration(),
+        TableMapReduceUtil.addDependencyJarsForClasses(job.getConfiguration(),
             com.google.common.base.Preconditions.class);
       }
     } else if (hfileOutPath != null) {
@@ -658,7 +658,7 @@ public class Import extends Configured implements Tool {
       job.setMapOutputKeyClass(ImmutableBytesWritable.class);
       job.setMapOutputValueClass(KeyValue.class);
       HFileOutputFormat2.configureIncrementalLoad(job, table.getTableDescriptor(), regionLocator);
-      TableMapReduceUtil.addDependencyJars(job.getConfiguration(),
+      TableMapReduceUtil.addDependencyJarsForClasses(job.getConfiguration(),
           com.google.common.base.Preconditions.class);
     }
   } else {
@@ -587,7 +587,7 @@ public class ImportTsv extends Configured implements Tool {
           KeyValueSerialization.class.getName());
     }
     TableMapReduceUtil.addDependencyJars(job);
-    TableMapReduceUtil.addDependencyJars(job.getConfiguration(),
+    TableMapReduceUtil.addDependencyJarsForClasses(job.getConfiguration(),
         com.google.common.base.Function.class /* Guava used by TsvParser */);
   }
 }
@@ -339,7 +339,7 @@ public class TableMapReduceUtil {

     if (addDependencyJars) {
       addDependencyJars(job);
-      addDependencyJars(job.getConfiguration(), MetricRegistry.class);
+      addDependencyJarsForClasses(job.getConfiguration(), MetricRegistry.class);
     }

     resetCacheConfig(job.getConfiguration());
@@ -787,7 +787,7 @@ public class TableMapReduceUtil {
         " Continuing without it.");
     }

-    addDependencyJars(conf,
+    addDependencyJarsForClasses(conf,
       // explicitly pull a class from each module
       org.apache.hadoop.hbase.HConstants.class, // hbase-common
       org.apache.hadoop.hbase.protobuf.generated.ClientProtos.class, // hbase-protocol
@@ -835,7 +835,7 @@ public class TableMapReduceUtil {
   public static void addDependencyJars(Job job) throws IOException {
     addHBaseDependencyJars(job.getConfiguration());
     try {
-      addDependencyJars(job.getConfiguration(),
+      addDependencyJarsForClasses(job.getConfiguration(),
           // when making changes here, consider also mapred.TableMapReduceUtil
           // pull job classes
           job.getMapOutputKeyClass(),
@@ -855,9 +855,33 @@ public class TableMapReduceUtil {
    * Add the jars containing the given classes to the job's configuration
    * such that JobClient will ship them to the cluster and add them to
    * the DistributedCache.
+   * @deprecated rely on {@link #addDependencyJars(Job)} instead.
    */
+  @Deprecated
   public static void addDependencyJars(Configuration conf,
       Class<?>... classes) throws IOException {
+    LOG.warn("The addDependencyJars(Configuration, Class<?>...) method has been deprecated since it"
+        + " is easy to use incorrectly. Most users should rely on addDependencyJars(Job) " +
+        "instead. See HBASE-8386 for more details.");
+    addDependencyJarsForClasses(conf, classes);
+  }
+
+  /**
+   * Add the jars containing the given classes to the job's configuration
+   * such that JobClient will ship them to the cluster and add them to
+   * the DistributedCache.
+   *
+   * N.B. that this method at most adds one jar per class given. If there is more than one
+   * jar available containing a class with the same name as a given class, we don't define
+   * which of those jars might be chosen.
+   *
+   * @param conf The Hadoop Configuration to modify
+   * @param classes will add just those dependencies needed to find the given classes
+   * @throws IOException if an underlying library call fails.
+   */
+  @InterfaceAudience.Private
+  public static void addDependencyJarsForClasses(Configuration conf,
+      Class<?>... classes) throws IOException {

     FileSystem localFs = FileSystem.getLocal(conf);
     Set<String> jars = new HashSet<String>();
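The N.B. in the new javadoc is the operative contract: at most one jar is
added per named class, and which jar wins is undefined when a class name
occurs in several jars. The method is @InterfaceAudience.Private because its
one remaining legitimate use is shipping classes the Job object cannot
discover, as in the Import change above. A hedged sketch of that pattern
(the key "my.custom.filter.class" is illustrative, not an HBase constant):

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.hbase.filter.Filter;
    import org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil;

    public class FilterJarShipping {
      // Ship the jar of a Filter implementation configured by class name.
      static void shipFilterJar(Configuration conf) throws IOException {
        Class<? extends Filter> filter =
            conf.getClass("my.custom.filter.class", null, Filter.class);
        if (filter != null) {
          // Adds at most the one jar that contains the filter class.
          TableMapReduceUtil.addDependencyJarsForClasses(conf, filter);
        }
      }
    }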
@@ -282,7 +282,7 @@ public class WALPlayer extends Configured implements Tool {
           RegionLocator regionLocator = conn.getRegionLocator(tableName)) {
         HFileOutputFormat2.configureIncrementalLoad(job, table.getTableDescriptor(), regionLocator);
       }
-      TableMapReduceUtil.addDependencyJars(job.getConfiguration(),
+      TableMapReduceUtil.addDependencyJarsForClasses(job.getConfiguration(),
           com.google.common.base.Preconditions.class);
     } else {
       // output to live cluster
@@ -295,7 +295,7 @@ public class WALPlayer extends Configured implements Tool {
     }
     String codecCls = WALCellCodec.getWALCellCodecClass(conf);
     try {
-      TableMapReduceUtil.addDependencyJars(job.getConfiguration(), Class.forName(codecCls));
+      TableMapReduceUtil.addDependencyJarsForClasses(job.getConfiguration(), Class.forName(codecCls));
     } catch (Exception e) {
       throw new IOException("Cannot determine wal codec class " + codecCls, e);
     }
@@ -499,7 +499,7 @@ public class PerformanceEvaluation extends Configured implements Tool {
     TextOutputFormat.setOutputPath(job, new Path(inputDir.getParent(), "outputs"));

     TableMapReduceUtil.addDependencyJars(job);
-    TableMapReduceUtil.addDependencyJars(job.getConfiguration(),
+    TableMapReduceUtil.addDependencyJarsForClasses(job.getConfiguration(),
         Histogram.class, // yammer metrics
         ObjectMapper.class); // jackson-mapper-asl
@@ -243,7 +243,7 @@ public class TestTableSnapshotInputFormat extends TableSnapshotInputFormatTestBa
     JobConf jobConf = new JobConf(util.getConfiguration());

     jobConf.setJarByClass(util.getClass());
-    org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil.addDependencyJars(jobConf,
+    org.apache.hadoop.hbase.mapreduce.TableMapReduceUtil.addDependencyJarsForClasses(jobConf,
         TestTableSnapshotInputFormat.class);

     TableMapReduceUtil.initTableSnapshotMapJob(snapshotName, COLUMNS,
@@ -281,7 +281,7 @@ public class TestTableSnapshotInputFormat extends TableSnapshotInputFormatTestBa
     Scan scan = new Scan(startRow, endRow); // limit the scan

     job.setJarByClass(util.getClass());
-    TableMapReduceUtil.addDependencyJars(job.getConfiguration(),
+    TableMapReduceUtil.addDependencyJarsForClasses(job.getConfiguration(),
         TestTableSnapshotInputFormat.class);

     TableMapReduceUtil.initTableSnapshotMapperJob(snapshotName,
@@ -73,7 +73,7 @@ This example assumes you use a BASH-compatible shell.
 $ HADOOP_CLASSPATH=`${HBASE_HOME}/bin/hbase classpath` ${HADOOP_HOME}/bin/hadoop jar ${HBASE_HOME}/lib/hbase-server-VERSION.jar rowcounter usertable
 ----

-When the command runs, internally, the HBase JAR finds the dependencies it needs for ZooKeeper, Guava, and its other dependencies on the passed `HADOOP_CLASSPATH` and adds the JARs to the MapReduce job configuration.
+When the command runs, internally, the HBase JAR finds the dependencies it needs and adds them to the MapReduce job configuration.
 See the source at `TableMapReduceUtil#addDependencyJars(org.apache.hadoop.mapreduce.Job)` for how this is done.

 The command `hbase mapredcp` can also help you dump the CLASSPATH entries required by MapReduce, which are the same jars `TableMapReduceUtil#addDependencyJars` would add.
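Usage note (editorial, not part of this patch): the `hbase mapredcp` dump can
feed `HADOOP_CLASSPATH` directly, in the same shape as the listing above:

----
$ HADOOP_CLASSPATH=$(${HBASE_HOME}/bin/hbase mapredcp) ${HADOOP_HOME}/bin/hadoop jar ${HBASE_HOME}/lib/hbase-server-VERSION.jar rowcounter usertable
----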