MAPREDUCE-4809. Change visibility of classes for pluggable sort changes. (masokan via tucu)
git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/branches/branch-2@1438794 13f79535-47bb-0310-9956-ffa450edef68
parent: c4c039bb29
commit: 7a6b9f3609
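Every hunk in the diff below applies the same pattern: a previously package-private class or interface in the MapReduce internals is made public so that pluggable sort/shuffle code in other packages can reference it, and the widened visibility is qualified with @InterfaceAudience.LimitedPrivate({"MapReduce"}) and @InterfaceStability.Unstable so it is documented as a limited, evolving extension point rather than a stable user-facing API. A minimal sketch of that pattern (the class name below is hypothetical and not part of this commit):

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;

// Before: package-private, so only code in the same package could compile against it.
// class ExamplePluggableHook { ... }

// After: public, but explicitly marked as a limited-private, unstable interface.
@InterfaceAudience.LimitedPrivate({"MapReduce"})
@InterfaceStability.Unstable
public class ExamplePluggableHook {
  // members unchanged; only the visibility and the audience annotations differ
}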
@@ -50,6 +50,9 @@ Release 2.0.3-alpha - Unreleased
 
     MAPREDUCE-4949. Enable multiple pi jobs to run in parallel. (sandyr via tucu)
 
+    MAPREDUCE-4809. Change visibility of classes for pluggable sort changes.
+    (masokan via tucu)
+
   OPTIMIZATIONS
 
   BUG FIXES
@@ -34,6 +34,8 @@ import java.util.concurrent.locks.ReentrantLock;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.fs.FSDataInputStream;
 import org.apache.hadoop.fs.FSDataOutputStream;
 import org.apache.hadoop.fs.FileSystem;
@@ -71,7 +73,9 @@ import org.apache.hadoop.util.StringInterner;
 import org.apache.hadoop.util.StringUtils;
 
 /** A Map task. */
-class MapTask extends Task {
+@InterfaceAudience.LimitedPrivate({"MapReduce"})
+@InterfaceStability.Unstable
+public class MapTask extends Task {
   /**
    * The size of each record in the index file for the map-outputs.
    */
@@ -583,9 +583,9 @@ abstract public class Task implements Writable, Configurable {
     return status;
   }
 
-  @InterfaceAudience.Private
+  @InterfaceAudience.LimitedPrivate({"MapReduce"})
   @InterfaceStability.Unstable
-  protected class TaskReporter
+  public class TaskReporter
       extends org.apache.hadoop.mapreduce.StatusReporter
       implements Runnable, Reporter {
     private TaskUmbilicalProtocol umbilical;
@@ -1465,9 +1465,9 @@ abstract public class Task implements Writable, Configurable {
     return reducerContext;
   }
 
-  @InterfaceAudience.Private
+  @InterfaceAudience.LimitedPrivate({"MapReduce"})
   @InterfaceStability.Unstable
-  protected static abstract class CombinerRunner<K,V> {
+  public static abstract class CombinerRunner<K,V> {
    protected final Counters.Counter inputCounter;
    protected final JobConf job;
    protected final TaskReporter reporter;
@@ -1485,13 +1485,13 @@ abstract public class Task implements Writable, Configurable {
      * @param iterator the key/value pairs to use as input
      * @param collector the output collector
      */
-    abstract void combine(RawKeyValueIterator iterator,
+    public abstract void combine(RawKeyValueIterator iterator,
                           OutputCollector<K,V> collector
                          ) throws IOException, InterruptedException,
                                   ClassNotFoundException;
 
    @SuppressWarnings("unchecked")
-    static <K,V>
+    public static <K,V>
    CombinerRunner<K,V> create(JobConf job,
                               TaskAttemptID taskId,
                               Counters.Counter inputCounter,
@@ -1541,7 +1541,7 @@ abstract public class Task implements Writable, Configurable {
     }
 
     @SuppressWarnings("unchecked")
-    protected void combine(RawKeyValueIterator kvIter,
+    public void combine(RawKeyValueIterator kvIter,
                            OutputCollector<K,V> combineCollector
                            ) throws IOException {
       Reducer<K,V,K,V> combiner =
@@ -1610,7 +1610,7 @@ abstract public class Task implements Writable, Configurable {
 
     @SuppressWarnings("unchecked")
     @Override
-    void combine(RawKeyValueIterator iterator,
+    public void combine(RawKeyValueIterator iterator,
                  OutputCollector<K,V> collector
                  ) throws IOException, InterruptedException,
                           ClassNotFoundException {
@@ -17,9 +17,14 @@
  */
 package org.apache.hadoop.mapreduce.task.reduce;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
 /**
  * An interface for reporting exceptions to other threads
  */
-interface ExceptionReporter {
+@InterfaceAudience.LimitedPrivate({"MapReduce"})
+@InterfaceStability.Unstable
+public interface ExceptionReporter {
   void reportException(Throwable t);
 }
@@ -20,9 +20,14 @@ package org.apache.hadoop.mapreduce.task.reduce;
 import java.util.ArrayList;
 import java.util.List;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
 import org.apache.hadoop.mapreduce.TaskAttemptID;
 
-class MapHost {
+@InterfaceAudience.LimitedPrivate({"MapReduce"})
+@InterfaceStability.Unstable
+public class MapHost {
 
   public static enum State {
     IDLE,               // No map outputs available
@@ -24,6 +24,8 @@ import java.util.concurrent.atomic.AtomicInteger;
 
 import org.apache.commons.logging.Log;
 import org.apache.commons.logging.LogFactory;
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.LocalDirAllocator;
@@ -33,7 +35,9 @@ import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapred.MapOutputFile;
 import org.apache.hadoop.mapreduce.TaskAttemptID;
 
-class MapOutput<K,V> {
+@InterfaceAudience.LimitedPrivate({"MapReduce"})
+@InterfaceStability.Unstable
+public class MapOutput<K,V> {
   private static final Log LOG = LogFactory.getLog(MapOutput.class);
   private static AtomicInteger ID = new AtomicInteger(0);
 
@@ -62,7 +66,7 @@ class MapOutput<K,V> {
 
   private final boolean primaryMapOutput;
 
-  MapOutput(TaskAttemptID mapId, MergeManager<K,V> merger, long size,
+  public MapOutput(TaskAttemptID mapId, MergeManager<K,V> merger, long size,
             JobConf conf, LocalDirAllocator localDirAllocator,
             int fetcher, boolean primaryMapOutput, MapOutputFile mapOutputFile)
          throws IOException {
@@ -87,7 +91,7 @@ class MapOutput<K,V> {
     this.primaryMapOutput = primaryMapOutput;
   }
 
-  MapOutput(TaskAttemptID mapId, MergeManager<K,V> merger, int size,
+  public MapOutput(TaskAttemptID mapId, MergeManager<K,V> merger, int size,
             boolean primaryMapOutput) {
     this.id = ID.incrementAndGet();
     this.mapId = mapId;
@@ -60,8 +60,8 @@ import org.apache.hadoop.util.ReflectionUtils;
 
 import com.google.common.annotations.VisibleForTesting;
 
-@SuppressWarnings(value={"unchecked"})
-@InterfaceAudience.Private
+@SuppressWarnings(value={"unchecked", "deprecation"})
+@InterfaceAudience.LimitedPrivate({"MapReduce"})
 @InterfaceStability.Unstable
 public class MergeManager<K, V> {
 
@@ -39,7 +39,7 @@ import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.TaskAttemptID;
 import org.apache.hadoop.util.Progress;
 
-@InterfaceAudience.LimitedPrivate("mapreduce")
+@InterfaceAudience.LimitedPrivate({"MapReduce"})
 @InterfaceStability.Unstable
 @SuppressWarnings({"unchecked", "rawtypes"})
 public class Shuffle<K, V> implements ShuffleConsumerPlugin<K, V>, ExceptionReporter {
@@ -17,6 +17,9 @@
  */
 package org.apache.hadoop.mapreduce.task.reduce;
 
+import org.apache.hadoop.classification.InterfaceAudience;
+import org.apache.hadoop.classification.InterfaceStability;
+
 import org.apache.hadoop.mapred.JobConf;
 import org.apache.hadoop.mapreduce.MRJobConfig;
 import org.apache.hadoop.mapreduce.TaskAttemptID;
@@ -25,7 +28,9 @@ import org.apache.hadoop.metrics.MetricsRecord;
 import org.apache.hadoop.metrics.MetricsUtil;
 import org.apache.hadoop.metrics.Updater;
 
-class ShuffleClientMetrics implements Updater {
+@InterfaceAudience.LimitedPrivate({"MapReduce"})
+@InterfaceStability.Unstable
+public class ShuffleClientMetrics implements Updater {
 
   private MetricsRecord shuffleMetrics = null;
   private int numFailedFetches = 0;
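Taken together, these visibility changes let a shuffle or sort plugin that lives outside org.apache.hadoop.mapreduce.task.reduce compile directly against types such as ExceptionReporter and MapHost. A rough sketch, using only the signatures visible in this diff (the package and class below are hypothetical and not part of this commit):

package com.example.shuffle; // hypothetical plugin package

import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.mapreduce.task.reduce.ExceptionReporter;
import org.apache.hadoop.mapreduce.task.reduce.MapHost;

@InterfaceAudience.LimitedPrivate({"MapReduce"})
@InterfaceStability.Unstable
public class CustomFetchErrorHandler implements ExceptionReporter {

  // Before this commit, ExceptionReporter and MapHost were package-private,
  // so a class like this could not have been compiled outside the Hadoop source tree.
  public void fetchFailed(MapHost host, Throwable cause) {
    reportException(cause);
  }

  @Override
  public void reportException(Throwable t) {
    // Minimal handling for the sketch; a real plugin would surface the failure
    // to the task so it can retry or fail.
    t.printStackTrace();
  }
}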