MAPREDUCE-3771. Un-deprecated the old mapred apis, port of MAPREDUCE-1735.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1239482 13f79535-47bb-0310-9956-ffa450edef68
Arun Murthy 2012-02-02 08:37:58 +00:00
parent c909aedbc1
commit cc74881acb
135 changed files with 99 additions and 482 deletions

@@ -223,6 +223,9 @@ Release 0.23.1 - Unreleased
MAPREDUCE-3774. Moved yarn-default.xml to hadoop-yarn-common from
hadoop-server-common. (Mahadev Konar via vinodkv)
MAPREDUCE-3771. Un-deprecated the old mapred apis, port of MAPREDUCE-1735.
(acmurthy)
OPTIMIZATIONS
MAPREDUCE-3567. Extraneous JobConf objects in AM heap. (Vinod Kumar

@@ -119,9 +119,7 @@ import org.apache.hadoop.mapreduce.Job;
* @see org.apache.hadoop.mapred.JobConf
* @see org.apache.hadoop.mapred.JobClient
* @see org.apache.hadoop.mapreduce.Job
* @deprecated Use methods on {@link Job}.
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class DistributedCache extends
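
For context, the hunk above drops the "@deprecated Use methods on Job" tag, keeping the filecache API public. A minimal usage sketch against the old JobConf-based API (the class name CacheSetupSketch and the HDFS path are hypothetical, not part of this commit):

import java.net.URI;
import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.JobConf;

public class CacheSetupSketch {  // hypothetical driver
  public static void main(String[] args) throws Exception {
    JobConf conf = new JobConf();
    // Register an HDFS file to be copied to every task's local disk.
    DistributedCache.addCacheFile(new URI("/user/me/lookup.dat"), conf);
    // Inside a running task (e.g. in Mapper.configure) the local copies
    // are resolved with:
    Path[] localCopies = DistributedCache.getLocalCacheFiles(conf);
  }
}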

@@ -16,8 +16,4 @@
* limitations under the License.
*
*/
/**
* <b>Deprecated.</b> Use {@link org.apache.hadoop.mapreduce.Job} instead.
*/
@Deprecated
package org.apache.hadoop.filecache;

@@ -62,9 +62,7 @@ import org.apache.hadoop.mapreduce.Cluster.JobTrackerStatus;
* {@link JobClient#getClusterStatus()}.</p>
*
* @see JobClient
* @deprecated Use {@link ClusterMetrics} or {@link TaskTrackerInfo} instead
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class ClusterStatus implements Writable {

@@ -18,27 +18,24 @@
package org.apache.hadoop.mapred;
import static org.apache.hadoop.mapreduce.util.CountersStrings.parseEscapedCompactString;
import static org.apache.hadoop.mapreduce.util.CountersStrings.toEscapedCompactString;
import java.text.ParseException;
import org.apache.commons.logging.Log;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableUtils;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormatCounter;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.mapreduce.FileSystemCounter;
import org.apache.hadoop.mapreduce.counters.AbstractCounterGroup;
import org.apache.hadoop.mapreduce.counters.AbstractCounters;
import org.apache.hadoop.mapreduce.counters.CounterGroupBase;
import org.apache.hadoop.mapreduce.counters.CounterGroupFactory;
import org.apache.hadoop.mapreduce.counters.FrameworkCounterGroup;
import org.apache.hadoop.mapreduce.counters.FileSystemCounterGroup;
import org.apache.hadoop.mapreduce.counters.FrameworkCounterGroup;
import org.apache.hadoop.mapreduce.counters.GenericCounter;
import org.apache.hadoop.mapreduce.counters.Limits;
import static org.apache.hadoop.mapreduce.util.CountersStrings.*;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormatCounter;
/**
* A set of named counters.
@@ -49,9 +46,7 @@ import static org.apache.hadoop.mapreduce.util.CountersStrings.*;
*
* <p><code>Counters</code> are bunched into {@link Group}s, each comprising of
* counters from a particular <code>Enum</code> class.
* @deprecated Use {@link org.apache.hadoop.mapreduce.Counters} instead.
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class Counters
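
The javadoc above notes that counters are bunched into Groups, one per Enum class. A minimal sketch of how a task increments such counters through the old-API Reporter (CountingMapper and the Records enum are hypothetical):

import java.io.IOException;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;

public class CountingMapper extends MapReduceBase
    implements Mapper<LongWritable, Text, Text, LongWritable> {
  // Each enum class becomes one counter Group; each constant one counter.
  enum Records { SEEN, EMPTY }

  public void map(LongWritable key, Text value,
      OutputCollector<Text, LongWritable> out, Reporter reporter)
      throws IOException {
    reporter.incrCounter(Records.SEEN, 1);
    if (value.getLength() == 0) {
      reporter.incrCounter(Records.EMPTY, 1);
    }
  }
}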

@@ -27,7 +27,6 @@ import org.apache.hadoop.classification.InterfaceStability;
* Used when target file already exists for any operation and
* is not configured to be overwritten.
*/
@Deprecated // may be removed after 0.23
@InterfaceAudience.Public
@InterfaceStability.Stable
public class FileAlreadyExistsException

@@ -54,10 +54,7 @@ import org.apache.hadoop.util.StringUtils;
* Subclasses of <code>FileInputFormat</code> can also override the
* {@link #isSplitable(FileSystem, Path)} method to ensure input-files are
* not split-up and are processed as a whole by {@link Mapper}s.
* @deprecated Use {@link org.apache.hadoop.mapreduce.lib.input.FileInputFormat}
* instead.
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public abstract class FileInputFormat<K, V> implements InputFormat<K, V> {
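
The javadoc above points at isSplitable(FileSystem, Path) as the hook for keeping files whole. A sketch of such an override (the subclass name is hypothetical; it reuses TextInputFormat's record reader):

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.TextInputFormat;

// Hypothetical: hand each input file to a single mapper, unsplit.
public class WholeFileTextInputFormat extends TextInputFormat {
  @Override
  protected boolean isSplitable(FileSystem fs, Path file) {
    return false; // never split, regardless of block size
  }
}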

@@ -29,10 +29,7 @@ import org.apache.hadoop.fs.Path;
/** A section of an input file. Returned by {@link
* InputFormat#getSplits(JobConf, int)} and passed to
* {@link InputFormat#getRecordReader(InputSplit,JobConf,Reporter)}.
* @deprecated Use {@link org.apache.hadoop.mapreduce.lib.input.FileSplit}
* instead.
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class FileSplit extends org.apache.hadoop.mapreduce.InputSplit

@@ -30,7 +30,6 @@ import org.apache.hadoop.classification.InterfaceStability;
* @see TaskID
* @see TaskAttemptID
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public abstract class ID extends org.apache.hadoop.mapreduce.ID {

@@ -63,9 +63,7 @@ import org.apache.hadoop.fs.FileSystem;
* @see RecordReader
* @see JobClient
* @see FileInputFormat
* @deprecated Use {@link org.apache.hadoop.mapreduce.InputFormat} instead.
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public interface InputFormat<K, V> {

@@ -34,9 +34,7 @@ import org.apache.hadoop.io.Writable;
*
* @see InputFormat
* @see RecordReader
* @deprecated Use {@link org.apache.hadoop.mapreduce.InputSplit} instead.
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public interface InputSplit extends Writable {

@@ -29,6 +29,9 @@ import java.util.List;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.ClusterStatus.BlackListInfo;
import org.apache.hadoop.mapreduce.Cluster;
import org.apache.hadoop.mapreduce.ClusterMetrics;
@@ -40,13 +43,10 @@ import org.apache.hadoop.mapreduce.filecache.DistributedCache;
import org.apache.hadoop.mapreduce.security.token.delegation.DelegationTokenIdentifier;
import org.apache.hadoop.mapreduce.tools.CLI;
import org.apache.hadoop.mapreduce.util.ConfigUtil;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.SecretManager.InvalidToken;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenRenewer;
import org.apache.hadoop.security.token.SecretManager.InvalidToken;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
@@ -132,9 +132,7 @@ import org.apache.hadoop.util.ToolRunner;
* @see ClusterStatus
* @see Tool
* @see DistributedCache
* @deprecated Use {@link Job} and {@link Cluster} instead
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class JobClient extends CLI {
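
A minimal sketch of driving a job through JobClient, as the javadoc above describes (SubmitSketch is hypothetical and the actual mapper/reducer/path setup is elided):

import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RunningJob;

public class SubmitSketch {
  public static void main(String[] args) throws Exception {
    JobConf conf = new JobConf(SubmitSketch.class);
    conf.setJobName("submit-sketch");
    // ... input/output formats, paths, mapper and reducer omitted ...
    // runJob blocks, polling progress until the job finishes;
    // JobClient.submitJob(conf) would return a RunningJob immediately.
    RunningJob job = JobClient.runJob(conf);
    System.out.println("successful: " + job.isSuccessful());
  }
}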

@@ -20,7 +20,6 @@ package org.apache.hadoop.mapred;
import java.io.IOException;
import java.net.URL;
import java.net.URLDecoder;
import java.util.Enumeration;
@@ -28,24 +27,26 @@ import java.util.regex.Pattern;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.mapreduce.filecache.DistributedCache;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.*;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.RawComparator;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.io.WritableComparator;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.mapred.lib.HashPartitioner;
import org.apache.hadoop.mapred.lib.IdentityMapper;
import org.apache.hadoop.mapred.lib.IdentityReducer;
import org.apache.hadoop.mapred.lib.HashPartitioner;
import org.apache.hadoop.mapred.lib.KeyFieldBasedComparator;
import org.apache.hadoop.mapred.lib.KeyFieldBasedPartitioner;
import org.apache.hadoop.mapreduce.MRConfig;
import org.apache.hadoop.mapreduce.MRJobConfig;
import org.apache.hadoop.mapreduce.filecache.DistributedCache;
import org.apache.hadoop.mapreduce.util.ConfigUtil;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.util.ReflectionUtils;
@@ -107,9 +108,7 @@ import org.apache.log4j.Level;
* @see ClusterStatus
* @see Tool
* @see DistributedCache
* @deprecated Use {@link Configuration} instead
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class JobConf extends Configuration {
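
Since JobConf stays the primary user-facing configuration object after this commit, a complete (if trivial) driver may help as context. A sketch assuming only classes that ship with the old API, with a hypothetical driver name:

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.FileOutputFormat;
import org.apache.hadoop.mapred.JobClient;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.TextInputFormat;
import org.apache.hadoop.mapred.lib.IdentityMapper;
import org.apache.hadoop.mapred.lib.IdentityReducer;

public class PassThroughDriver {  // hypothetical
  public static void main(String[] args) throws Exception {
    JobConf conf = new JobConf(PassThroughDriver.class);
    conf.setJobName("pass-through");
    conf.setInputFormat(TextInputFormat.class);
    conf.setOutputKeyClass(LongWritable.class); // line offsets
    conf.setOutputValueClass(Text.class);       // line contents
    conf.setMapperClass(IdentityMapper.class);
    conf.setReducerClass(IdentityReducer.class);
    FileInputFormat.setInputPaths(conf, new Path(args[0]));
    FileOutputFormat.setOutputPath(conf, new Path(args[1]));
    JobClient.runJob(conf);
  }
}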

@@ -22,7 +22,6 @@ import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/** That what may be configured. */
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public interface JobConfigurable {

@@ -22,10 +22,6 @@ import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.util.Progressable;
/**
* @deprecated Use {@link org.apache.hadoop.mapreduce.JobContext} instead.
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public interface JobContext extends org.apache.hadoop.mapreduce.JobContext {

@@ -21,10 +21,6 @@ import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.util.Progressable;
/**
* @deprecated Use {@link org.apache.hadoop.mapreduce.JobContext} instead.
*/
@Deprecated
@InterfaceAudience.Private
@InterfaceStability.Unstable
public class JobContextImpl

@@ -41,7 +41,6 @@ import org.apache.hadoop.classification.InterfaceStability;
* @see TaskID
* @see TaskAttemptID
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class JobID extends org.apache.hadoop.mapreduce.JobID {

@@ -22,9 +22,7 @@ import org.apache.hadoop.classification.InterfaceStability;
/**
* Used to describe the priority of the running job.
* @deprecated Use {@link org.apache.hadoop.mapreduce.JobPriority} instead
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public enum JobPriority {

@@ -29,9 +29,7 @@ import org.apache.hadoop.mapreduce.QueueState;
/**
* Class that contains the information regarding the Job Queues which are
* maintained by the Hadoop Map/Reduce framework.
* @deprecated Use {@link QueueInfo} instead
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class JobQueueInfo extends QueueInfo {

@@ -29,9 +29,7 @@ import org.apache.hadoop.security.authorize.AccessControlList;
* not intended to be a comprehensive piece of data.
* For that, look at JobProfile.
*************************************************
*@deprecated Use {@link org.apache.hadoop.mapreduce.JobStatus} instead
**/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class JobStatus extends org.apache.hadoop.mapreduce.JobStatus {

@@ -31,12 +31,7 @@ import org.apache.hadoop.io.Text;
* separator character. The separator can be specified in config file
* under the attribute name mapreduce.input.keyvaluelinerecordreader.key.value.separator. The default
* separator is the tab character ('\t').
*
* @deprecated Use
* {@link org.apache.hadoop.mapreduce.lib.input.KeyValueLineRecordReader}
* instead
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class KeyValueLineRecordReader implements RecordReader<Text, Text> {
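
The separator behavior described above is driven purely by configuration. A sketch of selecting ',' as the key/value separator, using the property name quoted in the hunk (the driver class is hypothetical):

import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.KeyValueTextInputFormat;

public class SeparatorSketch {
  public static void main(String[] args) {
    JobConf conf = new JobConf();
    // Split each input line at the first ',' instead of the default tab.
    conf.set("mapreduce.input.keyvaluelinerecordreader.key.value.separator", ",");
    conf.setInputFormat(KeyValueTextInputFormat.class);
  }
}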

@@ -34,12 +34,7 @@ import org.apache.hadoop.io.compress.SplittableCompressionCodec;
* Either linefeed or carriage-return are used to signal end of line. Each line
* is divided into key and value parts by a separator byte. If no such a byte
* exists, the key will be the entire line and value will be empty.
*
* @deprecated Use
* {@link org.apache.hadoop.mapreduce.lib.input.KeyValueTextInputFormat}
* instead
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class KeyValueTextInputFormat extends FileInputFormat<Text, Text>

@@ -41,10 +41,7 @@ import org.apache.commons.logging.Log;
/**
* Treats keys as offset in file and value as line.
* @deprecated Use
* {@link org.apache.hadoop.mapreduce.lib.input.LineRecordReader} instead.
*/
@Deprecated
@InterfaceAudience.LimitedPrivate({"MapReduce", "Pig"})
@InterfaceStability.Unstable
public class LineRecordReader implements RecordReader<LongWritable, Text> {

@@ -23,7 +23,6 @@ import java.io.IOException;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.fs.LocalDirAllocator;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapreduce.MRConfig;

@@ -36,10 +36,7 @@ import org.apache.hadoop.util.Progressable;
import org.apache.hadoop.util.ReflectionUtils;
/** An {@link OutputFormat} that writes {@link MapFile}s.
* @deprecated Use
* {@link org.apache.hadoop.mapreduce.lib.output.MapFileOutputFormat} instead
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class MapFileOutputFormat

@@ -23,7 +23,6 @@ import java.io.IOException;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.io.Closeable;
import org.apache.hadoop.mapred.JobConfigurable;
/**
* Base class for {@link Mapper} and {@link Reducer} implementations.
@@ -31,7 +30,6 @@ import org.apache.hadoop.mapred.JobConfigurable;
* <p>Provides default no-op implementations for a few methods, most non-trivial
* applications need to override some of them.</p>
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class MapReduceBase implements Closeable, JobConfigurable {

@@ -30,9 +30,7 @@ import org.apache.hadoop.classification.InterfaceStability;
* control on map processing e.g. multi-threaded, asynchronous mappers etc.</p>
*
* @see Mapper
* @deprecated Use {@link org.apache.hadoop.mapreduce.Mapper} instead.
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public interface MapRunnable<K1, V1, K2, V2>

@@ -37,7 +37,6 @@ import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileSystem.Statistics;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RawLocalFileSystem;

@@ -129,9 +129,7 @@ import org.apache.hadoop.io.compress.CompressionCodec;
* @see MapReduceBase
* @see MapRunnable
* @see SequenceFile
* @deprecated Use {@link org.apache.hadoop.mapreduce.Mapper} instead.
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public interface Mapper<K1, V1, K2, V2> extends JobConfigurable, Closeable {
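
A minimal concrete Mapper against this un-deprecated interface: the classic word-count map, extending MapReduceBase for the no-op configure/close (WordCountMap is a hypothetical name):

import java.io.IOException;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;

public class WordCountMap extends MapReduceBase
    implements Mapper<LongWritable, Text, Text, IntWritable> {
  private static final IntWritable ONE = new IntWritable(1);
  private final Text word = new Text();

  public void map(LongWritable key, Text value,
      OutputCollector<Text, IntWritable> output, Reporter reporter)
      throws IOException {
    for (String token : value.toString().split("\\s+")) {
      if (token.isEmpty()) continue;
      word.set(token);
      output.collect(word, ONE); // one (word, 1) pair per token
    }
  }
}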

@@ -21,12 +21,16 @@ package org.apache.hadoop.mapred;
import java.io.IOException;
import java.net.InetSocketAddress;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.MRConfig;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
@Private
@Unstable
public class Master {
public enum State {

@@ -36,9 +36,7 @@ import org.apache.hadoop.fs.Path;
* Subclasses implement {@link #getRecordReader(InputSplit, JobConf, Reporter)}
* to construct <code>RecordReader</code>'s for <code>MultiFileSplit</code>'s.
* @see MultiFileSplit
* @deprecated Use {@link org.apache.hadoop.mapred.lib.CombineFileInputFormat} instead
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public abstract class MultiFileInputFormat<K, V>

@@ -18,20 +18,16 @@
package org.apache.hadoop.mapred;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.HashSet;
import java.util.Set;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.BlockLocation;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.lib.CombineFileSplit;
/**
@@ -42,9 +38,7 @@ import org.apache.hadoop.mapred.lib.CombineFileSplit;
* reading one record per file.
* @see FileSplit
* @see MultiFileInputFormat
* @deprecated Use {@link org.apache.hadoop.mapred.lib.CombineFileSplit} instead
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class MultiFileSplit extends CombineFileSplit {

@@ -56,9 +56,7 @@ import org.apache.hadoop.classification.InterfaceStability;
* @see FileOutputCommitter
* @see JobContext
* @see TaskAttemptContext
* @deprecated Use {@link org.apache.hadoop.mapreduce.OutputCommitter} instead.
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public abstract class OutputCommitter

@@ -44,9 +44,7 @@ import org.apache.hadoop.util.Progressable;
*
* @see RecordWriter
* @see JobConf
* @deprecated Use {@link org.apache.hadoop.mapreduce.OutputFormat} instead.
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public interface OutputFormat<K, V> {

@@ -29,9 +29,6 @@ import org.apache.hadoop.fs.PathFilter;
* This can be used to list paths of output directory as follows:
* Path[] fileList = FileUtil.stat2Paths(fs.listStatus(outDir,
* new OutputLogFilter()));
* @deprecated Use
* {@link org.apache.hadoop.mapred.Utils.OutputFileUtils.OutputLogFilter}
* instead.
*/
@InterfaceAudience.Public
@InterfaceStability.Stable

@@ -32,9 +32,7 @@ import org.apache.hadoop.classification.InterfaceStability;
* record) is sent for reduction.</p>
*
* @see Reducer
* @deprecated Use {@link org.apache.hadoop.mapreduce.Partitioner} instead.
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public interface Partitioner<K2, V2> extends JobConfigurable {

@@ -18,6 +18,9 @@
package org.apache.hadoop.mapred;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
/**
*
* This abstract class that represents a bucketed series of
@@ -33,6 +36,8 @@ package org.apache.hadoop.mapred;
* bucket and how we interpret the readings by overriding
* {@code extendInternal(...)} and {@code initializeInterval()}
*/
@Private
@Unstable
public abstract class PeriodicStatsAccumulator {
// The range of progress from 0.0D through 1.0D is divided into
// count "progress segments". This object accumulates an

@@ -18,13 +18,16 @@
package org.apache.hadoop.mapred;
import java.util.List;
import org.apache.hadoop.classification.InterfaceAudience.Private;
import org.apache.hadoop.classification.InterfaceStability.Unstable;
/*
* This object gathers the [currently four] PeriodStatset's that we
* are gathering for a particular task attempt for packaging and
* handling as a single object.
*/
@Private
@Unstable
public class ProgressSplitsBlock {
final PeriodicStatsAccumulator progressWallclockTime;
final PeriodicStatsAccumulator progressCPUTime;

@@ -20,9 +20,7 @@ package org.apache.hadoop.mapred;
/**
* Class to encapsulate Queue ACLs for a particular
* user.
* @deprecated Use {@link org.apache.hadoop.mapreduce.QueueAclsInfo} instead
*/
@Deprecated
class QueueAclsInfo extends org.apache.hadoop.mapreduce.QueueAclsInfo {
/**

@@ -19,7 +19,6 @@
package org.apache.hadoop.mapred;
import java.io.IOException;
import java.io.DataInput;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;

@@ -162,9 +162,7 @@ import org.apache.hadoop.io.Closeable;
* @see Partitioner
* @see Reporter
* @see MapReduceBase
* @deprecated Use {@link org.apache.hadoop.mapreduce.Reducer} instead.
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public interface Reducer<K2, V2, K3, V3> extends JobConfigurable, Closeable {
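
The matching Reducer for the word-count map sketched earlier, showing the old API's Iterator-of-values contract (SumReducer is a hypothetical name):

import java.io.IOException;
import java.util.Iterator;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;

public class SumReducer extends MapReduceBase
    implements Reducer<Text, IntWritable, Text, IntWritable> {
  public void reduce(Text key, Iterator<IntWritable> values,
      OutputCollector<Text, IntWritable> output, Reporter reporter)
      throws IOException {
    int sum = 0;
    while (values.hasNext()) {
      sum += values.next().get(); // values arrive as a one-pass Iterator
    }
    output.collect(key, new IntWritable(sum));
  }
}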

@@ -34,9 +34,7 @@ import org.apache.hadoop.conf.Configuration;
* progress etc.</p>
*
* @see JobClient
* @deprecated Use {@link org.apache.hadoop.mapreduce.Job} instead
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public interface RunningJob {

@@ -27,20 +27,11 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.RecordReader;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.SequenceFileInputFormat;
/**
* InputFormat reading keys, values from SequenceFiles in binary (raw)
* format.
* @deprecated Use
* {@link org.apache.hadoop.mapreduce.lib.input.SequenceFileAsBinaryInputFormat}
* instead
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class SequenceFileAsBinaryInputFormat

@@ -23,26 +23,20 @@ import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableComparable;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.DefaultCodec;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.Progressable;
import org.apache.hadoop.util.ReflectionUtils;
/**
* An {@link OutputFormat} that writes keys, values to
* {@link SequenceFile}s in binary(raw) format
*
* @deprecated Use
* {@link org.apache.hadoop.mapreduce.lib.output.SequenceFileAsBinaryOutputFormat}
* instead
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class SequenceFileAsBinaryOutputFormat

@@ -29,12 +29,7 @@ import org.apache.hadoop.io.Text;
* except it generates SequenceFileAsTextRecordReader
* which converts the input keys and values to their
* String forms by calling toString() method.
*
* @deprecated Use
* {@link org.apache.hadoop.mapreduce.lib.input.SequenceFileAsTextInputFormat}
* instead
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class SequenceFileAsTextInputFormat

@@ -31,11 +31,7 @@ import org.apache.hadoop.io.WritableComparable;
* This class converts the input keys and values to their String forms by calling toString()
* method. This class to SequenceFileAsTextInputFormat class is as LineRecordReader
* class to TextInputFormat class.
* @deprecated Use
* {@link org.apache.hadoop.mapreduce.lib.input.SequenceFileAsTextRecordReader}
* instead
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class SequenceFileAsTextRecordReader

@@ -29,11 +29,7 @@ import org.apache.hadoop.util.ReflectionUtils;
/**
* A class that allows a map/red job to work on a sample of sequence files.
* The sample is decided by the filter class set by the job.
* @deprecated Use
* {@link org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFilter}
* instead
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class SequenceFileInputFilter<K, V>

@@ -29,12 +29,9 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.MapFile;
/** An {@link InputFormat} for {@link SequenceFile}s.
* @deprecated Use
* {@link org.apache.hadoop.mapreduce.lib.input.SequenceFileInputFormat}
* instead.
/**
* An {@link InputFormat} for {@link SequenceFile}s.
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class SequenceFileInputFormat<K, V> extends FileInputFormat<K, V> {

@@ -21,25 +21,22 @@ package org.apache.hadoop.mapred;
import java.io.IOException;
import java.util.Arrays;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.SequenceFile;
import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.DefaultCodec;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.util.*;
import org.apache.hadoop.util.Progressable;
import org.apache.hadoop.util.ReflectionUtils;
/** An {@link OutputFormat} that writes {@link SequenceFile}s.
* @deprecated Use
* {@link org.apache.hadoop.mapreduce.lib.output.SequenceFileOutputFormat}
* instead.
/**
* An {@link OutputFormat} that writes {@link SequenceFile}s.
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class SequenceFileOutputFormat <K,V> extends FileOutputFormat<K, V> {

@@ -29,7 +29,9 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.*;
import org.apache.hadoop.util.ReflectionUtils;
/** An {@link RecordReader} for {@link SequenceFile}s. */
/**
* An {@link RecordReader} for {@link SequenceFile}s.
*/
@InterfaceAudience.Public
@InterfaceStability.Stable
public class SequenceFileRecordReader<K, V> implements RecordReader<K, V> {

@@ -17,8 +17,8 @@
*/
package org.apache.hadoop.mapred;
import java.io.DataInputStream;
import java.io.File;
import static org.apache.hadoop.mapred.MapTask.MAP_OUTPUT_INDEX_RECORD_LENGTH;
import java.io.IOException;
import java.nio.ByteBuffer;
import java.nio.LongBuffer;
@@ -27,15 +27,12 @@ import java.util.zip.CheckedOutputStream;
import java.util.zip.Checksum;
import org.apache.hadoop.fs.ChecksumException;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.SecureIOUtils;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.PureJavaCrc32;
import org.apache.hadoop.fs.FSDataInputStream;
import static org.apache.hadoop.mapred.MapTask.MAP_OUTPUT_INDEX_RECORD_LENGTH;
class SpillRecord {

@@ -22,11 +22,6 @@ import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.util.Progressable;
/**
* @deprecated Use {@link org.apache.hadoop.mapreduce.TaskAttemptContext}
* instead.
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public interface TaskAttemptContext

@@ -22,11 +22,6 @@ import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.mapreduce.Counter;
import org.apache.hadoop.util.Progressable;
/**
* @deprecated Use {@link org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl}
* instead.
*/
@Deprecated
@InterfaceAudience.Private
@InterfaceStability.Unstable
public class TaskAttemptContextImpl

@@ -45,7 +45,6 @@ import org.apache.hadoop.mapreduce.TaskType;
* @see JobID
* @see TaskID
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class TaskAttemptID extends org.apache.hadoop.mapreduce.TaskAttemptID {

@@ -24,10 +24,7 @@ import org.apache.hadoop.classification.InterfaceStability;
/**
* This is used to track task completion events on
* job tracker.
* @deprecated Use
* {@link org.apache.hadoop.mapreduce.TaskCompletionEvent} instead
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class TaskCompletionEvent

@@ -47,7 +47,6 @@ import org.apache.hadoop.mapreduce.TaskType;
* @see JobID
* @see TaskAttemptID
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class TaskID extends org.apache.hadoop.mapreduce.TaskID {

@@ -23,7 +23,6 @@ import java.io.BufferedReader;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
@@ -33,13 +32,12 @@ import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.SecureIOUtils;

@@ -24,10 +24,9 @@ import java.util.List;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
/** A report on the state of a task.
* @deprecated Use {@link org.apache.hadoop.mapreduce.TaskReport} instead
**/
@Deprecated
/**
* A report on the state of a task.
*/
@InterfaceAudience.Public
@InterfaceStability.Stable
public class TaskReport extends org.apache.hadoop.mapreduce.TaskReport {

@@ -27,13 +27,11 @@ import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.compress.*;
/** An {@link InputFormat} for plain text files. Files are broken into lines.
/**
* An {@link InputFormat} for plain text files. Files are broken into lines.
* Either linefeed or carriage-return are used to signal end of line. Keys are
* the position in the file, and values are the line of text..
* @deprecated Use {@link org.apache.hadoop.mapreduce.lib.input.TextInputFormat}
* instead.
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class TextInputFormat extends FileInputFormat<LongWritable, Text>

@@ -34,11 +34,9 @@ import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.GzipCodec;
import org.apache.hadoop.util.*;
/** An {@link OutputFormat} that writes plain text files.
* @deprecated Use
* {@link org.apache.hadoop.mapreduce.lib.output.TextOutputFormat} instead.
/**
* An {@link OutputFormat} that writes plain text files.
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class TextOutputFormat<K, V> extends FileOutputFormat<K, V> {

@@ -32,10 +32,6 @@ import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.JobID;
import org.apache.hadoop.mapreduce.lib.jobcontrol.ControlledJob;
/**
* @deprecated Use {@link ControlledJob} instead.
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class Job extends ControlledJob {

@@ -26,11 +26,6 @@ import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.mapreduce.lib.jobcontrol.ControlledJob;
/**
*@deprecated Use
*{@link org.apache.hadoop.mapreduce.lib.jobcontrol.JobControl} instead
**/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class JobControl extends

@@ -28,10 +28,7 @@ import org.apache.hadoop.io.Writable;
* implementation uses an {@link java.util.ArrayList} to store elements
* added to it, replaying them as requested.
* Prefer {@link StreamBackedIterator}.
* @deprecated Use
* {@link org.apache.hadoop.mapreduce.lib.join.ArrayListBackedIterator} instead
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class ArrayListBackedIterator<X extends Writable> extends

@@ -32,11 +32,7 @@ import org.apache.hadoop.mapred.Reporter;
/**
* Refinement of InputFormat requiring implementors to provide
* ComposableRecordReader instead of RecordReader.
*
* @deprecated Use
* {@link org.apache.hadoop.mapreduce.lib.join.ComposableInputFormat} instead
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public interface ComposableInputFormat<K extends WritableComparable,

@@ -28,10 +28,7 @@ import org.apache.hadoop.mapred.RecordReader;
/**
* Additional operations required of a RecordReader to participate in a join.
* @deprecated Use
* {@link org.apache.hadoop.mapreduce.lib.join.ComposableRecordReader} instead
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public interface ComposableRecordReader<K extends WritableComparable,

@@ -46,10 +46,7 @@ import org.apache.hadoop.mapred.Reporter;
* in the join.
* @see JoinRecordReader
* @see MultiFilterRecordReader
* @deprecated Use
* {@link org.apache.hadoop.mapreduce.lib.join.CompositeInputFormat} instead
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class CompositeInputFormat<K extends WritableComparable>
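
A sketch of wiring up a map-side join with this class, assuming two inputs that are sorted and identically partitioned, as the join framework requires (the paths and driver name are hypothetical; "mapred.join.expr" is the join-expression property of the old-API join package):

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.SequenceFileInputFormat;
import org.apache.hadoop.mapred.join.CompositeInputFormat;

public class JoinSketch {
  public static void main(String[] args) {
    JobConf conf = new JobConf();
    conf.setInputFormat(CompositeInputFormat.class);
    // Inner join of two sorted, equally partitioned inputs.
    conf.set("mapred.join.expr", CompositeInputFormat.compose(
        "inner", SequenceFileInputFormat.class,
        new Path("/data/left"), new Path("/data/right")));
  }
}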

@@ -33,11 +33,7 @@ import org.apache.hadoop.util.ReflectionUtils;
/**
* This InputSplit contains a set of child InputSplits. Any InputSplit inserted
* into this collection must have a public default constructor.
*
* @deprecated Use
* {@link org.apache.hadoop.mapreduce.lib.join.CompositeInputSplit} instead
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class CompositeInputSplit implements InputSplit {

@@ -37,11 +37,7 @@ import org.apache.hadoop.util.ReflectionUtils;
/**
* A RecordReader that can effect joins of RecordReaders sharing a common key
* type and partitioning.
*
* @deprecated Use
* {@link org.apache.hadoop.mapreduce.lib.join.CompositeRecordReader} instead
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public abstract class CompositeRecordReader<

@@ -28,11 +28,7 @@ import org.apache.hadoop.mapred.JobConf;
/**
* Full inner join.
*
* @deprecated Use
* {@link org.apache.hadoop.mapreduce.lib.join.InnerJoinRecordReader} instead.
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class InnerJoinRecordReader<K extends WritableComparable>

@@ -31,10 +31,7 @@ import org.apache.hadoop.mapred.JobConf;
/**
* Base class for Composite joins returning Tuples of arbitrary Writables.
* @deprecated Use
* {@link org.apache.hadoop.mapreduce.lib.join.JoinRecordReader} instead
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public abstract class JoinRecordReader<K extends WritableComparable>

@@ -34,10 +34,7 @@ import org.apache.hadoop.mapred.RecordReader;
/**
* Base class for Composite join returning values derived from multiple
* sources, but generally not tuples.
* @deprecated Use
* {@link org.apache.hadoop.mapreduce.lib.join.MultiFilterRecordReader} instead
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public abstract class MultiFilterRecordReader<K extends WritableComparable,

@@ -28,11 +28,7 @@ import org.apache.hadoop.mapred.JobConf;
/**
* Full outer join.
*
* @deprecated Use
* {@link org.apache.hadoop.mapreduce.lib.join.OuterJoinRecordReader} instead
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class OuterJoinRecordReader<K extends WritableComparable>

@@ -34,10 +34,7 @@ import org.apache.hadoop.mapred.JobConf;
* For example, <tt>override(S1,S2,S3)</tt> will prefer values
* from S3 over S2, and values from S2 over S1 for all keys
* emitted from all sources.
* @deprecated Use
* {@link org.apache.hadoop.mapreduce.lib.join.OverrideRecordReader} instead
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class OverrideRecordReader<K extends WritableComparable,

@@ -61,9 +61,7 @@ import org.apache.hadoop.util.ReflectionUtils;
* straightforward. One need only override the relevant method(s) (usually only
* {@link CompositeRecordReader#combine}) and include a property to map its
* value to an identifier in the parser.
* @deprecated Use {@link org.apache.hadoop.mapreduce.lib.join.Parser} instead
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Evolving
public class Parser {

@@ -25,11 +25,7 @@ import org.apache.hadoop.io.Writable;
* This defines an interface to a stateful Iterator that can replay elements
* added to it directly.
* Note that this does not extend {@link java.util.Iterator}.
*
* @deprecated Use
* {@link org.apache.hadoop.mapreduce.lib.join.ResetableIterator} instead
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public interface ResetableIterator<T extends Writable>

@@ -24,11 +24,7 @@ import org.apache.hadoop.io.Writable;
/**
* This class provides an implementation of ResetableIterator. This
* implementation uses a byte array to store elements added to it.
*
* @deprecated Use
* {@link org.apache.hadoop.mapreduce.lib.join.StreamBackedIterator} instead
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class StreamBackedIterator<X extends Writable>

@@ -33,11 +33,7 @@ import org.apache.hadoop.io.Writable;
* incompatible with, but contrary to the general case.
*
* @see org.apache.hadoop.io.Writable
*
* @deprecated Use
* {@link org.apache.hadoop.mapreduce.lib.join.TupleWritable} instead
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class TupleWritable

@@ -33,10 +33,7 @@ import org.apache.hadoop.mapred.RecordReader;
* This class keeps track of the &quot;head&quot; key-value pair for the
* provided RecordReader and keeps a store of values matching a key when
* this source is participating in a join.
* @deprecated Use
* {@link org.apache.hadoop.mapreduce.lib.join.WrappedRecordReader} instead
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class WrappedRecordReader<K extends WritableComparable,

@@ -29,11 +29,7 @@ import org.apache.hadoop.mapred.Partitioner;
* the bytes array returned by {@link BinaryComparable#getBytes()}.
*
* @see org.apache.hadoop.mapreduce.lib.partition.BinaryPartitioner
* @deprecated Use
* {@link org.apache.hadoop.mapreduce.lib.partition.BinaryPartitioner}
* instead.
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class BinaryPartitioner<V>

@@ -35,9 +35,7 @@ import java.util.List;
/**
* The Chain class provides all the common functionality for the
* {@link ChainMapper} and the {@link ChainReducer} classes.
* @deprecated Use {@link org.apache.hadoop.mapreduce.lib.chain.Chain} instead
*/
@Deprecated
class Chain extends org.apache.hadoop.mapreduce.lib.chain.Chain {
private static final String MAPPER_BY_VALUE = "chain.mapper.byValue";

@@ -88,10 +88,7 @@ import java.io.IOException;
* RunningJob job = jc.submitJob(conf);
* ...
* </pre>
* @deprecated
* Use {@link org.apache.hadoop.mapreduce.lib.chain.ChainMapper} instead
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class ChainMapper implements Mapper {
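
A sketch of chaining one mapper into the map task's pipeline with addMapper, echoing the usage pattern in the javadoc above (ChainSketch and LowerMap are hypothetical):

import java.io.IOException;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.MapReduceBase;
import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.lib.ChainMapper;

public class ChainSketch {
  // First link in the chain: lower-case each line.
  public static class LowerMap extends MapReduceBase
      implements Mapper<LongWritable, Text, LongWritable, Text> {
    public void map(LongWritable k, Text v,
        OutputCollector<LongWritable, Text> out, Reporter r)
        throws IOException {
      out.collect(k, new Text(v.toString().toLowerCase()));
    }
  }

  public static void main(String[] args) {
    JobConf conf = new JobConf(ChainSketch.class);
    // Each addMapper call appends one Mapper to the map-task pipeline;
    // byValue=true copies key/values between links.
    ChainMapper.addMapper(conf, LowerMap.class,
        LongWritable.class, Text.class, LongWritable.class, Text.class,
        true, new JobConf(false));
  }
}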

@@ -88,10 +88,7 @@ import java.util.Iterator;
* RunningJob job = jc.submitJob(conf);
* ...
* </pre>
* @deprecated
* Use {@link org.apache.hadoop.mapreduce.lib.chain.ChainReducer} instead
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class ChainReducer implements Reducer {

@@ -24,14 +24,12 @@ import java.util.List;
import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.PathFilter;
import org.apache.hadoop.mapred.FileInputFormat;
import org.apache.hadoop.mapred.InputFormat;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.RecordReader;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.TaskAttemptContext;
@@ -53,10 +51,7 @@ import org.apache.hadoop.mapreduce.TaskAttemptContext;
* Subclasses implement {@link org.apache.hadoop.mapred.InputFormat#getRecordReader(InputSplit, JobConf, Reporter)}
* to construct <code>RecordReader</code>'s for <code>CombineFileSplit</code>'s.
* @see CombineFileSplit
* @deprecated Use
* {@link org.apache.hadoop.mapreduce.lib.input.CombineFileInputFormat}
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public abstract class CombineFileInputFormat<K, V>

@@ -35,10 +35,7 @@ import org.apache.hadoop.conf.Configuration;
* This class allows using different RecordReaders for processing
* these data chunks from different files.
* @see CombineFileSplit
* @deprecated Use
* {@link org.apache.hadoop.mapreduce.lib.input.CombineFileRecordReader}
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class CombineFileRecordReader<K, V> implements RecordReader<K, V> {

@@ -26,11 +26,6 @@ import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.InputSplit;
import org.apache.hadoop.mapred.JobConf;
/**
* @deprecated Use
* {@link org.apache.hadoop.mapreduce.lib.input.CombineFileSplit}
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class CombineFileSplit extends

@@ -43,10 +43,7 @@ import org.apache.hadoop.util.ReflectionUtils;
* InputFormats.
*
* @see MultipleInputs#addInputPath(JobConf, Path, Class, Class)
* @deprecated Use
* {@link org.apache.hadoop.mapreduce.lib.input.DelegatingInputFormat} instead
*/
@Deprecated
@InterfaceAudience.Private
@InterfaceStability.Unstable
public class DelegatingInputFormat<K, V> implements InputFormat<K, V> {

@@ -34,10 +34,7 @@ import org.apache.hadoop.util.ReflectionUtils;
* mappers.
*
* @see MultipleInputs#addInputPath(JobConf, Path, Class, Class)
* @deprecated Use
* {@link org.apache.hadoop.mapreduce.lib.input.DelegatingMapper} instead
*/
@Deprecated
@InterfaceAudience.Private
@InterfaceStability.Unstable
public class DelegatingMapper<K1, V1, K2, V2> implements Mapper<K1, V1, K2, V2> {

@@ -65,10 +65,7 @@ import org.apache.hadoop.mapreduce.lib.fieldsel.*;
*
* The reducer extracts output key/value pairs in a similar manner, except that
* the key is never ignored.
* @deprecated Use {@link FieldSelectionMapper} and
* {@link FieldSelectionReducer} instead
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class FieldSelectionMapReduce<K, V>

@@ -31,10 +31,7 @@ import org.apache.hadoop.util.Progressable;
/**
* FilterOutputFormat is a convenience class that wraps OutputFormat.
* @deprecated Use
* {@link org.apache.hadoop.mapreduce.lib.output.FilterOutputFormat} instead.
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class FilterOutputFormat<K, V> implements OutputFormat<K, V> {

@@ -23,11 +23,9 @@ import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.mapred.Partitioner;
import org.apache.hadoop.mapred.JobConf;
/** Partition keys by their {@link Object#hashCode()}.
* @deprecated Use
* {@link org.apache.hadoop.mapreduce.lib.partition.HashPartitioner} instead.
/**
* Partition keys by their {@link Object#hashCode()}.
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class HashPartitioner<K2, V2> implements Partitioner<K2, V2> {
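
For reference, the hash partitioning described above comes down to one line: mask the sign bit, then take the value modulo the reduce count. A hypothetical equivalent written against the old Partitioner interface:

import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.Partitioner;

// Hypothetical re-implementation of hash partitioning for Text keys.
public class MyHashPartitioner implements Partitioner<Text, Text> {
  public void configure(JobConf job) { }
  public int getPartition(Text key, Text value, int numReduceTasks) {
    // Mask the sign bit so negative hashCodes still map to a valid bucket.
    return (key.hashCode() & Integer.MAX_VALUE) % numReduceTasks;
  }
}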

@@ -27,10 +27,9 @@ import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.MapReduceBase;
/** Implements the identity function, mapping inputs directly to outputs.
* @deprecated Use {@link org.apache.hadoop.mapreduce.Mapper} instead.
/**
* Implements the identity function, mapping inputs directly to outputs.
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class IdentityMapper<K, V>

@@ -29,10 +29,9 @@ import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.MapReduceBase;
/** Performs no reduction, writing all input values directly to the output.
* @deprecated Use {@link org.apache.hadoop.mapreduce.Reducer} instead.
/**
* Performs no reduction, writing all input values directly to the output.
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class IdentityReducer<K, V>

@@ -25,11 +25,6 @@ import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.Job;
/**
* @deprecated Use
* {@link org.apache.hadoop.mapreduce.lib.partition.InputSampler}
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class InputSampler<K,V> extends

@@ -27,11 +27,9 @@ import org.apache.hadoop.mapred.Mapper;
import org.apache.hadoop.mapred.OutputCollector;
import org.apache.hadoop.mapred.Reporter;
/** A {@link Mapper} that swaps keys and values.
* @deprecated Use {@link org.apache.hadoop.mapreduce.lib.map.InverseMapper}
* instead.
/**
* A {@link Mapper} that swaps keys and values.
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class InverseMapper<K, V>

@@ -38,11 +38,7 @@ import org.apache.hadoop.mapreduce.JobContext;
* field). opts are ordering options (any of 'nr' as described above).
* We assume that the fields in the key are separated by
* {@link JobContext#MAP_OUTPUT_KEY_FIELD_SEPERATOR}
* @deprecated Use
* {@link org.apache.hadoop.mapreduce.lib.partition.KeyFieldBasedComparator}
* instead
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class KeyFieldBasedComparator<K, V> extends

@@ -34,11 +34,7 @@ import org.apache.hadoop.mapred.Partitioner;
* field's last character. If '.c' is omitted from pos1, it defaults to 1
* (the beginning of the field); if omitted from pos2, it defaults to 0
* (the end of the field).
* @deprecated Use
* {@link org.apache.hadoop.mapreduce.lib.partition.KeyFieldBasedPartitioner}
* instead
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class KeyFieldBasedPartitioner<K2, V2> extends
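
A configuration sketch for the sort-style key spec described above, assuming JobConf's setKeyFieldPartitionerOptions helper (the driver class is hypothetical):

import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.lib.KeyFieldBasedPartitioner;

public class FieldPartitionSketch {
  public static void main(String[] args) {
    JobConf conf = new JobConf();
    conf.setPartitionerClass(KeyFieldBasedPartitioner.class);
    // Partition on the second key field only, Unix sort "-k" style.
    conf.setKeyFieldPartitionerOptions("-k2,2");
  }
}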

@@ -32,10 +32,7 @@ import org.apache.hadoop.util.ReflectionUtils;
/**
* A Convenience class that creates output lazily.
* @deprecated Use
* {@link org.apache.hadoop.mapreduce.lib.output.LazyOutputFormat} instead.
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class LazyOutputFormat<K, V> extends FilterOutputFormat<K, V> {

@@ -30,11 +30,9 @@ import org.apache.hadoop.classification.InterfaceAudience;
import org.apache.hadoop.classification.InterfaceStability;
import org.apache.hadoop.io.LongWritable;
/** A {@link Reducer} that sums long values.
* @deprecated Use {@link org.apache.hadoop.mapreduce.lib.reduce.LongSumReducer}
* instead.
/**
* A {@link Reducer} that sums long values.
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class LongSumReducer<K> extends MapReduceBase

@@ -32,10 +32,7 @@ import org.apache.hadoop.util.ReflectionUtils;
/**
* This class supports MapReduce jobs that have multiple input paths with
* a different {@link InputFormat} and {@link Mapper} for each path
* @deprecated Use
* {@link org.apache.hadoop.mapreduce.lib.input.MultipleInputs} instead
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public class MultipleInputs {

@@ -50,11 +50,7 @@ import org.apache.hadoop.util.Progressable;
*
* Case three: This class is used for a map only job. The job wants to use an
* output file name that depends on both the keys and the input file name,
*
* @deprecated Use
* {@link org.apache.hadoop.mapreduce.lib.output.MultipleOutputs} instead
*/
@Deprecated
@InterfaceAudience.Public
@InterfaceStability.Stable
public abstract class MultipleOutputFormat<K, V>

Some files were not shown because too many files have changed in this diff.