MAPREDUCE-6887. Modifier 'static' is redundant for inner enums. Contributed by ZhangBing Lin.
commit d4015f8628
parent a7f085d6bf
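Background for the diff below: a nested enum type in Java is implicitly static (JLS §8.9), so the explicit modifier carries no meaning and removing it changes nothing at runtime. A minimal standalone sketch of that point (the class and main method here are illustrative only, not part of this patch):

// Hypothetical example, not from the Hadoop source tree.
public class NestedEnumExample {
  // A nested enum is implicitly static, so writing "static" here would be redundant.
  public enum TaskStatusFilter { NONE, KILLED, FAILED, SUCCEEDED, ALL }

  public static void main(String[] args) {
    // The enum is usable without an instance of the enclosing class,
    // exactly as if it had been declared "public static enum".
    TaskStatusFilter filter = TaskStatusFilter.FAILED;
    System.out.println(filter);  // prints FAILED
  }
}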
@@ -102,7 +102,7 @@ public class ContainerLauncherImpl extends AbstractService implements
     }
   }
 
-  private static enum ContainerState {
+  private enum ContainerState {
     PREP, FAILED, RUNNING, DONE, KILLED_BEFORE_LAUNCH
   }
 
@@ -114,7 +114,7 @@ public class MRApps extends Apps {
     throw new YarnRuntimeException("Unknown task type: "+ type.toString());
   }
 
-  public static enum TaskAttemptStateUI {
+  public enum TaskAttemptStateUI {
     NEW(
         new TaskAttemptState[] { TaskAttemptState.NEW,
             TaskAttemptState.STARTING }),
@@ -136,7 +136,7 @@ public class MRApps extends Apps {
     }
   }
 
-  public static enum TaskStateUI {
+  public enum TaskStateUI {
     RUNNING(
         new TaskState[]{TaskState.RUNNING}),
     PENDING(new TaskState[]{TaskState.SCHEDULED}),
@@ -72,7 +72,7 @@ public abstract class FileInputFormat<K, V> implements InputFormat<K, V> {
     LogFactory.getLog(FileInputFormat.class);
 
   @Deprecated
-  public static enum Counter {
+  public enum Counter {
     BYTES_READ
   }
 
@@ -36,7 +36,7 @@ import org.apache.hadoop.util.Progressable;
 public abstract class FileOutputFormat<K, V> implements OutputFormat<K, V> {
 
   @Deprecated
-  public static enum Counter {
+  public enum Counter {
     BYTES_WRITTEN
   }
 
@@ -152,7 +152,7 @@ public class JobClient extends CLI implements AutoCloseable {
   public static final String MAPREDUCE_CLIENT_RETRY_POLICY_SPEC_DEFAULT =
       "10000,6,60000,10"; // t1,n1,t2,n2,...
 
-  public static enum TaskStatusFilter { NONE, KILLED, FAILED, SUCCEEDED, ALL }
+  public enum TaskStatusFilter { NONE, KILLED, FAILED, SUCCEEDED, ALL }
   private TaskStatusFilter taskOutputFilter = TaskStatusFilter.FAILED;
 
   private int maxRetry = MRJobConfig.DEFAULT_MR_CLIENT_JOB_MAX_RETRIES;
@@ -30,7 +30,7 @@ public class JobInProgress {
    * @deprecated Provided for compatibility. Use {@link JobCounter} instead.
    */
   @Deprecated
-  public static enum Counter {
+  public enum Counter {
     NUM_FAILED_MAPS,
     NUM_FAILED_REDUCES,
     TOTAL_LAUNCHED_MAPS,
@@ -28,7 +28,7 @@ public class JobTracker {
    * <code>State</code> is no longer used since M/R 2.x. It is kept in case
    * that M/R 1.x applications may still use it.
    */
-  public static enum State {
+  public enum State {
     INITIALIZING, RUNNING
   }
 
@@ -90,7 +90,7 @@ abstract public class Task implements Writable, Configurable {
    * @deprecated Provided for compatibility. Use {@link TaskCounter} instead.
    */
   @Deprecated
-  public static enum Counter {
+  public enum Counter {
     MAP_INPUT_RECORDS,
     MAP_OUTPUT_RECORDS,
     MAP_SKIPPED_RECORDS,
@@ -358,7 +358,7 @@ public class TaskLog {
    * The filter for userlogs.
    */
   @InterfaceAudience.Private
-  public static enum LogName {
+  public enum LogName {
     /** Log on the stdout of the task. */
     STDOUT ("stdout"),
 
@@ -45,12 +45,12 @@ public abstract class TaskStatus implements Writable, Cloneable {
   //enumeration for reporting current phase of a task.
   @InterfaceAudience.Private
   @InterfaceStability.Unstable
-  public static enum Phase{STARTING, MAP, SHUFFLE, SORT, REDUCE, CLEANUP}
+  public enum Phase{STARTING, MAP, SHUFFLE, SORT, REDUCE, CLEANUP}
 
   // what state is the task in?
   @InterfaceAudience.Private
   @InterfaceStability.Unstable
-  public static enum State {RUNNING, SUCCEEDED, FAILED, UNASSIGNED, KILLED,
+  public enum State {RUNNING, SUCCEEDED, FAILED, UNASSIGNED, KILLED,
       COMMIT_PENDING, FAILED_UNCLEAN, KILLED_UNCLEAN, PREEMPTED}
 
   private final TaskAttemptID taskid;
@@ -68,7 +68,7 @@ class BinaryProtocol<K1 extends WritableComparable, V1 extends Writable,
    * The integer codes to represent the different messages. These must match
    * the C++ codes or massive confusion will result.
    */
-  private static enum MessageType { START(0),
+  private enum MessageType { START(0),
       SET_JOB_CONF(1),
       SET_INPUT_TYPES(2),
       RUN_MAP(3),
@@ -54,7 +54,7 @@ import org.apache.hadoop.security.token.Token;
 public class Cluster {
 
   @InterfaceStability.Evolving
-  public static enum JobTrackerStatus {INITIALIZING, RUNNING};
+  public enum JobTrackerStatus {INITIALIZING, RUNNING};
 
   private ClientProtocolProvider clientProtocolProvider;
   private ClientProtocol client;
@@ -79,7 +79,7 @@ public class Job extends JobContextImpl implements JobContext {
   private static final Log LOG = LogFactory.getLog(Job.class);
 
   @InterfaceStability.Evolving
-  public static enum JobState {DEFINE, RUNNING};
+  public enum JobState {DEFINE, RUNNING};
   private static final long MAX_JOBSTATUS_AGE = 1000 * 2;
   public static final String OUTPUT_FILTER = "mapreduce.client.output.filter";
   /** Key in mapred-*.xml that sets completionPollInvervalMillis */
@@ -104,7 +104,7 @@ public class Job extends JobContextImpl implements JobContext {
   public static final boolean DEFAULT_USE_WILDCARD_FOR_LIBJARS = true;
 
   @InterfaceStability.Evolving
-  public static enum TaskStatusFilter { NONE, KILLED, FAILED, SUCCEEDED, ALL }
+  public enum TaskStatusFilter { NONE, KILLED, FAILED, SUCCEEDED, ALL }
 
   static {
     ConfigUtil.loadResources();
@@ -52,7 +52,7 @@ public class JobStatus implements Writable, Cloneable {
   /**
    * Current state of the job
    */
-  public static enum State {
+  public enum State {
     RUNNING(1),
     SUCCEEDED(2),
     FAILED(3),
@@ -85,7 +85,7 @@ public abstract class FileInputFormat<K, V> extends InputFormat<K, V> {
   private static final double SPLIT_SLOP = 1.1; // 10% slop
 
   @Deprecated
-  public static enum Counter {
+  public enum Counter {
     BYTES_READ
   }
 
@@ -52,7 +52,7 @@ public class ControlledJob {
   private static final Log LOG = LogFactory.getLog(ControlledJob.class);
 
   // A job will be in one of the following states
-  public static enum State {SUCCESS, WAITING, RUNNING, READY, FAILED,
+  public enum State {SUCCESS, WAITING, RUNNING, READY, FAILED,
       DEPENDENT_FAILED};
   public static final String CREATE_DIR = "mapreduce.jobcontrol.createdir.ifnotexist";
   private State state;
@@ -58,7 +58,7 @@ public class JobControl implements Runnable {
   private static final Log LOG = LogFactory.getLog(JobControl.class);
 
   // The thread can be in one of the following state
-  public static enum ThreadState {RUNNING, SUSPENDED,STOPPED, STOPPING, READY};
+  public enum ThreadState {RUNNING, SUSPENDED,STOPPED, STOPPING, READY};
 
   private ThreadState runnerState; // the thread state
 
@@ -61,7 +61,7 @@ public static final String COMPRESS_TYPE = "mapreduce.output.fileoutputformat.co
   public static final String OUTDIR = "mapreduce.output.fileoutputformat.outputdir";
 
   @Deprecated
-  public static enum Counter {
+  public enum Counter {
     BYTES_WRITTEN
   }
 
@@ -71,7 +71,7 @@ class Fetcher<K,V> extends Thread {
   private static final String FETCH_RETRY_AFTER_HEADER = "Retry-After";
 
   protected final Reporter reporter;
-  private static enum ShuffleErrors{IO_ERROR, WRONG_LENGTH, BAD_ID, WRONG_MAP,
+  private enum ShuffleErrors{IO_ERROR, WRONG_LENGTH, BAD_ID, WRONG_MAP,
       CONNECTION, WRONG_REDUCE}
 
   private final static String SHUFFLE_ERR_GRP_NAME = "Shuffle Errors";
@@ -29,7 +29,7 @@ import org.apache.hadoop.mapreduce.TaskAttemptID;
 @InterfaceStability.Unstable
 public class MapHost {
 
-  public static enum State {
+  public enum State {
     IDLE,     // No map outputs available
     BUSY,     // Map outputs are being fetched
     PENDING,  // Known map outputs which need to be fetched
@@ -29,7 +29,7 @@ import org.apache.hadoop.ipc.Server;
 public class HSAuditLogger {
   private static final Log LOG = LogFactory.getLog(HSAuditLogger.class);
 
-  static enum Keys {
+  enum Keys {
     USER, OPERATION, TARGET, RESULT, IP, PERMISSIONS, DESCRIPTION
   }
 
@@ -87,7 +87,7 @@ public class HistoryFileManager extends AbstractService {
   private static final Log LOG = LogFactory.getLog(HistoryFileManager.class);
   private static final Log SUMMARY_LOG = LogFactory.getLog(JobSummary.class);
 
-  private static enum HistoryInfoState {
+  private enum HistoryInfoState {
     IN_INTERMEDIATE, IN_DONE, DELETED, MOVE_FAILED
   };
 
@@ -53,7 +53,7 @@ public class RandomTextWriterJob extends Configured implements Tool {
   public static final String MIN_KEY = "mapreduce.randomtextwriter.minwordskey";
   public static final String MAX_KEY = "mapreduce.randomtextwriter.maxwordskey";
 
-  static enum Counters { RECORDS_WRITTEN, BYTES_WRITTEN }
+  enum Counters { RECORDS_WRITTEN, BYTES_WRITTEN }
 
   public Job createJob(Configuration conf) throws IOException {
     long numBytesToWritePerMap = conf.getLong(BYTES_PER_MAP, 10 * 1024);
@@ -162,7 +162,7 @@ public class JHLogAnalyzer {
     Configuration.addDefaultResource("hdfs-site.xml");
   }
 
-  static enum StatSeries {
+  enum StatSeries {
     STAT_ALL_SLOT_TIME
         (AccumulatingReducer.VALUE_TYPE_LONG + "allSlotTime"),
     STAT_FAILED_SLOT_TIME
@@ -123,7 +123,7 @@ public class TestDFSIO implements Tool {
     Configuration.addDefaultResource("mapred-site.xml");
   }
 
-  private static enum TestType {
+  private enum TestType {
     TEST_TYPE_READ("read"),
     TEST_TYPE_WRITE("write"),
     TEST_TYPE_CLEANUP("cleanup"),
@@ -145,7 +145,7 @@ public class TestDFSIO implements Tool {
     }
   }
 
-  static enum ByteMultiple {
+  enum ByteMultiple {
     B(1L),
     KB(0x400L),
     MB(0x100000L),
@@ -46,7 +46,7 @@ class OperationOutput {
   private static final String MEASUREMENT_SEP = "*";
   private static final String STRING_SEP = ";";
 
-  static enum OutputType {
+  enum OutputType {
     STRING, FLOAT, LONG, DOUBLE, INTEGER
   }
 
@@ -28,7 +28,7 @@ import org.apache.hadoop.fs.Path;
  */
 class PathFinder {
 
-  private static enum Type {
+  private enum Type {
     FILE, DIRECTORY
   }
 
@@ -232,7 +232,7 @@ public class GenericMRLoadGenerator extends Configured implements Tool {
     }
   }
 
-  static enum Counters { RECORDS_WRITTEN, BYTES_WRITTEN }
+  enum Counters { RECORDS_WRITTEN, BYTES_WRITTEN }
 
   static class RandomMapOutput extends MapReduceBase
       implements Mapper<Text,Text,Text,Text> {
@@ -48,7 +48,7 @@ public class MRBench extends Configured implements Tool{
   private static Path INPUT_DIR = new Path(BASE_DIR, DEFAULT_INPUT_SUB);
   private static Path OUTPUT_DIR = new Path(BASE_DIR, DEFAULT_OUTPUT_SUB);
 
-  public static enum Order {RANDOM, ASCENDING, DESCENDING};
+  public enum Order {RANDOM, ASCENDING, DESCENDING};
 
   /**
    * Takes input format as text lines, runs some processing on it and
@@ -63,7 +63,7 @@ public class ThreadedMapBenchmark extends Configured implements Tool {
       // (FACTOR * data_size) should
       // result in only 1 spill
 
-  static enum Counters { RECORDS_WRITTEN, BYTES_WRITTEN }
+  enum Counters { RECORDS_WRITTEN, BYTES_WRITTEN }
 
   /**
    * Generates random input data of given size with keys and values of given
@@ -241,7 +241,7 @@ public class GenericMRLoadGenerator extends Configured implements Tool {
     }
   }
 
-  static enum Counters { RECORDS_WRITTEN, BYTES_WRITTEN }
+  enum Counters { RECORDS_WRITTEN, BYTES_WRITTEN }
 
   static class RandomMapOutput extends Mapper<Text,Text,Text,Text> {
     StringBuilder sentence = new StringBuilder();
@@ -67,7 +67,7 @@ public class LargeSorter extends Configured implements Tool {
   /**
    * User counters
    */
-  static enum Counters { RECORDS_WRITTEN, BYTES_WRITTEN }
+  enum Counters { RECORDS_WRITTEN, BYTES_WRITTEN }
 
   /**
    * A custom input format that creates virtual inputs of a single string
@@ -97,7 +97,7 @@ public class RandomTextWriter extends Configured implements Tool {
   /**
    * User counters
    */
-  static enum Counters { RECORDS_WRITTEN, BYTES_WRITTEN }
+  enum Counters { RECORDS_WRITTEN, BYTES_WRITTEN }
 
   static class RandomTextMapper extends Mapper<Text, Text, Text, Text> {
 
@@ -89,7 +89,7 @@ public class RandomWriter extends Configured implements Tool {
   /**
    * User counters
    */
-  static enum Counters { RECORDS_WRITTEN, BYTES_WRITTEN }
+  enum Counters { RECORDS_WRITTEN, BYTES_WRITTEN }
 
   /**
    * A custom input format that creates virtual inputs of a single string
@@ -157,7 +157,7 @@ public class TimelineServicePerformance extends Configured implements Tool {
   /**
    * TimelineServer Performance counters
    */
-  static enum PerfCounters {
+  enum PerfCounters {
     TIMELINE_SERVICE_WRITE_TIME,
     TIMELINE_SERVICE_WRITE_COUNTER,
     TIMELINE_SERVICE_WRITE_FAILURES,
@@ -97,7 +97,7 @@ public class RandomTextWriter extends Configured implements Tool {
   /**
    * User counters
    */
-  static enum Counters { RECORDS_WRITTEN, BYTES_WRITTEN }
+  enum Counters { RECORDS_WRITTEN, BYTES_WRITTEN }
 
   static class RandomTextMapper extends Mapper<Text, Text, Text, Text> {
 
@@ -89,7 +89,7 @@ public class RandomWriter extends Configured implements Tool {
   /**
    * User counters
    */
-  static enum Counters { RECORDS_WRITTEN, BYTES_WRITTEN }
+  enum Counters { RECORDS_WRITTEN, BYTES_WRITTEN }
 
   /**
    * A custom input format that creates virtual inputs of a single string
@@ -68,7 +68,7 @@ import org.apache.hadoop.util.ToolRunner;
 public class TeraGen extends Configured implements Tool {
   private static final Log LOG = LogFactory.getLog(TeraGen.class);
 
-  public static enum Counters {CHECKSUM}
+  public enum Counters {CHECKSUM}
 
   /**
    * An input format that assigns ranges of longs to each mapper.