HADOOP-14250. Correct spelling of 'separate' and variants. Contributed by Doris Gu

Chris Douglas 2017-03-30 10:14:22 -07:00
parent c8bd5fc7a8
commit 7297e82cb4
20 changed files with 55 additions and 34 deletions


@@ -45,7 +45,7 @@ public class NodeBase implements Node {
   /** Construct a node from its path
    * @param path
-   *   a concatenation of this node's location, the path seperator, and its name
+   *   a concatenation of this node's location, the path separator, and its name
    */
   public NodeBase(String path) {
     path = normalize(path);


@@ -372,8 +372,8 @@ public class StringUtils {
   /**
    * Returns an arraylist of strings.
-   * @param str the comma seperated string values
-   * @return the arraylist of the comma seperated string values
+   * @param str the comma separated string values
+   * @return the arraylist of the comma separated string values
    */
   public static String[] getStrings(String str){
     String delim = ",";
@@ -384,7 +384,7 @@ public class StringUtils {
    * Returns an arraylist of strings.
    * @param str the string values
    * @param delim delimiter to separate the values
-   * @return the arraylist of the seperated string values
+   * @return the arraylist of the separated string values
    */
   public static String[] getStrings(String str, String delim){
     Collection<String> values = getStringCollection(str, delim);
@@ -396,7 +396,7 @@ public class StringUtils {
   /**
    * Returns a collection of strings.
-   * @param str comma seperated string values
+   * @param str comma separated string values
    * @return an <code>ArrayList</code> of string values
    */
   public static Collection<String> getStringCollection(String str){
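
Usage of these helpers, for reference (the input strings and results below are illustrative examples, not taken from the patch):

    // getStrings(String) splits on the default comma delimiter;
    // getStrings(String, String) accepts an explicit delimiter.
    String[] csv = StringUtils.getStrings("a,b,c");         // {"a", "b", "c"}
    String[] tsv = StringUtils.getStrings("a\tb\tc", "\t"); // {"a", "b", "c"}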


@@ -358,7 +358,7 @@ public class FSOperations {
     * Creates a Concat executor.
     *
     * @param path target path to concat to.
-    * @param sources comma seperated absolute paths to use as sources.
+    * @param sources comma separated absolute paths to use as sources.
     */
    public FSConcat(String path, String[] sources) {
      this.sources = new Path[sources.length];


@@ -159,7 +159,7 @@ public class FieldSelectionMapReduce<K, V>
   }

   public void configure(JobConf job) {
-    this.fieldSeparator = job.get(FieldSelectionHelper.DATA_FIELD_SEPERATOR,
+    this.fieldSeparator = job.get(FieldSelectionHelper.DATA_FIELD_SEPARATOR,
         "\t");
     this.mapOutputKeyValueSpec = job.get(
         FieldSelectionHelper.MAP_OUTPUT_KEY_VALUE_SPEC, "0-:");


@@ -37,7 +37,7 @@ import org.apache.hadoop.mapreduce.JobContext;
  * of the field); if omitted from pos2, it defaults to 0 (the end of the
  * field). opts are ordering options (any of 'nr' as described above).
  * We assume that the fields in the key are separated by
- * {@link JobContext#MAP_OUTPUT_KEY_FIELD_SEPERATOR}
+ * {@link JobContext#MAP_OUTPUT_KEY_FIELD_SEPARATOR}
  */
 @InterfaceAudience.Public
 @InterfaceStability.Stable


@@ -318,7 +318,13 @@ public interface MRJobConfig {
   public static final String MAP_OUTPUT_VALUE_CLASS = "mapreduce.map.output.value.class";

-  public static final String MAP_OUTPUT_KEY_FIELD_SEPERATOR = "mapreduce.map.output.key.field.separator";
+  public static final String MAP_OUTPUT_KEY_FIELD_SEPARATOR = "mapreduce.map.output.key.field.separator";
+
+  /**
+   * @deprecated Use {@link #MAP_OUTPUT_KEY_FIELD_SEPARATOR}
+   */
+  @Deprecated
+  public static final String MAP_OUTPUT_KEY_FIELD_SEPERATOR = MAP_OUTPUT_KEY_FIELD_SEPARATOR;

   public static final String MAP_LOG_LEVEL = "mapreduce.map.log.level";
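
The rename-plus-alias pattern above keeps existing code compiling while steering callers to the new name. A minimal migration sketch, assuming a plain Configuration (the class name SeparatorExample is hypothetical):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.mapreduce.MRJobConfig;

    public class SeparatorExample {  // hypothetical example class
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        // Preferred: the correctly spelled constant introduced by this patch.
        String sep = conf.get(MRJobConfig.MAP_OUTPUT_KEY_FIELD_SEPARATOR, "\t");
        // MAP_OUTPUT_KEY_FIELD_SEPERATOR still compiles, but is now a
        // @Deprecated alias bound to the same key, so behavior is unchanged.
        System.out.println("key field separator: [" + sep + "]");
      }
    }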


@@ -60,8 +60,13 @@ import org.apache.hadoop.io.Text;
 public class FieldSelectionHelper {

   public static Text emptyText = new Text("");
-  public static final String DATA_FIELD_SEPERATOR =
+  public static final String DATA_FIELD_SEPARATOR =
       "mapreduce.fieldsel.data.field.separator";
+  /**
+   * @deprecated Use {@link #DATA_FIELD_SEPARATOR}
+   */
+  @Deprecated
+  public static final String DATA_FIELD_SEPERATOR = DATA_FIELD_SEPARATOR;
   public static final String MAP_OUTPUT_KEY_VALUE_SPEC =
       "mapreduce.fieldsel.map.output.key.value.fields.spec";
   public static final String REDUCE_OUTPUT_KEY_VALUE_SPEC =


@@ -79,7 +79,7 @@ public class FieldSelectionMapper<K, V>
       throws IOException, InterruptedException {
     Configuration conf = context.getConfiguration();
     this.fieldSeparator =
-        conf.get(FieldSelectionHelper.DATA_FIELD_SEPERATOR, "\t");
+        conf.get(FieldSelectionHelper.DATA_FIELD_SEPARATOR, "\t");
     this.mapOutputKeyValueSpec =
         conf.get(FieldSelectionHelper.MAP_OUTPUT_KEY_VALUE_SPEC, "0-:");
     try {


@@ -77,7 +77,7 @@ public class FieldSelectionReducer<K, V>
     Configuration conf = context.getConfiguration();

     this.fieldSeparator =
-        conf.get(FieldSelectionHelper.DATA_FIELD_SEPERATOR, "\t");
+        conf.get(FieldSelectionHelper.DATA_FIELD_SEPARATOR, "\t");

     this.reduceOutputKeyValueSpec =
         conf.get(FieldSelectionHelper.REDUCE_OUTPUT_KEY_VALUE_SPEC, "0-:");


@@ -37,8 +37,13 @@ import org.apache.hadoop.mapreduce.TaskAttemptContext;
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public class KeyValueLineRecordReader extends RecordReader<Text, Text> {
-  public static final String KEY_VALUE_SEPERATOR =
+  public static final String KEY_VALUE_SEPARATOR =
       "mapreduce.input.keyvaluelinerecordreader.key.value.separator";
+  /**
+   * @deprecated Use {@link #KEY_VALUE_SEPARATOR}
+   */
+  @Deprecated
+  public static final String KEY_VALUE_SEPERATOR = KEY_VALUE_SEPARATOR;

   private final LineRecordReader lineRecordReader;
@@ -56,7 +61,7 @@ public class KeyValueLineRecordReader extends RecordReader<Text, Text> {
       throws IOException {

     lineRecordReader = new LineRecordReader();
-    String sepStr = conf.get(KEY_VALUE_SEPERATOR, "\t");
+    String sepStr = conf.get(KEY_VALUE_SEPARATOR, "\t");
     this.separator = (byte) sepStr.charAt(0);
   }
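
Usage of the renamed constant, for reference (the separator value, job name, and choice of KeyValueTextInputFormat below are illustrative assumptions, not part of this patch):

    Configuration conf = new Configuration();
    // Split each input line on the first ':' rather than the default tab.
    conf.set(KeyValueLineRecordReader.KEY_VALUE_SEPARATOR, ":");
    Job job = Job.getInstance(conf, "kv-example");
    job.setInputFormatClass(KeyValueTextInputFormat.class);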


@@ -42,7 +42,12 @@ import org.apache.hadoop.util.*;
 @InterfaceAudience.Public
 @InterfaceStability.Stable
 public class TextOutputFormat<K, V> extends FileOutputFormat<K, V> {
-  public static String SEPERATOR = "mapreduce.output.textoutputformat.separator";
+  public static String SEPARATOR = "mapreduce.output.textoutputformat.separator";
+  /**
+   * @deprecated Use {@link #SEPARATOR}
+   */
+  @Deprecated
+  public static String SEPERATOR = SEPARATOR;

   protected static class LineRecordWriter<K, V>
       extends RecordWriter<K, V> {
     private static final byte[] NEWLINE =
@@ -107,7 +112,7 @@ public class TextOutputFormat<K, V> extends FileOutputFormat<K, V> {
                          ) throws IOException, InterruptedException {
     Configuration conf = job.getConfiguration();
     boolean isCompressed = getCompressOutput(job);
-    String keyValueSeparator= conf.get(SEPERATOR, "\t");
+    String keyValueSeparator= conf.get(SEPARATOR, "\t");
     CompressionCodec codec = null;
     String extension = "";
     if (isCompressed) {
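
The same rename on the output side; a short usage sketch (the comma separator and job name are illustrative, not from the patch):

    Configuration conf = new Configuration();
    // Emit "key,value" lines instead of the default tab-separated output.
    conf.set(TextOutputFormat.SEPARATOR, ",");
    Job job = Job.getInstance(conf, "csv-output");
    job.setOutputFormatClass(TextOutputFormat.class);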


@@ -46,7 +46,7 @@ import org.apache.hadoop.mapreduce.lib.partition.KeyFieldHelper.KeyDescription;
  * of the field); if omitted from pos2, it defaults to 0 (the end of the
  * field). opts are ordering options (any of 'nr' as described above).
  * We assume that the fields in the key are separated by
- * {@link JobContext#MAP_OUTPUT_KEY_FIELD_SEPERATOR}.
+ * {@link JobContext#MAP_OUTPUT_KEY_FIELD_SEPARATOR}.
  */
 @InterfaceAudience.Public
 @InterfaceStability.Stable
@@ -62,7 +62,7 @@ public class KeyFieldBasedComparator<K, V> extends WritableComparator
   public void setConf(Configuration conf) {
     this.conf = conf;
     String option = conf.get(COMPARATOR_OPTIONS);
-    String keyFieldSeparator = conf.get(MRJobConfig.MAP_OUTPUT_KEY_FIELD_SEPERATOR,"\t");
+    String keyFieldSeparator = conf.get(MRJobConfig.MAP_OUTPUT_KEY_FIELD_SEPARATOR,"\t");
     keyFieldHelper.setKeyFieldSeparator(keyFieldSeparator);
     keyFieldHelper.parseOption(option);
   }


@@ -65,7 +65,7 @@ public class KeyFieldBasedPartitioner<K2, V2> extends Partitioner<K2, V2>
     this.conf = conf;
     keyFieldHelper = new KeyFieldHelper();
     String keyFieldSeparator =
-        conf.get(MRJobConfig.MAP_OUTPUT_KEY_FIELD_SEPERATOR, "\t");
+        conf.get(MRJobConfig.MAP_OUTPUT_KEY_FIELD_SEPARATOR, "\t");
     keyFieldHelper.setKeyFieldSeparator(keyFieldSeparator);
     if (conf.get("num.key.fields.for.partition") != null) {
       LOG.warn("Using deprecated num.key.fields.for.partition. " +


@@ -290,7 +290,7 @@ public class ConfigUtil {
       new DeprecationDelta("mapred.mapoutput.value.class",
         MRJobConfig.MAP_OUTPUT_VALUE_CLASS),
       new DeprecationDelta("map.output.key.field.separator",
-        MRJobConfig.MAP_OUTPUT_KEY_FIELD_SEPERATOR),
+        MRJobConfig.MAP_OUTPUT_KEY_FIELD_SEPARATOR),
       new DeprecationDelta("mapred.map.child.log.level",
         MRJobConfig.MAP_LOG_LEVEL),
       new DeprecationDelta("mapred.inmem.merge.threshold",
@@ -412,7 +412,7 @@ public class ConfigUtil {
         ControlledJob.CREATE_DIR),
       new DeprecationDelta("mapred.data.field.separator",
         org.apache.hadoop.mapreduce.lib.fieldsel.
-          FieldSelectionHelper.DATA_FIELD_SEPERATOR),
+          FieldSelectionHelper.DATA_FIELD_SEPARATOR),
       new DeprecationDelta("map.output.key.value.fields.spec",
         org.apache.hadoop.mapreduce.lib.fieldsel.
           FieldSelectionHelper.MAP_OUTPUT_KEY_VALUE_SPEC),
@@ -427,7 +427,7 @@ public class ConfigUtil {
         CombineFileInputFormat.SPLIT_MINSIZE_PERRACK),
       new DeprecationDelta("key.value.separator.in.input.line",
         org.apache.hadoop.mapreduce.lib.input.
-          KeyValueLineRecordReader.KEY_VALUE_SEPERATOR),
+          KeyValueLineRecordReader.KEY_VALUE_SEPARATOR),
       new DeprecationDelta("mapred.linerecordreader.maxlength",
         org.apache.hadoop.mapreduce.lib.input.
           LineRecordReader.MAX_LINE_LENGTH),
@@ -436,7 +436,7 @@ public class ConfigUtil {
         LazyOutputFormat.OUTPUT_FORMAT),
       new DeprecationDelta("mapred.textoutputformat.separator",
         org.apache.hadoop.mapreduce.lib.output.
-          TextOutputFormat.SEPERATOR),
+          TextOutputFormat.SEPARATOR),
       new DeprecationDelta("mapred.join.expr",
         org.apache.hadoop.mapreduce.lib.join.
           CompositeInputFormat.JOIN_EXPR),
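
These DeprecationDelta entries map legacy mapred.* keys onto the renamed constants, so a value set under an old key stays visible under the new one. A sketch of the expected behavior, under the assumption that the deltas have been registered (ConfigUtil does this at class load):

    Configuration conf = new Configuration();
    conf.set("mapred.textoutputformat.separator", "|");  // legacy key
    // Resolved through the registered delta to the new key:
    String sep = conf.get(TextOutputFormat.SEPARATOR);   // expected: "|"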


@@ -65,7 +65,7 @@ class OperationOutput {
     int place = key.indexOf(TYPE_SEP);
     if (place == -1) {
       throw new IllegalArgumentException(
-          "Invalid key format - no type seperator - " + TYPE_SEP);
+          "Invalid key format - no type separator - " + TYPE_SEP);
     }
     try {
       dataType = OutputType.valueOf(
@@ -78,7 +78,7 @@ class OperationOutput {
     place = key.indexOf(MEASUREMENT_SEP);
     if (place == -1) {
       throw new IllegalArgumentException(
-          "Invalid key format - no measurement seperator - " + MEASUREMENT_SEP);
+          "Invalid key format - no measurement separator - " + MEASUREMENT_SEP);
     }
     opType = key.substring(0, place);
     measurementType = key.substring(place + 1);


@@ -77,7 +77,7 @@ private static NumberFormat idFormat = NumberFormat.getInstance();
     job.setOutputFormat(TextOutputFormat.class);
     job.setNumReduceTasks(1);

-    job.set(FieldSelectionHelper.DATA_FIELD_SEPERATOR, "-");
+    job.set(FieldSelectionHelper.DATA_FIELD_SEPARATOR, "-");
     job.set(FieldSelectionHelper.MAP_OUTPUT_KEY_VALUE_SPEC, "6,5,1-3:0-");
     job.set(FieldSelectionHelper.REDUCE_OUTPUT_KEY_VALUE_SPEC, ":4,3,2,1,0,0-");


@@ -63,7 +63,7 @@ public class TestKeyFieldBasedComparator extends HadoopTestCase {
     super(HadoopTestCase.LOCAL_MR, HadoopTestCase.LOCAL_FS, 1, 1);
     conf = createJobConf();
     localConf = createJobConf();
-    localConf.set(JobContext.MAP_OUTPUT_KEY_FIELD_SEPERATOR, " ");
+    localConf.set(JobContext.MAP_OUTPUT_KEY_FIELD_SEPARATOR, " ");
   }

   public void configure(String keySpec, int expect) throws Exception {
@@ -85,7 +85,7 @@ public class TestKeyFieldBasedComparator extends HadoopTestCase {
     conf.setOutputKeyComparatorClass(KeyFieldBasedComparator.class);
     conf.setKeyFieldComparatorOptions(keySpec);
     conf.setKeyFieldPartitionerOptions("-k1.1,1.1");
-    conf.set(JobContext.MAP_OUTPUT_KEY_FIELD_SEPERATOR, " ");
+    conf.set(JobContext.MAP_OUTPUT_KEY_FIELD_SEPARATOR, " ");
     conf.setMapperClass(InverseMapper.class);
     conf.setReducerClass(IdentityReducer.class);
     if (!fs.mkdirs(testdir)) {


@@ -57,7 +57,7 @@ private static NumberFormat idFormat = NumberFormat.getInstance();
     StringBuffer expectedOutput = new StringBuffer();
     constructInputOutputData(inputData, expectedOutput, numOfInputLines);

-    conf.set(FieldSelectionHelper.DATA_FIELD_SEPERATOR, "-");
+    conf.set(FieldSelectionHelper.DATA_FIELD_SEPARATOR, "-");
     conf.set(FieldSelectionHelper.MAP_OUTPUT_KEY_VALUE_SPEC, "6,5,1-3:0-");
     conf.set(
         FieldSelectionHelper.REDUCE_OUTPUT_KEY_VALUE_SPEC, ":4,3,2,1,0,0-");


@@ -50,7 +50,7 @@ public class TestMRKeyFieldBasedComparator extends HadoopTestCase {
   public TestMRKeyFieldBasedComparator() throws IOException {
     super(HadoopTestCase.LOCAL_MR, HadoopTestCase.LOCAL_FS, 1, 1);
     conf = createJobConf();
-    conf.set(MRJobConfig.MAP_OUTPUT_KEY_FIELD_SEPERATOR, " ");
+    conf.set(MRJobConfig.MAP_OUTPUT_KEY_FIELD_SEPARATOR, " ");
   }

   private void testComparator(String keySpec, int expect)
@@ -61,7 +61,7 @@ public class TestMRKeyFieldBasedComparator extends HadoopTestCase {
     conf.set("mapreduce.partition.keycomparator.options", keySpec);
     conf.set("mapreduce.partition.keypartitioner.options", "-k1.1,1.1");
-    conf.set(MRJobConfig.MAP_OUTPUT_KEY_FIELD_SEPERATOR, " ");
+    conf.set(MRJobConfig.MAP_OUTPUT_KEY_FIELD_SEPARATOR, " ");
     Job job = MapReduceTestUtil.createJob(conf, inDir, outDir, 1, 1,
         line1 +"\n" + line2 + "\n");


@@ -1897,7 +1897,7 @@ public class YarnConfiguration extends Configuration {
   public static final float DEFAULT_TIMELINE_SERVICE_VERSION = 1.0f;

   /**
-   * Comma seperated list of names for UIs hosted in the timeline server
+   * Comma separated list of names for UIs hosted in the timeline server
    * (For pluggable UIs).
    */
   public static final String TIMELINE_SERVICE_UI_NAMES =