From 4840775e3d1485af3983f63ece2fc394b89563ef Mon Sep 17 00:00:00 2001
From: Suresh Srinivas
Date: Tue, 26 Feb 2013 00:10:35 +0000
Subject: [PATCH] HADOOP-9323. Fix typos in API documentation. Contributed by
 Suresh Srinivas.

git-svn-id: https://svn.apache.org/repos/asf/hadoop/common/trunk@1449977 13f79535-47bb-0310-9956-ffa450edef68
---
 .../hadoop-common/CHANGES.txt                 |  2 ++
 .../apache/hadoop/fs/ChecksumFileSystem.java  | 20 +------------------
 .../org/apache/hadoop/fs/FileContext.java     |  2 +-
 .../java/org/apache/hadoop/fs/FileSystem.java |  2 +-
 .../apache/hadoop/fs/PositionedReadable.java  |  2 +-
 .../org/apache/hadoop/fs/TrashPolicy.java     | 12 +++++------
 .../org/apache/hadoop/io/BytesWritable.java   |  2 +-
 .../main/java/org/apache/hadoop/io/Text.java  |  4 ++--
 .../java/org/apache/hadoop/record/Buffer.java |  2 +-
 .../apache/hadoop/record/RecordOutput.java    |  2 +-
 10 files changed, 16 insertions(+), 34 deletions(-)

diff --git a/hadoop-common-project/hadoop-common/CHANGES.txt b/hadoop-common-project/hadoop-common/CHANGES.txt
index 800a1b5fa80..2828a584696 100644
--- a/hadoop-common-project/hadoop-common/CHANGES.txt
+++ b/hadoop-common-project/hadoop-common/CHANGES.txt
@@ -385,6 +385,8 @@ Release 2.0.4-beta - UNRELEASED
     HADOOP-8569. CMakeLists.txt: define _GNU_SOURCE and _LARGEFILE_SOURCE.
     (Colin Patrick McCabe via atm)
 
+    HADOOP-9323. Fix typos in API documentation. (suresh)
+
 Release 2.0.3-alpha - 2013-02-06
 
   INCOMPATIBLE CHANGES
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java
index 42ee8702688..2a8db698d48 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java
@@ -21,8 +21,6 @@ package org.apache.hadoop.fs;
 import java.io.*;
 import java.util.Arrays;
 
-import org.apache.commons.logging.Log;
-import org.apache.commons.logging.LogFactory;
 import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 import org.apache.hadoop.conf.Configuration;
@@ -32,7 +30,7 @@ import org.apache.hadoop.util.PureJavaCrc32;
 
 /****************************************************************
  * Abstract Checksumed FileSystem.
- * It provide a basice implementation of a Checksumed FileSystem,
+ * It provide a basic implementation of a Checksumed FileSystem,
  * which creates a checksum file for each raw file.
  * It generates & verifies checksums at the client side.
  *
@@ -118,9 +116,6 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
    * It verifies that data matches checksums.
   *******************************************************/
  private static class ChecksumFSInputChecker extends FSInputChecker {
-    public static final Log LOG
-      = LogFactory.getLog(FSInputChecker.class);
-
    private ChecksumFileSystem fs;
    private FSDataInputStream datas;
    private FSDataInputStream sums;
@@ -374,19 +369,6 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
     private FSDataOutputStream sums;
     private static final float CHKSUM_AS_FRACTION = 0.01f;
 
-    public ChecksumFSOutputSummer(ChecksumFileSystem fs,
-                          Path file,
-                          boolean overwrite,
-                          short replication,
-                          long blockSize,
-                          Configuration conf)
-      throws IOException {
-      this(fs, file, overwrite,
-           conf.getInt(LocalFileSystemConfigKeys.LOCAL_FS_STREAM_BUFFER_SIZE_KEY,
-                       LocalFileSystemConfigKeys.LOCAL_FS_STREAM_BUFFER_SIZE_DEFAULT),
-           replication, blockSize, null);
-    }
-
     public ChecksumFSOutputSummer(ChecksumFileSystem fs,
                           Path file,
                           boolean overwrite,
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
index d4ff03785c1..26f50503fef 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
@@ -1326,7 +1326,7 @@ public final class FileContext {
    *
    * 2. Partially qualified URIs (eg scheme but no host)
    *
-   *    fs:///A/B/file  Resolved according to the target file sytem. Eg resolving
+   *    fs:///A/B/file  Resolved according to the target file system. Eg resolving
    *                    a symlink to hdfs:///A results in an exception because
    *                    HDFS URIs must be fully qualified, while a symlink to
    *                    file:///A will not since Hadoop's local file systems
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
index 00a54f70cd3..a26d3570586 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
@@ -1864,7 +1864,7 @@ public abstract class FileSystem extends Configured implements Closeable {
    *
    * Some file systems like LocalFileSystem have an initial workingDir
    * that we use as the starting workingDir. For other file systems
-   * like HDFS there is no built in notion of an inital workingDir.
+   * like HDFS there is no built in notion of an initial workingDir.
    *
    * @return if there is built in notion of workingDir then it
    * is returned; else a null is returned.
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/PositionedReadable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/PositionedReadable.java
index a79157b65da..a2384cd8b0b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/PositionedReadable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/PositionedReadable.java
@@ -43,7 +43,7 @@ public interface PositionedReadable {
     throws IOException;
 
   /**
-   * Read number of bytes equalt to the length of the buffer, from a given
+   * Read number of bytes equal to the length of the buffer, from a given
   * position within a file. This does not
   * change the current offset of a file, and is thread-safe.
   */
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicy.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicy.java
index a168f7012e4..eab83b3ca3b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicy.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicy.java
@@ -79,19 +79,17 @@ public abstract class TrashPolicy extends Configured {
 
   /**
    * Get an instance of the configured TrashPolicy based on the value
-   * of the configuration paramater fs.trash.classname.
+   * of the configuration parameter fs.trash.classname.
    *
    * @param conf the configuration to be used
    * @param fs the file system to be used
    * @param home the home directory
    * @return an instance of TrashPolicy
    */
-  public static TrashPolicy getInstance(Configuration conf, FileSystem fs, Path home)
-      throws IOException {
-    Class<? extends TrashPolicy> trashClass = conf.getClass("fs.trash.classname",
-                                                            TrashPolicyDefault.class,
-                                                            TrashPolicy.class);
-    TrashPolicy trash = (TrashPolicy) ReflectionUtils.newInstance(trashClass, conf);
+  public static TrashPolicy getInstance(Configuration conf, FileSystem fs, Path home) {
+    Class<? extends TrashPolicy> trashClass = conf.getClass(
+        "fs.trash.classname", TrashPolicyDefault.class, TrashPolicy.class);
+    TrashPolicy trash = ReflectionUtils.newInstance(trashClass, conf);
     trash.initialize(conf, fs, home); // initialize TrashPolicy
     return trash;
   }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BytesWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BytesWritable.java
index 7e42a36cb76..155df3a34ca 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BytesWritable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BytesWritable.java
@@ -27,7 +27,7 @@ import org.apache.hadoop.classification.InterfaceStability;
 
 /**
  * A byte sequence that is usable as a key or value.
- * It is resizable and distinguishes between the size of the seqeunce and
+ * It is resizable and distinguishes between the size of the sequence and
  * the current capacity. The hash function is the front of the md5 of the
  * buffer. The sort order is the same as memcmp.
  */
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java
index 95fb174a9d7..a5c8b1ecd5c 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java
@@ -128,7 +128,7 @@ public class Text extends BinaryComparable
  /**
   * Returns the Unicode Scalar Value (32-bit integer value)
   * for the character at position. Note that this
-  * method avoids using the converter or doing String instatiation
+  * method avoids using the converter or doing String instantiation
   * @return the Unicode scalar value at position or -1
   *         if the position is invalid or points to a
   *         trailing byte
@@ -527,7 +527,7 @@ public class Text extends BinaryComparable
     int length = 0;
     int state = LEAD_BYTE;
     while (count < start+len) {
-      int aByte = ((int) utf8[count] & 0xFF);
+      int aByte = utf8[count] & 0xFF;
 
       switch (state) {
       case LEAD_BYTE:
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/Buffer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/Buffer.java
index eb569271d27..50cc1a1912f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/Buffer.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/Buffer.java
@@ -192,7 +192,7 @@ public class Buffer implements Comparable, Cloneable {
     int hash = 1;
     byte[] b = this.get();
     for (int i = 0; i < count; i++)
-      hash = (31 * hash) + (int)b[i];
+      hash = (31 * hash) + b[i];
     return hash;
   }
 
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/RecordOutput.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/RecordOutput.java
index b2f9f349ddf..503ea35f794 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/RecordOutput.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/record/RecordOutput.java
@@ -26,7 +26,7 @@ import org.apache.hadoop.classification.InterfaceAudience;
 import org.apache.hadoop.classification.InterfaceStability;
 
 /**
- * Interface that alll the serializers have to implement.
+ * Interface that all the serializers have to implement.
  *
  * @deprecated Replaced by Avro.
  */
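
Note on the TrashPolicy.getInstance() change above: besides the javadoc typo fix, the method drops its throws IOException clause and the explicit (TrashPolicy) cast, since ReflectionUtils.newInstance() already returns the declared type. The sketch below is not part of the patch; it is a minimal, illustrative caller showing how the method reads after this change. The class name TrashPolicyExample and the path /tmp/example-file are made up, while Configuration, FileSystem, Path, TrashPolicy.getInstance(), isEnabled(), and moveToTrash() are existing Hadoop APIs.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.TrashPolicy;

public class TrashPolicyExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);

    // After this patch: no throws clause and no cast. The policy class is
    // read from fs.trash.classname (default TrashPolicyDefault) and comes
    // back already initialized for this FileSystem and home directory.
    TrashPolicy trash = TrashPolicy.getInstance(conf, fs, fs.getHomeDirectory());

    // Illustrative use of the returned policy; the path is hypothetical.
    Path victim = new Path("/tmp/example-file");
    if (trash.isEnabled()) {
      trash.moveToTrash(victim);
    }
  }
}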