diff --git a/hadoop-common-project/hadoop-common/pom.xml b/hadoop-common-project/hadoop-common/pom.xml
index 6e762f567c1..d8e2dd35422 100644
--- a/hadoop-common-project/hadoop-common/pom.xml
+++ b/hadoop-common-project/hadoop-common/pom.xml
@@ -1171,6 +1171,16 @@
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-javadoc-plugin</artifactId>
+        <configuration>
+          <sourceFileExcludes>
+            <sourceFileExclude>**/FSProtos.java</sourceFileExclude>
+          </sourceFileExcludes>
+          <excludePackageNames>*.proto:*.tracing:*.protobuf</excludePackageNames>
+        </configuration>
+      </plugin>
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ConfigRedactor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ConfigRedactor.java
index 5b2d1449f9c..881a2ce811b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ConfigRedactor.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ConfigRedactor.java
@@ -57,8 +57,8 @@ public class ConfigRedactor {
* Given a key / value pair, decides whether or not to redact and returns
* either the original value or text indicating it has been redacted.
*
- * @param key
- * @param value
+ * @param key the property key to check.
+ * @param value the property value; returned unchanged if the key is not sensitive.
* @return Original value, or text indicating it has been redacted
*/
public String redact(String key, String value) {
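For reviewers unfamiliar with this API, a minimal sketch of how `redact` is driven; the key names here are illustrative, and which keys count as sensitive is governed by `hadoop.security.sensitive-config-keys`:

```java
import org.apache.hadoop.conf.ConfigRedactor;
import org.apache.hadoop.conf.Configuration;

public class RedactorDemo {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    ConfigRedactor redactor = new ConfigRedactor(conf);
    // A key matching the sensitive patterns: the value is replaced.
    System.out.println(redactor.redact("ssl.server.keystore.password", "secret"));
    // A non-sensitive key: the value is returned unchanged.
    System.out.println(redactor.redact("fs.defaultFS", "hdfs://nn:8020"));
  }
}
```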
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
index 1f809b7b547..5f720841d76 100755
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configuration.java
@@ -317,7 +317,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
private boolean loadDefaults = true;
/**
- * Configuration objects
+ * Configuration objects.
*/
private static final WeakHashMap<Configuration,Object> REGISTRY =
new WeakHashMap<Configuration,Object>();
@@ -1908,6 +1908,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
* @param name Property name
* @param vStr The string value with time unit suffix to be converted.
* @param unit Unit to convert the stored property, if it exists.
+ * @return the time duration in the given time unit.
*/
public long getTimeDurationHelper(String name, String vStr, TimeUnit unit) {
return getTimeDurationHelper(name, vStr, unit, unit);
@@ -1922,6 +1923,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
* @param vStr The string value with time unit suffix to be converted.
* @param defaultUnit Unit to convert the stored property, if it exists.
* @param returnUnit Unit for the returned value.
+ * @return the time duration in the given time unit.
*/
private long getTimeDurationHelper(String name, String vStr,
TimeUnit defaultUnit, TimeUnit returnUnit) {
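The helper documented here backs the public `getTimeDuration` overloads, which parse a unit suffix from the stored string; a small usage sketch (the property name is made up):

```java
import java.util.concurrent.TimeUnit;
import org.apache.hadoop.conf.Configuration;

public class TimeDurationDemo {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    conf.set("demo.retry.interval", "30s");
    // The "s" suffix is parsed and the value converted to the requested unit.
    long millis = conf.getTimeDuration("demo.retry.interval",
        1000L, TimeUnit.MILLISECONDS);
    System.out.println(millis); // 30000
  }
}
```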
@@ -2206,7 +2208,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
}
/**
- * Is the given value in the set of ranges
+ * Is the given value in the set of ranges.
* @param value the value to check
* @return is the value in the ranges?
*/
@@ -2263,7 +2265,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
}
/**
- * Parse the given attribute as a set of integer ranges
+ * Parse the given attribute as a set of integer ranges.
* @param name the attribute name
* @param defaultValue the default value if it is not set
* @return a new set of ranges from the configured value
@@ -2482,7 +2484,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
/**
* Fallback to clear text passwords in configuration.
- * @param name
+ * @param name the property name.
* @return clear text password or null
*/
protected char[] getPasswordFromConfig(String name) {
@@ -2547,6 +2549,8 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
/**
* Set the socket address for the <code>name</code> property as
* a <code>host:port</code>.
+ * @param name property name.
+ * @param addr the InetSocketAddress to store.
*/
public void setSocketAddr(String name, InetSocketAddress addr) {
set(name, NetUtils.getHostPortString(addr));
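A round trip through the socket-address accessors, assuming a hypothetical property name:

```java
import java.net.InetSocketAddress;
import org.apache.hadoop.conf.Configuration;

public class SocketAddrDemo {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    conf.setSocketAddr("demo.service.address",
        new InetSocketAddress("host1.example.com", 9000));
    // Stored as "host1.example.com:9000"; the defaults apply only when
    // the property is absent.
    InetSocketAddress addr =
        conf.getSocketAddr("demo.service.address", "0.0.0.0:9000", 9000);
    System.out.println(addr);
  }
}
```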
@@ -2724,6 +2728,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
* @param name the conf key name.
* @param defaultValue default value.
* @param xface the interface implemented by the named class.
+ * @param <U> Interface class type.
* @return property value as a <code>Class</code>,
* or <code>defaultValue</code>.
*/
@@ -2753,6 +2758,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
* @param name the property name.
* @param xface the interface implemented by the classes named by
* <code>name</code>.
+ * @param <U> Interface class type.
* @return a List of objects implementing <code>xface</code>.
*/
@SuppressWarnings("unchecked")
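The `<U>` tag added above documents the interface type parameter; a sketch of how it is inferred at a call site (the property name is made up, and codec classes from `org.apache.hadoop.io.compress` are used purely for illustration):

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.DefaultCodec;
import org.apache.hadoop.io.compress.GzipCodec;

public class GetClassDemo {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    conf.setClass("demo.codec.impl", GzipCodec.class, CompressionCodec.class);
    // <U> is inferred as CompressionCodec from the xface argument, so the
    // returned Class is statically known to implement that interface.
    Class<? extends CompressionCodec> cls = conf.getClass(
        "demo.codec.impl", DefaultCodec.class, CompressionCodec.class);
    System.out.println(cls.getName());
  }
}
```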
@@ -2785,15 +2791,16 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
set(name, theClass.getName());
}
- /**
+ /**
* Get a local file under a directory named by <i>dirsProp</i> with
* the given <i>path</i>. If <i>dirsProp</i> contains multiple directories,
* then one is chosen based on <i>path</i>'s hash code. If the selected
* directory does not exist, an attempt is made to create it.
- *
+ *
* @param dirsProp directory in which to locate the file.
* @param path file-path.
* @return local file under the directory with the given path.
+ * @throws IOException raised on errors performing I/O.
*/
public Path getLocalPath(String dirsProp, String path)
throws IOException {
@@ -2817,15 +2824,16 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
throw new IOException("No valid local directories in property: "+dirsProp);
}
- /**
+ /**
* Get a local file name under a directory named in <i>dirsProp</i> with
* the given <i>path</i>. If <i>dirsProp</i> contains multiple directories,
* then one is chosen based on <i>path</i>'s hash code. If the selected
* directory does not exist, an attempt is made to create it.
- *
+ *
* @param dirsProp directory in which to locate the file.
* @param path file-path.
* @return local file under the directory with the given path.
+ * @throws IOException raised on errors performing I/O.
*/
public File getFile(String dirsProp, String path)
throws IOException {
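A sketch of the hash-based directory selection both methods document; the property name and paths are illustrative:

```java
import java.io.File;
import org.apache.hadoop.conf.Configuration;

public class LocalDirsDemo {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Two candidate local directories; one is picked by the hash of the
    // relative path, and created if it does not yet exist.
    conf.set("demo.local.dirs", "/tmp/data1,/tmp/data2");
    File spill = conf.getFile("demo.local.dirs", "job_0001/spill0.out");
    System.out.println(spill.getAbsolutePath());
  }
}
```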
@@ -3437,7 +3445,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
/**
* Add tags defined in HADOOP_TAGS_SYSTEM, HADOOP_TAGS_CUSTOM.
- * @param prop
+ * @param prop properties.
*/
public void addTags(Properties prop) {
// Get all system tags
@@ -3538,7 +3546,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
/**
* Print a warning if a property with a given name already exists with a
- * different value
+ * different value.
*/
private void checkForOverride(Properties properties, String name, String attr, String value) {
String propertyValue = properties.getProperty(attr);
@@ -3548,11 +3556,12 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
}
}
- /**
+ /**
* Write out the non-default properties in this configuration to the given
* {@link OutputStream} using UTF-8 encoding.
- *
+ *
* @param out the output stream to write to.
+ * @throws IOException raised on errors performing I/O.
*/
public void writeXml(OutputStream out) throws IOException {
writeXml(new OutputStreamWriter(out, "UTF-8"));
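What the documented behavior looks like in practice; only non-default properties appear in the emitted XML:

```java
import java.io.ByteArrayOutputStream;
import org.apache.hadoop.conf.Configuration;

public class WriteXmlDemo {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    conf.set("demo.example.key", "value");
    ByteArrayOutputStream out = new ByteArrayOutputStream();
    conf.writeXml(out); // written as UTF-8, per the javadoc above
    System.out.println(out.toString("UTF-8"));
  }
}
```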
@@ -3582,7 +3591,9 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
* the configuration, this method throws an {@link IllegalArgumentException}.
*
*
+ * @param propertyName the property name to write; all properties if null.
* @param out the writer to write to.
+ * @throws IOException raised on errors performing I/O.
*/
public void writeXml(@Nullable String propertyName, Writer out)
throws IOException, IllegalArgumentException {
@@ -3736,7 +3747,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
* @param config the configuration
* @param propertyName property name
* @param out the Writer to write to
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
* @throws IllegalArgumentException when property name is not
* empty and the property is not found in configuration
**/
@@ -3783,7 +3794,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
*
* @param config the configuration
* @param out the Writer to write to
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public static void dumpConfiguration(Configuration config,
Writer out) throws IOException {
@@ -3812,7 +3823,7 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
* @param jsonGen json writer
* @param config configuration
* @param name property name
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
private static void appendJSONProperty(JsonGenerator jsonGen,
Configuration config, String name, ConfigRedactor redactor)
@@ -3894,7 +3905,10 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
return this.quietmode;
}
- /** For debugging. List non-default properties to the terminal and exit. */
+ /** For debugging. List non-default properties to the terminal and exit.
+ * @param args command-line arguments, currently unused.
+ * @throws Exception if writing the configuration fails.
+ */
public static void main(String[] args) throws Exception {
new Configuration().writeXml(System.out);
}
@@ -3928,8 +3942,8 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
}
/**
- * get keys matching the the regex
- * @param regex
+ * Get keys matching the regex.
+ * @param regex the regex to match against.
* @return {@literal Map<String,String>} with matching keys
*/
public Map<String,String> getValByRegex(String regex) {
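For context, the fixed javadoc describes behavior like this (property names borrowed from YARN purely for illustration):

```java
import java.util.Map;
import org.apache.hadoop.conf.Configuration;

public class RegexDemo {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    conf.set("yarn.nodemanager.resource.memory-mb", "8192");
    conf.set("yarn.nodemanager.resource.cpu-vcores", "8");
    // Every key whose name matches the pattern, with its value.
    Map<String, String> matches =
        conf.getValByRegex("^yarn\\.nodemanager\\.resource\\..*");
    matches.forEach((k, v) -> System.out.println(k + " = " + v));
  }
}
```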
@@ -3974,6 +3988,8 @@ public class Configuration implements Iterable<Map.Entry<String,String>>,
/**
* Returns whether or not a deprecated name has been warned. If the name is not
* deprecated then always return false
+ * @param name the property name.
+ * @return true if a deprecation warning was already logged for this name.
*/
public static boolean hasWarnedDeprecation(String name) {
DeprecationContext deprecations = deprecationContext.get();
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configured.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configured.java
index f06af2b98df..77a7117d196 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configured.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Configured.java
@@ -33,7 +33,9 @@ public class Configured implements Configurable {
this(null);
}
- /** Construct a Configured. */
+ /** Construct a Configured.
+ * @param conf the Configuration object.
+ */
public Configured(Configuration conf) {
setConf(conf);
}
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Reconfigurable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Reconfigurable.java
index c93dc31a881..915faf4c237 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Reconfigurable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/Reconfigurable.java
@@ -33,6 +33,9 @@ public interface Reconfigurable extends Configurable {
* (or null if it was not previously set). If newVal is null, set the property
* to its default value;
*
+ * @param property property name.
+ * @param newVal new value.
+ * @throws ReconfigurationException if there was an error applying newVal.
* If the property cannot be changed, throw a
* {@link ReconfigurationException}.
*/
@@ -45,11 +48,14 @@ public interface Reconfigurable extends Configurable {
* If isPropertyReconfigurable returns true for a property,
* then changeConf should not throw an exception when changing
* this property.
+ * @param property property name.
+ * @return true if the property is reconfigurable; false if not.
*/
boolean isPropertyReconfigurable(String property);
/**
* Return all the properties that can be changed at run time.
+ * @return the collection of reconfigurable properties.
*/
Collection<String> getReconfigurableProperties();
}
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurableBase.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurableBase.java
index 35dfeb99f0b..1c451ca6d30 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurableBase.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurableBase.java
@@ -79,6 +79,7 @@ public abstract class ReconfigurableBase
/**
* Construct a ReconfigurableBase with the {@link Configuration}
* conf.
+ * @param conf configuration.
*/
public ReconfigurableBase(Configuration conf) {
super((conf == null) ? new Configuration() : conf);
@@ -91,6 +92,7 @@ public abstract class ReconfigurableBase
/**
* Create a new configuration.
+ * @return configuration.
*/
protected abstract Configuration getNewConf();
@@ -162,6 +164,7 @@ public abstract class ReconfigurableBase
/**
* Start a reconfiguration task to reload configuration in background.
+ * @throws IOException raised on errors performing I/O.
*/
public void startReconfigurationTask() throws IOException {
synchronized (reconfigLock) {
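To make the new tags concrete, a minimal hypothetical subclass wiring up the three members this base class requires; the property name is invented:

```java
import java.util.Collection;
import java.util.Collections;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.ReconfigurableBase;
import org.apache.hadoop.conf.ReconfigurationException;

public class DemoService extends ReconfigurableBase {
  public DemoService(Configuration conf) {
    super(conf);
  }

  @Override
  protected Configuration getNewConf() {
    // Reload from default resources; a real service might read a file.
    return new Configuration();
  }

  @Override
  public Collection<String> getReconfigurableProperties() {
    return Collections.singletonList("demo.service.threads");
  }

  @Override
  protected String reconfigurePropertyImpl(String property, String newVal)
      throws ReconfigurationException {
    // Apply newVal to the running service here; the returned value is
    // recorded as the effective value.
    return newVal;
  }
}
```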
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationException.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationException.java
index 0935bf025fd..b22af76c9eb 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationException.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationException.java
@@ -59,6 +59,10 @@ public class ReconfigurationException extends Exception {
/**
* Create a new instance of {@link ReconfigurationException}.
+ * @param property property name.
+ * @param newVal new value.
+ * @param oldVal old value.
+ * @param cause original exception.
*/
public ReconfigurationException(String property,
String newVal, String oldVal,
@@ -71,6 +75,9 @@ public class ReconfigurationException extends Exception {
/**
* Create a new instance of {@link ReconfigurationException}.
+ * @param property property name.
+ * @param newVal new value.
+ * @param oldVal old value.
*/
public ReconfigurationException(String property,
String newVal, String oldVal) {
@@ -82,6 +89,7 @@ public class ReconfigurationException extends Exception {
/**
* Get property that cannot be changed.
+ * @return the property name.
*/
public String getProperty() {
return property;
@@ -89,6 +97,7 @@ public class ReconfigurationException extends Exception {
/**
* Get value to which property was supposed to be changed.
+ * @return new value.
*/
public String getNewValue() {
return newVal;
@@ -96,6 +105,7 @@ public class ReconfigurationException extends Exception {
/**
* Get old value of property that cannot be changed.
+ * @return old value.
*/
public String getOldValue() {
return oldVal;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationTaskStatus.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationTaskStatus.java
index 05ec90758e5..ca9ddb61566 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationTaskStatus.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/conf/ReconfigurationTaskStatus.java
@@ -42,7 +42,8 @@ public class ReconfigurationTaskStatus {
/**
* Return true if
* - A reconfiguration task has finished or
- * - an active reconfiguration task is running
+ * - an active reconfiguration task is running.
+ * @return true if startTime > 0; false if not.
*/
public boolean hasTask() {
return startTime > 0;
@@ -51,6 +52,7 @@ public class ReconfigurationTaskStatus {
/**
* Return true if the latest reconfiguration task has finished and there is
* no another active task running.
+ * @return true if endTime > 0; false if not.
*/
public boolean stopped() {
return endTime > 0;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoCodec.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoCodec.java
index 64c754faa59..e6813b96a26 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoCodec.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoCodec.java
@@ -145,14 +145,18 @@ public abstract class CryptoCodec implements Configurable, Closeable {
public abstract CipherSuite getCipherSuite();
/**
- * Create a {@link org.apache.hadoop.crypto.Encryptor}.
- * @return Encryptor the encryptor
+ * Create a {@link org.apache.hadoop.crypto.Encryptor}.
+ *
+ * @return Encryptor the encryptor.
+ * @throws GeneralSecurityException if the encryptor cannot be created.
*/
public abstract Encryptor createEncryptor() throws GeneralSecurityException;
-
+
/**
* Create a {@link org.apache.hadoop.crypto.Decryptor}.
+ *
* @return Decryptor the decryptor
+ * @throws GeneralSecurityException if the decryptor cannot be created.
*/
public abstract Decryptor createDecryptor() throws GeneralSecurityException;
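A hedged sketch of the encryptor life-cycle these tags document; the key and IV handling is deliberately oversimplified (real code obtains them from a key provider):

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.crypto.CryptoCodec;
import org.apache.hadoop.crypto.Encryptor;

public class CodecDemo {
  public static void main(String[] args) throws Exception {
    CryptoCodec codec = CryptoCodec.getInstance(new Configuration());
    byte[] key = new byte[16]; // NOT a real key
    byte[] iv = new byte[16];  // NOT a real IV
    // createEncryptor() may throw GeneralSecurityException, per the javadoc.
    Encryptor encryptor = codec.createEncryptor();
    encryptor.init(key, iv);
    System.out.println(codec.getCipherSuite());
  }
}
```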
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoInputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoInputStream.java
index 5ab5d341fb8..067abde9dfb 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoInputStream.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoInputStream.java
@@ -157,7 +157,7 @@ public class CryptoInputStream extends FilterInputStream implements
* @param off the buffer offset.
* @param len the maximum number of decrypted data bytes to read.
* @return int the total number of decrypted data bytes read into the buffer.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
@Override
public int read(byte[] b, int off, int len) throws IOException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoOutputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoOutputStream.java
index 8e752211255..2a1335b6e74 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoOutputStream.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoOutputStream.java
@@ -146,7 +146,7 @@ public class CryptoOutputStream extends FilterOutputStream implements
* @param b the data.
* @param off the start offset in the data.
* @param len the number of bytes to write.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
@Override
public synchronized void write(byte[] b, int off, int len) throws IOException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoStreamUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoStreamUtils.java
index 318975fd6ce..dad4d20df2a 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoStreamUtils.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/CryptoStreamUtils.java
@@ -39,7 +39,11 @@ public class CryptoStreamUtils {
private static final Logger LOG =
LoggerFactory.getLogger(CryptoStreamUtils.class);
- /** Forcibly free the direct buffer. */
+ /**
+ * Forcibly free the direct buffer.
+ *
+ * @param buffer the direct buffer to free.
+ */
public static void freeDB(ByteBuffer buffer) {
if (CleanerUtil.UNMAP_SUPPORTED) {
try {
@@ -52,13 +56,22 @@ public class CryptoStreamUtils {
}
}
- /** Read crypto buffer size */
+ /**
+ * Read crypto buffer size.
+ *
+ * @param conf configuration.
+ * @return the configured value of hadoop.security.crypto.buffer.size.
+ */
public static int getBufferSize(Configuration conf) {
return conf.getInt(HADOOP_SECURITY_CRYPTO_BUFFER_SIZE_KEY,
HADOOP_SECURITY_CRYPTO_BUFFER_SIZE_DEFAULT);
}
-
- /** AES/CTR/NoPadding or SM4/CTR/NoPadding is required. */
+
+ /**
+ * AES/CTR/NoPadding or SM4/CTR/NoPadding is required.
+ *
+ * @param codec crypto codec.
+ */
public static void checkCodec(CryptoCodec codec) {
if (codec.getCipherSuite() != CipherSuite.AES_CTR_NOPADDING &&
codec.getCipherSuite() != CipherSuite.SM4_CTR_NOPADDING) {
@@ -67,17 +80,27 @@ public class CryptoStreamUtils {
}
}
- /** Check and floor buffer size */
+ /**
+ * Check and floor buffer size.
+ *
+ * @param codec crypto codec.
+ * @param bufferSize the size of the buffer to be used.
+ * @return the buffer size floored to a multiple of the cipher block size.
+ */
public static int checkBufferSize(CryptoCodec codec, int bufferSize) {
Preconditions.checkArgument(bufferSize >= MIN_BUFFER_SIZE,
"Minimum value of buffer size is " + MIN_BUFFER_SIZE + ".");
return bufferSize - bufferSize % codec.getCipherSuite()
.getAlgorithmBlockSize();
}
-
+
/**
* If input stream is {@link org.apache.hadoop.fs.Seekable}, return it's
* current position, otherwise return 0;
+ *
+ * @param in the input stream.
+ * @return the current position if the stream is seekable, otherwise 0.
+ * @throws IOException raised on errors performing I/O.
*/
public static long getInputStreamOffset(InputStream in) throws IOException {
if (in instanceof Seekable) {
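The flooring in `checkBufferSize` above is plain modular arithmetic; for a 16-byte block cipher such as AES/CTR:

```java
public class FloorDemo {
  public static void main(String[] args) {
    int requested = 8191;
    int blockSize = 16; // AES/CTR algorithm block size
    // Same expression as checkBufferSize: round down to a block multiple.
    int floored = requested - requested % blockSize;
    System.out.println(floored); // 8176
  }
}
```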
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslCipher.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslCipher.java
index 0c65b74b291..b166cfc8611 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslCipher.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/OpensslCipher.java
@@ -225,34 +225,33 @@ public final class OpensslCipher {
output.position(output.position() + len);
return len;
}
-
+
/**
* Finishes a multiple-part operation. The data is encrypted or decrypted,
* depending on how this cipher was initialized.
*
- *
* The result is stored in the output buffer. Upon return, the output buffer's
* position will have advanced by n, where n is the value returned by this
* method; the output buffer's limit will not have changed.
- *
- *
+ *
* If <code>output.remaining()</code> bytes are insufficient to hold the result,
* a <code>ShortBufferException</code> is thrown.
*
- *
* Upon finishing, this method resets this cipher object to the state it was
* in when previously initialized. That is, the object is available to encrypt
* or decrypt more data.
- *
- *
- * If any exception is thrown, this cipher object need to be reset before it
+ *
+ * If any exception is thrown, this cipher object needs to be reset before it
* can be used again.
- *
+ *
* @param output the output ByteBuffer
* @return int number of bytes stored in output
- * @throws ShortBufferException
- * @throws IllegalBlockSizeException
- * @throws BadPaddingException
+ * @throws ShortBufferException if there is insufficient space in the output buffer.
+ * @throws IllegalBlockSizeException This exception is thrown when the length
+ * of data provided to a block cipher is incorrect.
+ * @throws BadPaddingException This exception is thrown when a particular
+ * padding mechanism is expected for the input
+ * data but the data is not padded properly.
*/
public int doFinal(ByteBuffer output) throws ShortBufferException,
IllegalBlockSizeException, BadPaddingException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java
index dafdaf7e15b..4d1674bd7b8 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProvider.java
@@ -242,7 +242,7 @@ public abstract class KeyProvider implements Closeable {
/**
* Serialize the metadata to a set of bytes.
* @return the serialized bytes
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
protected byte[] serialize() throws IOException {
ByteArrayOutputStream buffer = new ByteArrayOutputStream();
@@ -281,7 +281,7 @@ public abstract class KeyProvider implements Closeable {
/**
* Deserialize a new metadata object from a set of bytes.
* @param bytes the serialized metadata
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
protected Metadata(byte[] bytes) throws IOException {
String cipher = null;
@@ -450,7 +450,7 @@ public abstract class KeyProvider implements Closeable {
* when decrypting data.
* @param versionName the name of a specific version of the key
* @return the key material
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public abstract KeyVersion getKeyVersion(String versionName
) throws IOException;
@@ -458,14 +458,15 @@ public abstract class KeyProvider implements Closeable {
/**
* Get the key names for all keys.
* @return the list of key names
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public abstract List<String> getKeys() throws IOException;
/**
* Get key metadata in bulk.
* @param names the names of the keys to get
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
+ * @return array of key metadata.
*/
public Metadata[] getKeysMetadata(String... names) throws IOException {
Metadata[] result = new Metadata[names.length];
@@ -477,8 +478,10 @@ public abstract class KeyProvider implements Closeable {
/**
* Get the key material for all versions of a specific key name.
+ *
+ * @param name the base name of the key.
* @return the list of key material
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public abstract List<KeyVersion> getKeyVersions(String name) throws IOException;
@@ -488,7 +491,7 @@ public abstract class KeyProvider implements Closeable {
* @param name the base name of the key
* @return the version name of the current version of the key or null if the
* key version doesn't exist
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public KeyVersion getCurrentKey(String name) throws IOException {
Metadata meta = getMetadata(name);
@@ -502,7 +505,7 @@ public abstract class KeyProvider implements Closeable {
* Get metadata about the key.
* @param name the basename of the key
* @return the key's metadata or null if the key doesn't exist
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public abstract Metadata getMetadata(String name) throws IOException;
@@ -512,7 +515,7 @@ public abstract class KeyProvider implements Closeable {
* @param material the key material for the first version of the key.
* @param options the options for the new key.
* @return the version name of the first version of the key.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public abstract KeyVersion createKey(String name, byte[] material,
Options options) throws IOException;
@@ -537,7 +540,7 @@ public abstract class KeyProvider implements Closeable {
* @param size length of the key.
* @param algorithm algorithm to use for generating the key.
* @return the generated key.
- * @throws NoSuchAlgorithmException
+ * @throws NoSuchAlgorithmException if the algorithm is not available.
*/
protected byte[] generateKey(int size, String algorithm)
throws NoSuchAlgorithmException {
@@ -558,8 +561,8 @@ public abstract class KeyProvider implements Closeable {
* @param name the base name of the key
* @param options the options for the new key.
* @return the version name of the first version of the key.
- * @throws IOException
- * @throws NoSuchAlgorithmException
+ * @throws IOException raised on errors performing I/O.
+ * @throws NoSuchAlgorithmException if the algorithm is not available.
*/
public KeyVersion createKey(String name, Options options)
throws NoSuchAlgorithmException, IOException {
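How these createKey overloads are typically exercised; the provider lookup and key name are illustrative, and a configured `hadoop.security.key.provider.path` is assumed:

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.crypto.key.KeyProvider;
import org.apache.hadoop.crypto.key.KeyProviderFactory;

public class CreateKeyDemo {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Assumes at least one key provider is configured.
    KeyProvider provider = KeyProviderFactory.getProviders(conf).get(0);
    KeyProvider.Options opts = KeyProvider.options(conf)
        .setCipher("AES/CTR/NoPadding")
        .setBitLength(128);
    KeyProvider.KeyVersion kv = provider.createKey("demo-key", opts);
    provider.flush(); // persist the change, per the flush() javadoc below
    System.out.println(kv.getVersionName());
  }
}
```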
@@ -570,7 +573,7 @@ public abstract class KeyProvider implements Closeable {
/**
* Delete the given key.
* @param name the name of the key to delete
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public abstract void deleteKey(String name) throws IOException;
@@ -579,7 +582,7 @@ public abstract class KeyProvider implements Closeable {
* @param name the basename of the key
* @param material the new key material
* @return the name of the new version of the key
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public abstract KeyVersion rollNewVersion(String name,
byte[] material
@@ -601,7 +604,10 @@ public abstract class KeyProvider implements Closeable {
*
* @param name the basename of the key
* @return the name of the new version of the key
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
+ * @throws NoSuchAlgorithmException This exception is thrown when a particular
+ * cryptographic algorithm is requested
+ * but is not available in the environment.
*/
public KeyVersion rollNewVersion(String name) throws NoSuchAlgorithmException,
IOException {
@@ -620,7 +626,7 @@ public abstract class KeyProvider implements Closeable {
* version of the given key.
*
* @param name the basename of the key
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public void invalidateCache(String name) throws IOException {
// NOP
@@ -628,7 +634,7 @@ public abstract class KeyProvider implements Closeable {
/**
* Ensures that any changes to the keys are written to persistent store.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public abstract void flush() throws IOException;
@@ -637,7 +643,7 @@ public abstract class KeyProvider implements Closeable {
* "/aaa/bbb".
* @param versionName the version name to split
* @return the base name of the key
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public static String getBaseName(String versionName) throws IOException {
int div = versionName.lastIndexOf('@');
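The version-name convention this helper parses is `<base>@<n>`; a short sketch:

```java
import org.apache.hadoop.crypto.key.KeyProvider;

public class BaseNameDemo {
  public static void main(String[] args) throws Exception {
    // "demo-key@3" is version 3 of the key "demo-key".
    System.out.println(KeyProvider.getBaseName("demo-key@3")); // demo-key
    // A version name without '@' is rejected with an IOException.
  }
}
```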
@@ -660,9 +666,11 @@ public abstract class KeyProvider implements Closeable {
/**
* Find the provider with the given key.
+ *
* @param providerList the list of providers
- * @param keyName the key name we are looking for
+ * @param keyName the key name we are looking for.
* @return the KeyProvider that has the key
+ * @throws IOException raised on errors performing I/O.
*/
public static KeyProvider findProvider(List<KeyProvider> providerList,
String keyName) throws IOException {
@@ -680,7 +688,7 @@ public abstract class KeyProvider implements Closeable {
* means. If true, the password should be provided by the caller using
* setPassword().
* @return Whether or not the provider requires a password
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public boolean needsPassword() throws IOException {
return false;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderCryptoExtension.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderCryptoExtension.java
index 3f3c367fc39..d706e5ef100 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderCryptoExtension.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderCryptoExtension.java
@@ -178,6 +178,7 @@ public class KeyProviderCryptoExtension extends
* Calls to this method allows the underlying KeyProvider to warm-up any
* implementation specific caches used to store the Encrypted Keys.
* @param keyNames Array of Key Names
+ * @throws IOException thrown if the key material could not be encrypted.
*/
public void warmUpEncryptedKeys(String... keyNames)
throws IOException;
@@ -474,8 +475,9 @@ public class KeyProviderCryptoExtension extends
/**
* This constructor is to be used by sub classes that provide
* delegating/proxying functionality to the {@link KeyProviderCryptoExtension}
- * @param keyProvider
- * @param extension
+ *
+ * @param keyProvider key provider.
+ * @param extension crypto extension.
*/
protected KeyProviderCryptoExtension(KeyProvider keyProvider,
CryptoExtension extension) {
@@ -486,6 +488,7 @@ public class KeyProviderCryptoExtension extends
* Notifies the Underlying CryptoExtension implementation to warm up any
* implementation specific caches for the specified KeyVersions
* @param keyNames Arrays of key Names
+ * @throws IOException raised on errors performing I/O.
*/
public void warmUpEncryptedKeys(String... keyNames)
throws IOException {
@@ -557,7 +560,7 @@ public class KeyProviderCryptoExtension extends
* Calls {@link CryptoExtension#drain(String)} for the given key name on the
* underlying {@link CryptoExtension}.
*
- * @param keyName
+ * @param keyName key name.
*/
public void drain(String keyName) {
getExtension().drain(keyName);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderDelegationTokenExtension.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderDelegationTokenExtension.java
index 1fdc2fe1245..3c1af424eb7 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderDelegationTokenExtension.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyProviderDelegationTokenExtension.java
@@ -48,14 +48,14 @@ public class KeyProviderDelegationTokenExtension extends
* Renews the given token.
* @param token The token to be renewed.
* @return The token's lifetime after renewal, or 0 if it can't be renewed.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
long renewDelegationToken(final Token<?> token) throws IOException;
/**
* Cancels the given token.
* @param token The token to be cancelled.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
Void cancelDelegationToken(final Token<?> token) throws IOException;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyShell.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyShell.java
index a75f7d3aa63..c18d0d41bc0 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyShell.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/KeyShell.java
@@ -75,7 +75,7 @@ public class KeyShell extends CommandShell {
*
* @param args Command line arguments.
* @return 0 on success, 1 on failure.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
@Override
protected int init(String[] args) throws IOException {
@@ -547,7 +547,7 @@ public class KeyShell extends CommandShell {
* success and 1 for failure.
*
* @param args Command line arguments.
- * @throws Exception
+ * @throws Exception if the command processing fails.
*/
public static void main(String[] args) throws Exception {
int res = ToolRunner.run(new Configuration(), new KeyShell(), args);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/ValueQueue.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/ValueQueue.java
index be2db05842c..65eded918d6 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/ValueQueue.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/crypto/key/kms/ValueQueue.java
@@ -63,7 +63,7 @@ public class ValueQueue <E> {
* @param keyName Key name
* @param keyQueue Queue that needs to be filled
* @param numValues number of Values to be added to the queue.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public void fillQueueForKey(String keyName,
Queue<E> keyQueue, int numValues) throws IOException;
@@ -268,7 +268,7 @@ public class ValueQueue <E> {
* Initializes the Value Queues for the provided keys by calling the
* fill Method with "numInitValues" values
* @param keyNames Array of key Names
- * @throws ExecutionException
+ * @throws ExecutionException if the asynchronous fill fails.
*/
public void initializeQueuesForKeys(String... keyNames)
throws ExecutionException {
@@ -285,8 +285,8 @@ public class ValueQueue <E> {
* function to add 1 value to Queue and then drain it.
* @param keyName String key name
* @return E the next value in the Queue
- * @throws IOException
- * @throws ExecutionException
+ * @throws IOException raised on errors performing I/O.
+ * @throws ExecutionException if the queue fill task fails.
*/
public E getNext(String keyName)
throws IOException, ExecutionException {
@@ -344,8 +344,8 @@ public class ValueQueue <E> {
* @param keyName String key name
* @param num Minimum number of values to return.
* @return {@literal List<E>} values returned
- * @throws IOException
- * @throws ExecutionException
+ * @throws IOException raised on errors performing I/O.
+ * @throws ExecutionException if the queue fill task fails.
*/
public List<E> getAtMost(String keyName, int num) throws IOException,
ExecutionException {
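A sketch of how the two accessors differ, assuming an already-constructed `ValueQueue<E>` (construction takes a `QueueRefiller` plus cache sizing parameters not shown here):

```java
import java.util.List;
import org.apache.hadoop.crypto.key.kms.ValueQueue;

public class QueueDemo {
  static <E> void drain(ValueQueue<E> queue) throws Exception {
    // One value; may trigger an asynchronous refill of the key's queue.
    E next = queue.getNext("demo-key");
    // Up to 5 values in a single call, subject to the sync-generation policy.
    List<E> batch = queue.getAtMost("demo-key", 5);
    System.out.println(next + " / " + batch.size());
  }
}
```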
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java
index d9818b472f0..a4737c548c8 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AbstractFileSystem.java
@@ -272,7 +272,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* @param supportedScheme the scheme supported by the implementor
* @param authorityNeeded if true then theURI must have authority, if false
* then the URI must have null authority.
- *
+ * @param defaultPort default port to use if port is not specified in the URI.
* @throws URISyntaxException <code>uri</code> has syntax error
*/
public AbstractFileSystem(final URI uri, final String supportedScheme,
@@ -281,11 +281,12 @@ public abstract class AbstractFileSystem implements PathCapabilities {
myUri = getUri(uri, supportedScheme, authorityNeeded, defaultPort);
statistics = getStatistics(uri);
}
-
+
/**
- * Check that the Uri's scheme matches
- * @param uri
- * @param supportedScheme
+ * Check that the Uri's scheme matches.
+ *
+ * @param uri the URI of the file system.
+ * @param supportedScheme supported scheme.
*/
public void checkScheme(URI uri, String supportedScheme) {
String scheme = uri.getScheme();
@@ -362,7 +363,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* If the path is fully qualified URI, then its scheme and authority
* matches that of this file system. Otherwise the path must be
* slash-relative name.
- *
+ * @param path the path.
* @throws InvalidPathException if the path is invalid
*/
public void checkPath(Path path) {
@@ -431,7 +432,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
/**
* Make the path fully qualified to this file system
- * @param path
+ * @param path the path.
* @return the qualified path
*/
public Path makeQualified(Path path) {
@@ -496,9 +497,9 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* through any internal symlinks or mount point
* @param p path to be resolved
* @return fully qualified path
- * @throws FileNotFoundException
- * @throws AccessControlException
- * @throws IOException
+ * @throws FileNotFoundException if the path is not found.
+ * @throws AccessControlException if access is denied.
+ * @throws IOException raised on errors performing I/O.
* @throws UnresolvedLinkException if symbolic link on path cannot be
* resolved internally
*/
@@ -513,6 +514,18 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* {@link FileContext#create(Path, EnumSet, Options.CreateOpts...)} except
* that the Path f must be fully qualified and the permission is absolute
* (i.e. umask has been applied).
+ *
+ * @param f the path.
+ * @param createFlag create flags.
+ * @param opts create options.
+ * @throws AccessControlException access control exception.
+ * @throws FileAlreadyExistsException file already exists exception.
+ * @throws FileNotFoundException file not found exception.
+ * @throws ParentNotDirectoryException parent not directory exception.
+ * @throws UnsupportedFileSystemException unsupported file system exception.
+ * @throws UnresolvedLinkException unresolved link exception.
+ * @throws IOException raised on errors performing I/O.
+ * @return output stream.
*/
public final FSDataOutputStream create(final Path f,
final EnumSet<CreateFlag> createFlag, Options.CreateOpts... opts)
@@ -630,6 +643,24 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* The specification of this method matches that of
* {@link #create(Path, EnumSet, Options.CreateOpts...)} except that the opts
* have been declared explicitly.
+ *
+ * @param f the path.
+ * @param flag create flag.
+ * @param absolutePermission absolute permission.
+ * @param bufferSize buffer size.
+ * @param replication replication factor.
+ * @param blockSize block size.
+ * @param progress progress reporter.
+ * @param checksumOpt checksum option.
+ * @param createParent whether to create missing parent directories.
+ * @throws AccessControlException access control exception.
+ * @throws FileAlreadyExistsException file already exists exception.
+ * @throws FileNotFoundException file not found exception.
+ * @throws ParentNotDirectoryException parent not directory exception.
+ * @throws UnsupportedFileSystemException unsupported filesystem exception.
+ * @throws UnresolvedLinkException unresolved link exception.
+ * @throws IOException raised on errors performing I/O.
+ * @return output stream.
*/
public abstract FSDataOutputStream createInternal(Path f,
EnumSet<CreateFlag> flag, FsPermission absolutePermission,
@@ -644,6 +675,14 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* {@link FileContext#mkdir(Path, FsPermission, boolean)} except that the Path
* f must be fully qualified and the permission is absolute (i.e.
* umask has been applied).
+ * @param dir directory.
+ * @param permission permission.
+ * @param createParent create parent flag.
+ * @throws AccessControlException access control exception.
+ * @throws FileAlreadyExistsException file already exists exception.
+ * @throws FileNotFoundException file not found exception.
+ * @throws UnresolvedLinkException unresolved link exception.
+ * @throws IOException raised on errors performing I/O.
*/
public abstract void mkdir(final Path dir, final FsPermission permission,
final boolean createParent) throws AccessControlException,
@@ -654,6 +693,14 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* The specification of this method matches that of
* {@link FileContext#delete(Path, boolean)} except that Path f must be for
* this file system.
+ *
+ * @param f the path.
+ * @param recursive recursive flag.
+ * @throws AccessControlException access control exception.
+ * @throws FileNotFoundException file not found exception.
+ * @throws UnresolvedLinkException unresolved link exception.
+ * @throws IOException raised on errors performing I/O.
+ * @return true if the delete succeeded, false otherwise.
*/
public abstract boolean delete(final Path f, final boolean recursive)
throws AccessControlException, FileNotFoundException,
@@ -663,6 +710,13 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* The specification of this method matches that of
* {@link FileContext#open(Path)} except that Path f must be for this
* file system.
+ *
+ * @param f the path.
+ * @throws AccessControlException access control exception.
+ * @throws FileNotFoundException file not found exception.
+ * @throws UnresolvedLinkException unresolved link exception.
+ * @throws IOException raised on errors performing I/O.
+ * @return input stream.
*/
public FSDataInputStream open(final Path f) throws AccessControlException,
FileNotFoundException, UnresolvedLinkException, IOException {
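AbstractFileSystem is normally driven through FileContext rather than called directly; a round trip matching the create/open specifications the javadocs here reference:

```java
import java.util.EnumSet;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.CreateFlag;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileContext;
import org.apache.hadoop.fs.Path;

public class FileContextDemo {
  public static void main(String[] args) throws Exception {
    FileContext fc = FileContext.getFileContext(new Configuration());
    Path p = new Path("/tmp/demo.txt");
    try (FSDataOutputStream out =
             fc.create(p, EnumSet.of(CreateFlag.CREATE, CreateFlag.OVERWRITE))) {
      out.writeUTF("hello");
    }
    try (FSDataInputStream in = fc.open(p)) {
      System.out.println(in.readUTF());
    }
  }
}
```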
@@ -673,6 +727,14 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* The specification of this method matches that of
* {@link FileContext#open(Path, int)} except that Path f must be for this
* file system.
+ *
+ * @param f the path.
+ * @param bufferSize buffer size.
+ * @throws AccessControlException access control exception.
+ * @throws FileNotFoundException file not found exception.
+ * @throws UnresolvedLinkException unresolved link exception.
+ * @throws IOException raised on errors performing I/O.
+ * @return an input stream for the opened file.
*/
public abstract FSDataInputStream open(final Path f, int bufferSize)
throws AccessControlException, FileNotFoundException,
@@ -682,6 +744,14 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* The specification of this method matches that of
* {@link FileContext#truncate(Path, long)} except that Path f must be for
* this file system.
+ *
+ * @param f the path.
+ * @param newLength new length.
+ * @throws AccessControlException access control exception.
+ * @throws FileNotFoundException file not found exception.
+ * @throws UnresolvedLinkException unresolved link exception.
+ * @throws IOException raised on errors performing I/O.
+ * @return true if the truncate completed, false if it is still in progress.
*/
public boolean truncate(Path f, long newLength)
throws AccessControlException, FileNotFoundException,
@@ -694,6 +764,14 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* The specification of this method matches that of
* {@link FileContext#setReplication(Path, short)} except that Path f must be
* for this file system.
+ *
+ * @param f the path.
+ * @param replication replication.
+ * @return true if the replication was set successfully, false otherwise.
+ * @throws AccessControlException access control exception.
+ * @throws FileNotFoundException file not found exception.
+ * @throws UnresolvedLinkException unresolved link exception.
+ * @throws IOException raised on errors performing I/O.
*/
public abstract boolean setReplication(final Path f,
final short replication) throws AccessControlException,
@@ -703,6 +781,16 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* The specification of this method matches that of
* {@link FileContext#rename(Path, Path, Options.Rename...)} except that Path
* f must be for this file system.
+ *
+ * @param src the source path.
+ * @param dst the destination path.
+ * @param options options.
+ * @throws AccessControlException access control exception.
+ * @throws FileAlreadyExistsException file already exists exception.
+ * @throws FileNotFoundException file not found exception.
+ * @throws ParentNotDirectoryException parent not directory exception.
+ * @throws UnresolvedLinkException unresolved link exception.
+ * @throws IOException raised on errors performing I/O.
*/
public final void rename(final Path src, final Path dst,
final Options.Rename... options) throws AccessControlException,
@@ -727,6 +815,15 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* File systems that do not have a built in overwrite need implement only this
* method and can take advantage of the default impl of the other
* {@link #renameInternal(Path, Path, boolean)}
+ *
+ * @param src the source path.
+ * @param dst the destination path.
+ * @throws AccessControlException access control exception.
+ * @throws FileAlreadyExistsException file already exists exception.
+ * @throws FileNotFoundException file not found exception.
+ * @throws ParentNotDirectoryException parent not directory exception.
+ * @throws UnresolvedLinkException unresolved link exception.
+ * @throws IOException raised on errors performing I/O.
*/
public abstract void renameInternal(final Path src, final Path dst)
throws AccessControlException, FileAlreadyExistsException,
@@ -737,6 +834,16 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* The specification of this method matches that of
* {@link FileContext#rename(Path, Path, Options.Rename...)} except that Path
* f must be for this file system.
+ *
+ * @param src the source path.
+ * @param dst the destination path.
+ * @param overwrite overwrite flag.
+ * @throws AccessControlException access control exception.
+ * @throws FileAlreadyExistsException file already exists exception.
+ * @throws FileNotFoundException file not found exception.
+ * @throws ParentNotDirectoryException parent not directory exception.
+ * @throws UnresolvedLinkException unresolved link exception.
+ * @throws IOException raised on errors performing I/O.
*/
public void renameInternal(final Path src, final Path dst,
boolean overwrite) throws AccessControlException,
@@ -800,6 +907,12 @@ public abstract class AbstractFileSystem implements PathCapabilities {
/**
* The specification of this method matches that of
* {@link FileContext#createSymlink(Path, Path, boolean)};
+ *
+ * @param target the target of the symlink.
+ * @param link the path of the symlink to create.
+ * @param createParent whether to create missing parent directories.
+ * @throws IOException raised on errors performing I/O.
+ * @throws UnresolvedLinkException unresolved link exception.
*/
public void createSymlink(final Path target, final Path link,
final boolean createParent) throws IOException, UnresolvedLinkException {
@@ -810,6 +923,8 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* Partially resolves the path. This is used during symlink resolution in
* {@link FSLinkResolver}, and differs from the similarly named method
* {@link FileContext#getLinkTarget(Path)}.
+ * @param f the path.
+ * @return target path.
* @throws IOException subclass implementations may throw IOException
*/
public Path getLinkTarget(final Path f) throws IOException {
@@ -822,6 +937,13 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* The specification of this method matches that of
* {@link FileContext#setPermission(Path, FsPermission)} except that Path f
* must be for this file system.
+ *
+ * @param f the path.
+ * @param permission permission.
+ * @throws AccessControlException access control exception.
+ * @throws FileNotFoundException file not found exception.
+ * @throws UnresolvedLinkException unresolved link exception.
+ * @throws IOException raised on errors performing I/O.
*/
public abstract void setPermission(final Path f,
final FsPermission permission) throws AccessControlException,
@@ -831,6 +953,14 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* The specification of this method matches that of
* {@link FileContext#setOwner(Path, String, String)} except that Path f must
* be for this file system.
+ *
+ * @param f the path.
+ * @param username username.
+ * @param groupname groupname.
+ * @throws AccessControlException access control exception.
+ * @throws FileNotFoundException file not found exception.
+ * @throws UnresolvedLinkException unresolved link exception.
+ * @throws IOException raised on errors performing I/O.
*/
public abstract void setOwner(final Path f, final String username,
final String groupname) throws AccessControlException,
@@ -840,6 +970,14 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* The specification of this method matches that of
* {@link FileContext#setTimes(Path, long, long)} except that Path f must be
* for this file system.
+ *
+ * @param f the path.
+ * @param mtime the modification time.
+ * @param atime the access time.
+ * @throws AccessControlException access control exception.
+ * @throws FileNotFoundException file not found exception.
+ * @throws UnresolvedLinkException unresolved link exception.
+ * @throws IOException raised on errors performing I/O.
*/
public abstract void setTimes(final Path f, final long mtime,
final long atime) throws AccessControlException, FileNotFoundException,
@@ -849,6 +987,13 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* The specification of this method matches that of
* {@link FileContext#getFileChecksum(Path)} except that Path f must be for
* this file system.
+ *
+ * @param f the path.
+ * @throws AccessControlException access control exception.
+ * @throws FileNotFoundException file not found exception.
+ * @throws UnresolvedLinkException unresolved link exception.
+ * @throws IOException raised on errors performing I/O.
+ * @return the file checksum.
*/
public abstract FileChecksum getFileChecksum(final Path f)
throws AccessControlException, FileNotFoundException,
@@ -859,6 +1004,13 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* {@link FileContext#getFileStatus(Path)}
* except that an UnresolvedLinkException may be thrown if a symlink is
* encountered in the path.
+ *
+ * @param f the path.
+ * @throws AccessControlException access control exception.
+ * @throws FileNotFoundException file not found exception.
+ * @throws UnresolvedLinkException unresolved link exception.
+ * @throws IOException raised on errors performing I/O.
+ * @return the file status.
*/
public abstract FileStatus getFileStatus(final Path f)
throws AccessControlException, FileNotFoundException,
@@ -870,8 +1022,8 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* In some FileSystem implementations such as HDFS metadata
* synchronization is essential to guarantee consistency of read requests
* particularly in HA setting.
- * @throws IOException
- * @throws UnsupportedOperationException
+ * @throws IOException raised on errors performing I/O.
+ * @throws UnsupportedOperationException if the operation is unsupported.
*/
public void msync() throws IOException, UnsupportedOperationException {
throw new UnsupportedOperationException(getClass().getCanonicalName() +
@@ -883,6 +1035,13 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* {@link FileContext#access(Path, FsAction)}
* except that an UnresolvedLinkException may be thrown if a symlink is
* encountered in the path.
+ *
+ * @param path the path.
+ * @param mode the FsAction mode to check.
+ * @throws AccessControlException access control exception.
+ * @throws FileNotFoundException file not found exception.
+ * @throws UnresolvedLinkException unresolved link exception.
+ * @throws IOException raised on errors performing I/O.
*/
@InterfaceAudience.LimitedPrivate({"HDFS", "Hive"})
public void access(Path path, FsAction mode) throws AccessControlException,
@@ -897,6 +1056,13 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* encountered in the path leading up to the final path component.
* If the file system does not support symlinks then the behavior is
* equivalent to {@link AbstractFileSystem#getFileStatus(Path)}.
+ *
+ * @param f the path.
+ * @throws AccessControlException access control exception.
+ * @throws FileNotFoundException file not found exception.
+ * @throws UnsupportedFileSystemException if the file system is not supported.
+ * @throws IOException raised on errors performing I/O.
+ * @return file status.
*/
public FileStatus getFileLinkStatus(final Path f)
throws AccessControlException, FileNotFoundException,
@@ -908,6 +1074,15 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* The specification of this method matches that of
* {@link FileContext#getFileBlockLocations(Path, long, long)} except that
* Path f must be for this file system.
+ *
+ * @param f the path.
+ * @param start the start offset.
+ * @param len the length.
+ * @throws AccessControlException access control exception.
+ * @throws FileNotFoundException file not found exception.
+ * @throws UnresolvedLinkException unresolved link exception.
+ * @throws IOException raised on errors performing I/O.
+ * @return an array of block locations.
*/
public abstract BlockLocation[] getFileBlockLocations(final Path f,
final long start, final long len) throws AccessControlException,
@@ -917,6 +1092,13 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* The specification of this method matches that of
* {@link FileContext#getFsStatus(Path)} except that Path f must be for this
* file system.
+ *
+ * @param f the path.
+ * @throws AccessControlException access control exception.
+ * @throws FileNotFoundException file not found exception.
+ * @throws UnresolvedLinkException unresolved link exception.
+ * @throws IOException raised on errors performing I/O.
+ * @return the file system status.
*/
public FsStatus getFsStatus(final Path f) throws AccessControlException,
FileNotFoundException, UnresolvedLinkException, IOException {
@@ -927,6 +1109,11 @@ public abstract class AbstractFileSystem implements PathCapabilities {
/**
* The specification of this method matches that of
* {@link FileContext#getFsStatus(Path)}.
+ *
+ * @throws AccessControlException access control exception.
+ * @throws FileNotFoundException file not found exception.
+ * @throws IOException raised on errors performing I/O.
+ * @return the file system status.
*/
public abstract FsStatus getFsStatus() throws AccessControlException,
FileNotFoundException, IOException;
@@ -935,6 +1122,13 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* The specification of this method matches that of
* {@link FileContext#listStatus(Path)} except that Path f must be for this
* file system.
+ *
+ * @param f path.
+ * @throws AccessControlException access control exception.
+ * @throws FileNotFoundException file not found exception.
+ * @throws UnresolvedLinkException unresolved link exception.
+ * @throws IOException raised on errors performing I/O.
+ * @return FileStatus Iterator.
*/
public RemoteIterator listStatusIterator(final Path f)
throws AccessControlException, FileNotFoundException,
@@ -967,6 +1161,13 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* will have different formats for replicated and erasure coded file. Please
* refer to {@link FileSystem#getFileBlockLocations(FileStatus, long, long)}
* for more details.
+ *
+ * @param f the path.
+ * @throws AccessControlException access control exception.
+ * @throws FileNotFoundException file not found exception.
+ * @throws UnresolvedLinkException unresolved link exception.
+ * @throws IOException raised on errors performing I/O.
+ * @return LocatedFileStatus iterator.
*/
public RemoteIterator listLocatedStatus(final Path f)
throws AccessControlException, FileNotFoundException,
@@ -999,6 +1200,12 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* The specification of this method matches that of
* {@link FileContext.Util#listStatus(Path)} except that Path f must be
* for this file system.
+ * @param f the path.
+ * @throws AccessControlException access control exception.
+ * @throws FileNotFoundException file not found exception.
+ * @throws UnresolvedLinkException unresolved link exception.
+ * @throws IOException raised on errors performing I/O.
+ * @return an array of FileStatus.
*/
public abstract FileStatus[] listStatus(final Path f)
throws AccessControlException, FileNotFoundException,
@@ -1007,7 +1214,8 @@ public abstract class AbstractFileSystem implements PathCapabilities {
/**
* @return an iterator over the corrupt files under the given path
* (may contain duplicates if a file has more than one corrupt block)
- * @throws IOException
+ * @param path the path.
+ * @throws IOException raised on errors performing I/O.
*/
public RemoteIterator listCorruptFileBlocks(Path path)
throws IOException {
@@ -1020,6 +1228,10 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* The specification of this method matches that of
* {@link FileContext#setVerifyChecksum(boolean, Path)} except that Path f
* must be for this file system.
+ *
+ * @param verifyChecksum verify checksum flag.
+ * @throws AccessControlException access control exception.
+ * @throws IOException raised on errors performing I/O.
*/
public abstract void setVerifyChecksum(final boolean verifyChecksum)
throws AccessControlException, IOException;
@@ -1041,7 +1253,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* @param renewer the account name that is allowed to renew the token.
* @return List of delegation tokens.
* If delegation tokens not supported then return a list of size zero.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
@InterfaceAudience.LimitedPrivate( { "HDFS", "MapReduce" })
public List> getDelegationTokens(String renewer) throws IOException {
@@ -1141,7 +1353,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* @param path Path to modify
* @param name xattr name.
* @param value xattr value.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public void setXAttr(Path path, String name, byte[] value)
throws IOException {
@@ -1160,7 +1372,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* @param name xattr name.
* @param value xattr value.
* @param flag xattr set flag
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public void setXAttr(Path path, String name, byte[] value,
EnumSet flag) throws IOException {
@@ -1178,7 +1390,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* @param path Path to get extended attribute
* @param name xattr name.
* @return byte[] xattr value.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public byte[] getXAttr(Path path, String name) throws IOException {
throw new UnsupportedOperationException(getClass().getSimpleName()
@@ -1196,7 +1408,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
*
* @return {@literal Map} describing the XAttrs of the file
* or directory
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public Map getXAttrs(Path path) throws IOException {
throw new UnsupportedOperationException(getClass().getSimpleName()
@@ -1214,7 +1426,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* @param names XAttr names.
* @return {@literal Map} describing the XAttrs of the file
* or directory
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public Map getXAttrs(Path path, List names)
throws IOException {
@@ -1232,7 +1444,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* @param path Path to get extended attributes
* @return {@literal Map} describing the XAttrs of the file
* or directory
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public List listXAttrs(Path path)
throws IOException {
@@ -1249,7 +1461,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
*
* @param path Path to remove extended attribute
* @param name xattr name
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public void removeXAttr(Path path, String name) throws IOException {
throw new UnsupportedOperationException(getClass().getSimpleName()
@@ -1259,6 +1471,11 @@ public abstract class AbstractFileSystem implements PathCapabilities {
/**
* The specification of this method matches that of
* {@link FileContext#createSnapshot(Path, String)}.
+ *
+ * @param path the path.
+ * @param snapshotName snapshot name.
+ * @throws IOException raised on errors performing I/O.
+ * @return the snapshot path.
*/
public Path createSnapshot(final Path path, final String snapshotName)
throws IOException {
@@ -1269,6 +1486,11 @@ public abstract class AbstractFileSystem implements PathCapabilities {
/**
* The specification of this method matches that of
* {@link FileContext#renameSnapshot(Path, String, String)}.
+ *
+ * @param path the path.
+ * @param snapshotOldName snapshot old name.
+ * @param snapshotNewName snapshot new name.
+ * @throws IOException raised on errors performing I/O.
*/
public void renameSnapshot(final Path path, final String snapshotOldName,
final String snapshotNewName) throws IOException {
@@ -1279,6 +1501,10 @@ public abstract class AbstractFileSystem implements PathCapabilities {
/**
* The specification of this method matches that of
* {@link FileContext#deleteSnapshot(Path, String)}.
+ *
+ * @param snapshotDir snapshot dir.
+ * @param snapshotName snapshot name.
+ * @throws IOException raised on errors performing I/O.
*/
public void deleteSnapshot(final Path snapshotDir, final String snapshotName)
throws IOException {
@@ -1289,7 +1515,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
/**
* Set the source path to satisfy storage policy.
* @param path The source path referring to either a directory or a file.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public void satisfyStoragePolicy(final Path path) throws IOException {
throw new UnsupportedOperationException(
@@ -1303,6 +1529,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* @param policyName the name of the target storage policy. The list
* of supported Storage policies can be retrieved
* via {@link #getAllStoragePolicies}.
+ * @throws IOException raised on errors performing I/O.
*/
public void setStoragePolicy(final Path path, final String policyName)
throws IOException {
@@ -1314,7 +1541,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
/**
* Unset the storage policy set for a given file or directory.
* @param src file or directory path.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public void unsetStoragePolicy(final Path src) throws IOException {
throw new UnsupportedOperationException(getClass().getSimpleName()
@@ -1326,7 +1553,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
*
* @param src file or directory path.
* @return storage policy for given file.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public BlockStoragePolicySpi getStoragePolicy(final Path src)
throws IOException {
@@ -1338,7 +1565,7 @@ public abstract class AbstractFileSystem implements PathCapabilities {
* Retrieve all the storage policies supported by this file system.
*
* @return all storage policies supported by this filesystem.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public Collection extends BlockStoragePolicySpi> getAllStoragePolicies()
throws IOException {
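Aside for reviewers: the access() contract documented above is easiest to see through the public API. A minimal sketch of a permission probe, assuming a configured default filesystem and a hypothetical path:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.fs.permission.FsAction;
    import org.apache.hadoop.security.AccessControlException;

    public class AccessProbe {
      public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(new Configuration());
        Path p = new Path("/tmp/example.txt");        // hypothetical path
        try {
          fs.access(p, FsAction.READ);                // throws on denial
          System.out.println("readable");
        } catch (AccessControlException e) {
          System.out.println("not readable: " + e.getMessage());
        }
      }
    }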
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AvroFSInput.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AvroFSInput.java
index 213fbc24c4d..7518dd2f7ef 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AvroFSInput.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/AvroFSInput.java
@@ -36,13 +36,22 @@ public class AvroFSInput implements Closeable, SeekableInput {
private final FSDataInputStream stream;
private final long len;
- /** Construct given an {@link FSDataInputStream} and its length. */
+ /**
+ * Construct given an {@link FSDataInputStream} and its length.
+ *
+ * @param in inputstream.
+ * @param len the length.
+ */
public AvroFSInput(final FSDataInputStream in, final long len) {
this.stream = in;
this.len = len;
}
- /** Construct given a {@link FileContext} and a {@link Path}. */
+ /** Construct given a {@link FileContext} and a {@link Path}.
+ * @param fc the file context.
+ * @param p the path.
+ * @throws IOException If an I/O error occurred.
+ */
public AvroFSInput(final FileContext fc, final Path p) throws IOException {
FileStatus status = fc.getFileStatus(p);
this.len = status.getLen();
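A minimal sketch of the FileContext-based constructor in use; the file path is hypothetical and the length comes from FileStatus as documented:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.AvroFSInput;
    import org.apache.hadoop.fs.FileContext;
    import org.apache.hadoop.fs.Path;

    public class AvroFSInputDemo {
      public static void main(String[] args) throws Exception {
        FileContext fc = FileContext.getFileContext(new Configuration());
        Path p = new Path("/tmp/records.avro");       // hypothetical file
        try (AvroFSInput in = new AvroFSInput(fc, p)) {
          byte[] buf = new byte[16];
          int n = in.read(buf, 0, buf.length);        // SeekableInput-style read
          System.out.println("read " + n + " of " + in.length() + " bytes");
        }
      }
    }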
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BatchedRemoteIterator.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BatchedRemoteIterator.java
index 607fffbcc70..e693bcbfe89 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BatchedRemoteIterator.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BatchedRemoteIterator.java
@@ -68,6 +68,7 @@ public abstract class BatchedRemoteIterator implements RemoteIterator {
*
* @param prevKey The key to send.
* @return A list of replies.
+ * @throws IOException If an I/O error occurred.
*/
public abstract BatchedEntries makeRequest(K prevKey) throws IOException;
@@ -102,6 +103,8 @@ public abstract class BatchedRemoteIterator implements RemoteIterator {
/**
* Return the next list key associated with an element.
+ * @param element the element.
+ * @return the key of generic type K.
*/
public abstract K elementToPrevKey(E element);
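To make the makeRequest/elementToPrevKey contract concrete, here is a minimal sketch of a subclass that pages through an in-memory sorted set two entries at a time; it assumes the BatchedListEntries helper that ships with the class:

    import java.io.IOException;
    import java.util.ArrayList;
    import java.util.List;
    import java.util.NavigableSet;
    import java.util.TreeSet;
    import org.apache.hadoop.fs.BatchedRemoteIterator;

    public class NameBatchIterator extends BatchedRemoteIterator<String, String> {
      private final TreeSet<String> names;

      public NameBatchIterator(TreeSet<String> names) {
        super("");                     // empty string sorts before any name
        this.names = names;
      }

      @Override
      public BatchedEntries<String> makeRequest(String prevKey) throws IOException {
        NavigableSet<String> tail = names.tailSet(prevKey, false);
        List<String> batch = new ArrayList<>();
        for (String n : tail) {
          batch.add(n);
          if (batch.size() == 2) {
            break;
          }
        }
        // hasMore tells the base class whether another makeRequest is needed.
        return new BatchedListEntries<>(batch, tail.size() > batch.size());
      }

      @Override
      public String elementToPrevKey(String element) {
        return element;                // each name is its own continuation key
      }
    }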
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BlockLocation.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BlockLocation.java
index 29358dd7d10..67687c1f0e0 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BlockLocation.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/BlockLocation.java
@@ -85,6 +85,7 @@ public class BlockLocation implements Serializable {
/**
* Copy constructor.
+ * @param that the BlockLocation to copy.
*/
public BlockLocation(BlockLocation that) {
this.hosts = that.hosts;
@@ -100,6 +101,10 @@ public class BlockLocation implements Serializable {
/**
* Constructor with host, name, offset and length.
+ * @param names names array.
+ * @param hosts host array.
+ * @param offset offset.
+ * @param length length.
*/
public BlockLocation(String[] names, String[] hosts, long offset,
long length) {
@@ -108,6 +113,11 @@ public class BlockLocation implements Serializable {
/**
* Constructor with host, name, offset, length and corrupt flag.
+ * @param names names.
+ * @param hosts hosts.
+ * @param offset offset.
+ * @param length length.
+ * @param corrupt corrupt.
*/
public BlockLocation(String[] names, String[] hosts, long offset,
long length, boolean corrupt) {
@@ -116,6 +126,11 @@ public class BlockLocation implements Serializable {
/**
* Constructor with host, name, network topology, offset and length.
+ * @param names names.
+ * @param hosts hosts.
+ * @param topologyPaths topologyPaths.
+ * @param offset offset.
+ * @param length length.
*/
public BlockLocation(String[] names, String[] hosts, String[] topologyPaths,
long offset, long length) {
@@ -125,6 +140,12 @@ public class BlockLocation implements Serializable {
/**
* Constructor with host, name, network topology, offset, length
* and corrupt flag.
+ * @param names names.
+ * @param hosts hosts.
+ * @param topologyPaths topologyPaths.
+ * @param offset offset.
+ * @param length length.
+ * @param corrupt corrupt.
*/
public BlockLocation(String[] names, String[] hosts, String[] topologyPaths,
long offset, long length, boolean corrupt) {
@@ -177,6 +198,8 @@ public class BlockLocation implements Serializable {
/**
* Get the list of hosts (hostname) hosting this block.
+ * @return hosts array.
+ * @throws IOException If an I/O error occurred.
*/
public String[] getHosts() throws IOException {
return hosts;
@@ -184,6 +207,7 @@ public class BlockLocation implements Serializable {
/**
* Get the list of hosts (hostname) hosting a cached replica of the block.
+ * @return cached hosts.
*/
public String[] getCachedHosts() {
return cachedHosts;
@@ -191,6 +215,8 @@ public class BlockLocation implements Serializable {
/**
* Get the list of names (IP:xferPort) hosting this block.
+ * @return names array.
+ * @throws IOException If an I/O error occurred.
*/
public String[] getNames() throws IOException {
return names;
@@ -199,6 +225,8 @@ public class BlockLocation implements Serializable {
/**
* Get the list of network topology paths for each of the hosts.
* The last component of the path is the "name" (IP:xferPort).
+ * @return topology paths.
+ * @throws IOException If an I/O error occurred.
*/
public String[] getTopologyPaths() throws IOException {
return topologyPaths;
@@ -206,6 +234,7 @@ public class BlockLocation implements Serializable {
/**
* Get the storageID of each replica of the block.
+ * @return storage ids.
*/
public String[] getStorageIds() {
return storageIds;
@@ -213,6 +242,7 @@ public class BlockLocation implements Serializable {
/**
* Get the storage type of each replica of the block.
+ * @return storage type of each replica of the block.
*/
public StorageType[] getStorageTypes() {
return storageTypes;
@@ -220,6 +250,7 @@ public class BlockLocation implements Serializable {
/**
* Get the start offset of file associated with this block.
+ * @return start offset of file associated with this block.
*/
public long getOffset() {
return offset;
@@ -227,6 +258,7 @@ public class BlockLocation implements Serializable {
/**
* Get the length of the block.
+ * @return length of the block.
*/
public long getLength() {
return length;
@@ -234,6 +266,7 @@ public class BlockLocation implements Serializable {
/**
* Get the corrupt flag.
+ * @return corrupt flag.
*/
public boolean isCorrupt() {
return corrupt;
@@ -241,6 +274,7 @@ public class BlockLocation implements Serializable {
/**
* Return true if the block is striped (erasure coded).
+ * @return true if the block is striped, false otherwise.
*/
public boolean isStriped() {
return false;
@@ -248,6 +282,7 @@ public class BlockLocation implements Serializable {
/**
* Set the start offset of file associated with this block.
+ * @param offset start offset.
*/
public void setOffset(long offset) {
this.offset = offset;
@@ -255,6 +290,7 @@ public class BlockLocation implements Serializable {
/**
* Set the length of block.
+ * @param length length of block.
*/
public void setLength(long length) {
this.length = length;
@@ -262,6 +298,7 @@ public class BlockLocation implements Serializable {
/**
* Set the corrupt flag.
+ * @param corrupt corrupt flag.
*/
public void setCorrupt(boolean corrupt) {
this.corrupt = corrupt;
@@ -269,6 +306,8 @@ public class BlockLocation implements Serializable {
/**
* Set the hosts hosting this block.
+ * @param hosts hosts array.
+ * @throws IOException If an I/O error occurred.
*/
public void setHosts(String[] hosts) throws IOException {
if (hosts == null) {
@@ -280,6 +319,7 @@ public class BlockLocation implements Serializable {
/**
* Set the hosts hosting a cached replica of this block.
+ * @param cachedHosts cached hosts.
*/
public void setCachedHosts(String[] cachedHosts) {
if (cachedHosts == null) {
@@ -291,6 +331,8 @@ public class BlockLocation implements Serializable {
/**
* Set the names (host:port) hosting this block.
+ * @param names names.
+ * @throws IOException If an I/O error occurred.
*/
public void setNames(String[] names) throws IOException {
if (names == null) {
@@ -302,6 +344,9 @@ public class BlockLocation implements Serializable {
/**
* Set the network topology paths of the hosts.
+ *
+ * @param topologyPaths topology paths.
+ * @throws IOException If an I/O error occurred.
*/
public void setTopologyPaths(String[] topologyPaths) throws IOException {
if (topologyPaths == null) {
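The constructor/getter pairs documented above compose as follows; the hosts and names are invented values in the documented IP:xferPort form:

    import org.apache.hadoop.fs.BlockLocation;

    public class BlockLocationDemo {
      public static void main(String[] args) throws Exception {
        String[] names = {"10.0.0.1:9866", "10.0.0.2:9866"};  // IP:xferPort
        String[] hosts = {"dn1.example.com", "dn2.example.com"};
        // One block covering bytes [0, 128MB) of a file, not corrupt.
        BlockLocation loc =
            new BlockLocation(names, hosts, 0L, 128L * 1024 * 1024, false);
        System.out.println(loc.getHosts().length + " hosts, offset="
            + loc.getOffset() + ", len=" + loc.getLength()
            + ", corrupt=" + loc.isCorrupt());
      }
    }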
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ByteBufferUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ByteBufferUtil.java
index 6576fe5827d..f577649dd5f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ByteBufferUtil.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ByteBufferUtil.java
@@ -47,6 +47,12 @@ public final class ByteBufferUtil {
/**
* Perform a fallback read.
+ *
+ * @param stream input stream.
+ * @param bufferPool bufferPool.
+ * @param maxLength maxLength.
+ * @throws IOException raised on errors performing I/O.
+ * @return byte buffer.
*/
public static ByteBuffer fallbackRead(
InputStream stream, ByteBufferPool bufferPool, int maxLength)
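A minimal sketch of fallbackRead() against a plain InputStream, assuming the ElasticByteBufferPool from org.apache.hadoop.io and a hypothetical local file; buffers come from, and should be returned to, the pool:

    import java.io.FileInputStream;
    import java.io.InputStream;
    import java.nio.ByteBuffer;
    import org.apache.hadoop.fs.ByteBufferUtil;
    import org.apache.hadoop.io.ByteBufferPool;
    import org.apache.hadoop.io.ElasticByteBufferPool;

    public class FallbackReadDemo {
      public static void main(String[] args) throws Exception {
        ByteBufferPool pool = new ElasticByteBufferPool();
        try (InputStream in = new FileInputStream("/tmp/data.bin")) {
          ByteBuffer buf = ByteBufferUtil.fallbackRead(in, pool, 4096);
          if (buf != null) {                 // null signals end of stream
            System.out.println("read " + buf.remaining() + " bytes");
            pool.putBuffer(buf);             // return the buffer to the pool
          }
        }
      }
    }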
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CachingGetSpaceUsed.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CachingGetSpaceUsed.java
index 362d125b09d..d7b61346d4e 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CachingGetSpaceUsed.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CachingGetSpaceUsed.java
@@ -53,6 +53,9 @@ public abstract class CachingGetSpaceUsed implements Closeable, GetSpaceUsed {
/**
* This is the constructor used by the builder.
* All overriding classes should implement this.
+ *
+ * @param builder builder.
+ * @throws IOException raised on errors performing I/O.
*/
public CachingGetSpaceUsed(CachingGetSpaceUsed.Builder builder)
throws IOException {
@@ -140,6 +143,8 @@ public abstract class CachingGetSpaceUsed implements Closeable, GetSpaceUsed {
/**
* Increment the cached value of used space.
+ *
+ * @param value dfs used value.
*/
public void incDfsUsed(long value) {
used.addAndGet(value);
@@ -154,6 +159,8 @@ public abstract class CachingGetSpaceUsed implements Closeable, GetSpaceUsed {
/**
* How long in between runs of the background refresh.
+ *
+ * @return refresh interval.
*/
@VisibleForTesting
public long getRefreshInterval() {
@@ -163,6 +170,8 @@ public abstract class CachingGetSpaceUsed implements Closeable, GetSpaceUsed {
/**
* Randomize the refresh interval timing by this amount, the actual interval will be chosen
* uniformly between {@code interval-jitter} and {@code interval+jitter}.
+ *
+ * @return between interval-jitter and interval+jitter.
*/
@VisibleForTesting
public long getJitter() {
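The jitter contract above (uniform in [interval-jitter, interval+jitter]) can be sketched in a few lines; this illustrates the documented behaviour, not the class's internal code:

    import java.util.concurrent.ThreadLocalRandom;

    public final class JitteredInterval {
      // Pick the next refresh delay uniformly from [interval-jitter, interval+jitter].
      static long next(long interval, long jitter) {
        if (jitter <= 0) {
          return interval;
        }
        return interval + ThreadLocalRandom.current().nextLong(-jitter, jitter + 1);
      }

      public static void main(String[] args) {
        System.out.println(next(60_000L, 5_000L));   // somewhere in [55s, 65s]
      }
    }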
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java
index 59ffe00bcb2..0efcdc8022f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFileSystem.java
@@ -102,25 +102,44 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
return fs;
}
- /** Return the name of the checksum file associated with a file.*/
+ /**
+ * Return the name of the checksum file associated with a file.
+ *
+ * @param file the file path.
+ * @return name of the checksum file associated with a file.
+ */
public Path getChecksumFile(Path file) {
return new Path(file.getParent(), "." + file.getName() + ".crc");
}
- /** Return true iff file is a checksum file name.*/
+ /**
+ * Return true if file is a checksum file name.
+ *
+ * @param file the file path.
+ * @return true if the file is a checksum file, false otherwise.
+ */
public static boolean isChecksumFile(Path file) {
String name = file.getName();
return name.startsWith(".") && name.endsWith(".crc");
}
- /** Return the length of the checksum file given the size of the
+ /**
+ * Return the length of the checksum file given the size of the
* actual file.
- **/
+ *
+ * @param file the file path.
+ * @param fileSize file size.
+ * @return checksum length.
+ */
public long getChecksumFileLength(Path file, long fileSize) {
return getChecksumLength(fileSize, getBytesPerSum());
}
- /** Return the bytes Per Checksum */
+ /**
+ * Return the bytes per checksum.
+ *
+ * @return bytes per checksum.
+ */
public int getBytesPerSum() {
return bytesPerChecksum;
}
@@ -362,6 +381,7 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
* Opens an FSDataInputStream at the indicated Path.
* @param f the file name to open
* @param bufferSize the size of the buffer to be used.
+ * @throws IOException if an I/O error occurs.
*/
@Override
public FSDataInputStream open(Path f, int bufferSize) throws IOException {
@@ -669,7 +689,7 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
* Implement the abstract setReplication of FileSystem
* @param src file name
* @param replication new replication
- * @throws IOException
+ * @throws IOException if an I/O error occurs.
* @return true if successful;
* false if file does not exist or is a directory
*/
@@ -754,7 +774,7 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
* @param f
* given path
* @return the statuses of the files/directories in the given path
- * @throws IOException
+ * @throws IOException if an I/O error occurs.
*/
@Override
public FileStatus[] listStatus(Path f) throws IOException {
@@ -775,7 +795,7 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
* @param f
* given path
* @return the statuses of the files/directories in the given patch
- * @throws IOException
+ * @throws IOException if an I/O error occurs.
*/
@Override
public RemoteIterator listLocatedStatus(Path f)
@@ -811,6 +831,10 @@ public abstract class ChecksumFileSystem extends FilterFileSystem {
* Copy it from FS control to the local dst name.
* If src and dst are directories, the copyCrc parameter
* determines whether to copy CRC files.
+ * @param src src path.
+ * @param dst dst path.
+ * @param copyCrc copy crc flag.
+ * @throws IOException if an I/O error occurs.
*/
@SuppressWarnings("deprecation")
public void copyToLocalFile(Path src, Path dst, boolean copyCrc)
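The naming scheme documented in getChecksumFile()/isChecksumFile() is visible through LocalFileSystem, which extends ChecksumFileSystem; the data path below is hypothetical:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.ChecksumFileSystem;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.LocalFileSystem;
    import org.apache.hadoop.fs.Path;

    public class CrcNamingDemo {
      public static void main(String[] args) throws Exception {
        LocalFileSystem lfs = FileSystem.getLocal(new Configuration());
        Path data = new Path("/tmp/data.bin");                 // hypothetical
        Path crc = lfs.getChecksumFile(data);                  // /tmp/.data.bin.crc
        System.out.println(crc);
        System.out.println(ChecksumFileSystem.isChecksumFile(crc));  // true
        System.out.println("bytes per checksum: " + lfs.getBytesPerSum());
      }
    }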
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java
index bc1122c56a2..4820c5c3045 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ChecksumFs.java
@@ -70,30 +70,53 @@ public abstract class ChecksumFs extends FilterFs {
this.verifyChecksum = inVerifyChecksum;
}
- /** get the raw file system. */
+ /**
+ * Get the raw file system.
+ *
+ * @return the raw AbstractFileSystem.
+ */
public AbstractFileSystem getRawFs() {
return getMyFs();
}
- /** Return the name of the checksum file associated with a file.*/
+ /**
+ * Return the name of the checksum file associated with a file.
+ *
+ * @param file the file path.
+ * @return the checksum file associated with a file.
+ */
public Path getChecksumFile(Path file) {
return new Path(file.getParent(), "." + file.getName() + ".crc");
}
- /** Return true iff file is a checksum file name.*/
+ /**
+ * Return true iff file is a checksum file name.
+ *
+ * @param file the file path.
+ * @return true if the file is a checksum file, false otherwise.
+ */
public static boolean isChecksumFile(Path file) {
String name = file.getName();
return name.startsWith(".") && name.endsWith(".crc");
}
- /** Return the length of the checksum file given the size of the
+ /**
+ * Return the length of the checksum file given the size of the
* actual file.
- **/
+ *
+ * @param file the file path.
+ * @param fileSize file size.
+ * @return checksum file length.
+ */
public long getChecksumFileLength(Path file, long fileSize) {
return getChecksumLength(fileSize, getBytesPerSum());
}
- /** Return the bytes Per Checksum. */
+ /**
+ * Return the bytes Per Checksum.
+ *
+ * @return bytes per checksum.
+ */
public int getBytesPerSum() {
return defaultBytesPerChecksum;
}
@@ -433,7 +456,7 @@ public abstract class ChecksumFs extends FilterFs {
* Implement the abstract setReplication of FileSystem
* @param src file name
* @param replication new replication
- * @throws IOException
+ * @throws IOException if an I/O error occurs.
* @return true if successful;
* false if file does not exist or is a directory
*/
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
index a799e883bcf..52252365092 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CommonConfigurationKeysPublic.java
@@ -169,11 +169,11 @@ public class CommonConfigurationKeysPublic {
/**
* Number of filesystems instances can be created in parallel.
- *
+ *
* A higher number here does not necessarily improve performance, especially
* for object stores, where multiple threads may be attempting to create an FS
* instance for the same URI.
- *
+ *
* Default value: {@value}.
*/
public static final String FS_CREATION_PARALLEL_COUNT =
@@ -181,8 +181,9 @@ public class CommonConfigurationKeysPublic {
/**
* Default value for {@link #FS_CREATION_PARALLEL_COUNT}.
- *
+ *
* Default value: {@value}.
+ *
*/
public static final int FS_CREATION_PARALLEL_COUNT_DEFAULT =
64;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CompositeCrcFileChecksum.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CompositeCrcFileChecksum.java
index e1ed5cbcfca..bdbc8f3a33f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CompositeCrcFileChecksum.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CompositeCrcFileChecksum.java
@@ -37,7 +37,13 @@ public class CompositeCrcFileChecksum extends FileChecksum {
private DataChecksum.Type crcType;
private int bytesPerCrc;
- /** Create a CompositeCrcFileChecksum. */
+ /**
+ * Create a CompositeCrcFileChecksum.
+ *
+ * @param crc crc.
+ * @param crcType crcType.
+ * @param bytesPerCrc bytesPerCrc.
+ */
public CompositeCrcFileChecksum(
int crc, DataChecksum.Type crcType, int bytesPerCrc) {
this.crc = crc;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ContentSummary.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ContentSummary.java
index 79850e1a2f2..9f97a12fa60 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ContentSummary.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/ContentSummary.java
@@ -149,17 +149,31 @@ public class ContentSummary extends QuotaUsage implements Writable{
@Deprecated
public ContentSummary() {}
- /** Constructor, deprecated by ContentSummary.Builder
+ /**
+ * Constructor, deprecated by ContentSummary.Builder
* This constructor implicitly set spaceConsumed the same as length.
* spaceConsumed and length must be set explicitly with
- * ContentSummary.Builder
+ * ContentSummary.Builder.
+ *
+ * @param length length.
+ * @param fileCount file count.
+ * @param directoryCount directory count.
* */
@Deprecated
public ContentSummary(long length, long fileCount, long directoryCount) {
this(length, fileCount, directoryCount, -1L, length, -1L);
}
- /** Constructor, deprecated by ContentSummary.Builder */
+ /**
+ * Constructor, deprecated by ContentSummary.Builder.
+ *
+ * @param length length.
+ * @param fileCount file count.
+ * @param directoryCount directory count.
+ * @param quota quota.
+ * @param spaceConsumed space consumed.
+ * @param spaceQuota space quota.
+ * */
@Deprecated
public ContentSummary(
long length, long fileCount, long directoryCount, long quota,
@@ -172,7 +186,11 @@ public class ContentSummary extends QuotaUsage implements Writable{
setSpaceQuota(spaceQuota);
}
- /** Constructor for ContentSummary.Builder*/
+ /**
+ * Constructor for ContentSummary.Builder.
+ *
+ * @param builder builder.
+ */
private ContentSummary(Builder builder) {
super(builder);
this.length = builder.length;
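Since the constructors above are deprecated in favour of ContentSummary.Builder, a minimal sketch of the replacement; the numbers are invented:

    import org.apache.hadoop.fs.ContentSummary;

    public class SummaryDemo {
      public static void main(String[] args) {
        ContentSummary cs = new ContentSummary.Builder()
            .length(1024L)
            .fileCount(3L)
            .directoryCount(1L)
            .spaceConsumed(3072L)        // set explicitly, e.g. replication 3
            .build();
        System.out.println(cs.getLength() + " bytes in " + cs.getFileCount()
            + " files, " + cs.getSpaceConsumed() + " bytes consumed");
      }
    }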
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CreateFlag.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CreateFlag.java
index 71993713ad2..ca008e53693 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CreateFlag.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/CreateFlag.java
@@ -189,6 +189,8 @@ public enum CreateFlag {
/**
* Validate the CreateFlag for the append operation. The flag must contain
* APPEND, and cannot contain OVERWRITE.
+ *
+ * @param flag enum set flag.
*/
public static void validateForAppend(EnumSet flag) {
validate(flag);
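The validateForAppend() rule (APPEND required, OVERWRITE forbidden) in two calls:

    import java.util.EnumSet;
    import org.apache.hadoop.fs.CreateFlag;

    public class FlagCheck {
      public static void main(String[] args) {
        CreateFlag.validateForAppend(EnumSet.of(CreateFlag.APPEND));   // passes
        try {
          CreateFlag.validateForAppend(
              EnumSet.of(CreateFlag.APPEND, CreateFlag.OVERWRITE));    // rejected
        } catch (Exception e) {
          System.out.println("rejected: " + e.getMessage());
        }
      }
    }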
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DF.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DF.java
index da4636b2c0f..c5a052f3de4 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DF.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DF.java
@@ -65,7 +65,10 @@ public class DF extends Shell {
return dirPath;
}
- /** @return a string indicating which filesystem volume we're checking. */
+ /**
+ * @return a string indicating which filesystem volume we're checking.
+ * @throws IOException raised on errors performing I/O.
+ */
public String getFilesystem() throws IOException {
if (Shell.WINDOWS) {
this.filesystem = dirFile.getCanonicalPath().substring(0, 2);
@@ -100,7 +103,10 @@ public class DF extends Shell {
return (int) (used * 100.0 / cap);
}
- /** @return the filesystem mount point for the indicated volume */
+ /**
+ * @return the filesystem mount point for the indicated volume.
+ * @throws IOException raised on errors performing I/O.
+ */
public String getMount() throws IOException {
// Abort early if specified path does not exist
if (!dirFile.exists()) {
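A minimal sketch of the two getters just documented, probing a hypothetical local directory:

    import java.io.File;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.DF;

    public class DfDemo {
      public static void main(String[] args) throws Exception {
        DF df = new DF(new File("/tmp"), new Configuration());
        System.out.println("volume: " + df.getFilesystem());
        System.out.println("mount:  " + df.getMount());
      }
    }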
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegationTokenRenewer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegationTokenRenewer.java
index 33905dcbb77..6f6e3041065 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegationTokenRenewer.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/DelegationTokenRenewer.java
@@ -47,7 +47,11 @@ public class DelegationTokenRenewer
/** @return the renew token. */
public Token> getRenewToken();
- /** Set delegation token. */
+ /**
+ * Set delegation token.
+ * @param <T> the generic type T.
+ * @param token token.
+ */
public void setDelegationToken(Token token);
}
@@ -172,7 +176,11 @@ public class DelegationTokenRenewer
/** Queue to maintain the RenewActions to be processed by the {@link #run()} */
private volatile DelayQueue> queue = new DelayQueue>();
- /** For testing purposes */
+ /**
+ * For testing purposes.
+ *
+ * @return renew queue length.
+ */
@VisibleForTesting
protected int getRenewQueueLength() {
return queue.size();
@@ -211,7 +219,13 @@ public class DelegationTokenRenewer
}
}
- /** Add a renew action to the queue. */
+ /**
+ * Add a renew action to the queue.
+ *
+ * @param <T> the generic type T.
+ * @param fs file system.
+ * @return renew action.
+ * */
@SuppressWarnings("static-access")
public RenewAction addRenewAction(final T fs) {
synchronized (this) {
@@ -230,8 +244,10 @@ public class DelegationTokenRenewer
/**
* Remove the associated renew action from the queue
- *
- * @throws IOException
+ *
+ * @param <T> the generic type T.
+ * @param fs file system.
+ * @throws IOException raised on errors performing I/O.
*/
public void removeRenewAction(
final T fs) throws IOException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSBuilder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSBuilder.java
index a4c7254cfeb..56ef51f128d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSBuilder.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSBuilder.java
@@ -37,12 +37,17 @@ public interface FSBuilder> {
/**
* Set optional Builder parameter.
+ * @param key key.
+ * @param value value.
+ * @return generic type B.
*/
B opt(@Nonnull String key, @Nonnull String value);
/**
* Set optional boolean parameter for the Builder.
- *
+ * @param key key.
+ * @param value value.
+ * @return generic type B.
* @see #opt(String, String)
*/
B opt(@Nonnull String key, boolean value);
@@ -50,6 +55,9 @@ public interface FSBuilder> {
/**
* Set optional int parameter for the Builder.
*
+ * @param key key.
+ * @param value value.
+ * @return generic type B.
* @see #opt(String, String)
*/
B opt(@Nonnull String key, int value);
@@ -57,6 +65,9 @@ public interface FSBuilder> {
/**
* Set optional float parameter for the Builder.
*
+ * @param key key.
+ * @param value value.
+ * @return generic type B.
* @see #opt(String, String)
*/
B opt(@Nonnull String key, float value);
@@ -64,6 +75,9 @@ public interface FSBuilder> {
/**
* Set optional long parameter for the Builder.
*
+ * @param key key.
+ * @param value value.
+ * @return generic type B.
* @see #opt(String, String)
*/
B opt(@Nonnull String key, long value);
@@ -71,6 +85,9 @@ public interface FSBuilder> {
/**
* Set optional double parameter for the Builder.
*
+ * @param key key.
+ * @param value value.
+ * @return generic type B.
* @see #opt(String, String)
*/
B opt(@Nonnull String key, double value);
@@ -78,6 +95,9 @@ public interface FSBuilder> {
/**
* Set an array of string values as optional parameter for the Builder.
*
+ * @param key key.
+ * @param values values.
+ * @return generic type B.
* @see #opt(String, String)
*/
B opt(@Nonnull String key, @Nonnull String... values);
@@ -87,12 +107,19 @@ public interface FSBuilder> {
*
* If the option is not supported or unavailable,
* the client should expect {@link #build()} throws IllegalArgumentException.
+ *
+ * @param key key.
+ * @param value value.
+ * @return generic type B.
*/
B must(@Nonnull String key, @Nonnull String value);
/**
* Set mandatory boolean option.
*
+ * @param key key.
+ * @param value value.
+ * @return generic type B.
* @see #must(String, String)
*/
B must(@Nonnull String key, boolean value);
@@ -100,6 +127,9 @@ public interface FSBuilder> {
/**
* Set mandatory int option.
*
+ * @param key key.
+ * @param value value.
+ * @return generic type B.
* @see #must(String, String)
*/
B must(@Nonnull String key, int value);
@@ -107,6 +137,9 @@ public interface FSBuilder> {
/**
* Set mandatory float option.
*
+ * @param key key.
+ * @param value value.
+ * @return generic type B.
* @see #must(String, String)
*/
B must(@Nonnull String key, float value);
@@ -114,6 +147,9 @@ public interface FSBuilder> {
/**
* Set mandatory long option.
*
+ * @param key key.
+ * @param value value.
+ * @return generic type B.
* @see #must(String, String)
*/
B must(@Nonnull String key, long value);
@@ -121,6 +157,9 @@ public interface FSBuilder> {
/**
* Set mandatory double option.
*
+ * @param key key.
+ * @param value value.
+ * @return generic type B.
* @see #must(String, String)
*/
B must(@Nonnull String key, double value);
@@ -128,6 +167,9 @@ public interface FSBuilder> {
/**
* Set a string array as mandatory option.
*
+ * @param key key.
+ * @param values values.
+ * @return generic type B.
* @see #must(String, String)
*/
B must(@Nonnull String key, @Nonnull String... values);
@@ -139,6 +181,7 @@ public interface FSBuilder> {
* @throws UnsupportedOperationException if the filesystem does not support
* the specific operation.
* @throws IOException on filesystem IO errors.
+ * @return generic type S.
*/
S build() throws IllegalArgumentException,
UnsupportedOperationException, IOException;
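The opt()/must() distinction above is easiest to see through FileSystem.openFile(), whose builder implements FSBuilder: opt() is a hint the store may ignore, while an unsupported must() makes build() fail. A sketch, assuming Hadoop 3.3+ and a hypothetical path (the read-policy key comes from the openFile API):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataInputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class BuilderOptDemo {
      public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(new Configuration());
        Path p = new Path("/tmp/data.bin");           // hypothetical path
        try (FSDataInputStream in = fs.openFile(p)
            .opt("fs.option.openfile.read.policy", "sequential")
            .build()
            .get()) {                                 // build() returns a future
          System.out.println(in.read());
        }
      }
    }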
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataOutputStreamBuilder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataOutputStreamBuilder.java
index c96d499d17b..16938a83a69 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataOutputStreamBuilder.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSDataOutputStreamBuilder.java
@@ -123,6 +123,9 @@ public abstract class FSDataOutputStreamBuilder
/**
* Constructor.
+ *
+ * @param fileSystem file system.
+ * @param p the path.
*/
protected FSDataOutputStreamBuilder(@Nonnull FileSystem fileSystem,
@Nonnull Path p) {
@@ -149,6 +152,9 @@ public abstract class FSDataOutputStreamBuilder
/**
* Set permission for the file.
+ *
+ * @param perm permission.
+ * @return generic type B.
*/
public B permission(@Nonnull final FsPermission perm) {
checkNotNull(perm);
@@ -162,6 +168,9 @@ public abstract class FSDataOutputStreamBuilder
/**
* Set the size of the buffer to be used.
+ *
+ * @param bufSize buffer size.
+ * @return generic type B.
*/
public B bufferSize(int bufSize) {
bufferSize = bufSize;
@@ -174,6 +183,9 @@ public abstract class FSDataOutputStreamBuilder
/**
* Set replication factor.
+ *
+ * @param replica replica.
+ * @return generic type B.
*/
public B replication(short replica) {
replication = replica;
@@ -186,6 +198,9 @@ public abstract class FSDataOutputStreamBuilder
/**
* Set block size.
+ *
+ * @param blkSize block size.
+ * @return generic type B.
*/
public B blockSize(long blkSize) {
blockSize = blkSize;
@@ -194,6 +209,8 @@ public abstract class FSDataOutputStreamBuilder
/**
* Return true to create the parent directories if they do not exist.
+ *
+ * @return true if missing parent directories will be created, false otherwise.
*/
protected boolean isRecursive() {
return recursive;
@@ -201,6 +218,8 @@ public abstract class FSDataOutputStreamBuilder
/**
* Create the parent directory if they do not exist.
+ *
+ * @return generic type B.
*/
public B recursive() {
recursive = true;
@@ -213,6 +232,9 @@ public abstract class FSDataOutputStreamBuilder
/**
* Set the facility of reporting progress.
+ *
+ * @param prog progress.
+ * @return generic type B.
*/
public B progress(@Nonnull final Progressable prog) {
checkNotNull(prog);
@@ -226,6 +248,8 @@ public abstract class FSDataOutputStreamBuilder
/**
* Create an FSDataOutputStream at the specified path.
+ *
+ * @return generic type B.
*/
public B create() {
flags.add(CreateFlag.CREATE);
@@ -236,6 +260,9 @@ public abstract class FSDataOutputStreamBuilder
* Set to true to overwrite the existing file.
* Set it to false, an exception will be thrown when calling {@link #build()}
* if the file exists.
+ *
+ * @param overwrite overwrite flag.
+ * @return generic type B.
*/
public B overwrite(boolean overwrite) {
if (overwrite) {
@@ -248,6 +275,8 @@ public abstract class FSDataOutputStreamBuilder
/**
* Append to an existing file (optional operation).
+ *
+ * @return generic type B.
*/
public B append() {
flags.add(CreateFlag.APPEND);
@@ -260,6 +289,9 @@ public abstract class FSDataOutputStreamBuilder
/**
* Set checksum opt.
+ *
+ * @param chksumOpt checksum option.
+ * @return generic type B.
*/
public B checksumOpt(@Nonnull final ChecksumOpt chksumOpt) {
checkNotNull(chksumOpt);
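Chaining the setters documented above through FileSystem.createFile(); the output path is hypothetical:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class CreateFileDemo {
      public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(new Configuration());
        Path p = new Path("/tmp/out.bin");            // hypothetical path
        try (FSDataOutputStream out = fs.createFile(p)
            .bufferSize(4096)
            .replication((short) 1)
            .recursive()                              // create missing parents
            .overwrite(true)
            .build()) {
          out.writeUTF("hello");
        }
      }
    }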
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputChecker.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputChecker.java
index de66eab713a..ee16ca8a2cd 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputChecker.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSInputChecker.java
@@ -82,6 +82,7 @@ abstract public class FSInputChecker extends FSInputStream {
* @param sum the type of Checksum engine
* @param chunkSize maximun chunk size
* @param checksumSize the number byte of each checksum
+ * @param verifyChecksum verify checksum flag.
*/
protected FSInputChecker( Path file, int numOfRetries,
boolean verifyChecksum, Checksum sum, int chunkSize, int checksumSize ) {
@@ -118,6 +119,7 @@ abstract public class FSInputChecker extends FSInputStream {
* @param len maximum number of bytes to read
* @param checksum the data buffer into which to write checksums
* @return number of bytes read
+ * @throws IOException raised on errors performing I/O.
*/
abstract protected int readChunk(long pos, byte[] buf, int offset, int len,
byte[] checksum) throws IOException;
@@ -129,7 +131,10 @@ abstract public class FSInputChecker extends FSInputStream {
*/
abstract protected long getChunkPosition(long pos);
- /** Return true if there is a need for checksum verification */
+ /**
+ * Return true if there is a need for checksum verification.
+ * @return true if checksum verification is needed, false otherwise.
+ */
protected synchronized boolean needChecksum() {
return verifyChecksum && sum != null;
}
@@ -357,6 +362,9 @@ abstract public class FSInputChecker extends FSInputStream {
* Convert a checksum byte array to a long
* This is deprecated since 0.22 since it is no longer in use
* by this class.
+ *
+ * @param checksum the checksum byte array.
+ * @return the crc value as a long.
*/
@Deprecated
static public long checksum2long(byte[] checksum) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSLinkResolver.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSLinkResolver.java
index ffe4b34ca5f..f85cf7a8581 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSLinkResolver.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSLinkResolver.java
@@ -74,7 +74,7 @@ public abstract class FSLinkResolver {
* @param fc FileContext used to access file systems.
* @param path The path to resolve symlinks on.
* @return Generic type determined by the implementation of next.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public T resolve(final FileContext fc, final Path path) throws IOException {
int count = 0;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSOutputSummer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSOutputSummer.java
index 6de026b9d17..4ef512dc257 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSOutputSummer.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FSOutputSummer.java
@@ -186,6 +186,8 @@ abstract public class FSOutputSummer extends OutputStream implements
/**
* Return the number of valid bytes currently in the buffer.
+ *
+ * @return buffer data size.
*/
protected synchronized int getBufferedDataSize() {
return count;
@@ -227,6 +229,10 @@ abstract public class FSOutputSummer extends OutputStream implements
/**
* Converts a checksum integer value to a byte stream
+ *
+ * @param sum the checksum.
+ * @param checksumSize the checksum size.
+ * @return byte stream.
*/
static public byte[] convertToByteStream(Checksum sum, int checksumSize) {
return int2byte((int)sum.getValue(), new byte[checksumSize]);
@@ -245,6 +251,8 @@ abstract public class FSOutputSummer extends OutputStream implements
/**
* Resets existing buffer with a new one of the specified size.
+ *
+ * @param size the new buffer size.
*/
protected synchronized void setChecksumBufSize(int size) {
this.buf = new byte[size];
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileChecksum.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileChecksum.java
index 6822fa48562..62d2e3af786 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileChecksum.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileChecksum.java
@@ -28,20 +28,37 @@ import org.apache.hadoop.io.Writable;
@InterfaceAudience.Public
@InterfaceStability.Stable
public abstract class FileChecksum implements Writable {
- /** The checksum algorithm name */
+ /**
+ * The checksum algorithm name.
+ *
+ * @return algorithm name.
+ */
public abstract String getAlgorithmName();
- /** The length of the checksum in bytes */
+ /**
+ * The length of the checksum in bytes.
+ *
+ * @return length.
+ */
public abstract int getLength();
- /** The value of the checksum in bytes */
+ /**
+ * The value of the checksum in bytes.
+ *
+ * @return byte array.
+ */
public abstract byte[] getBytes();
public ChecksumOpt getChecksumOpt() {
return null;
}
- /** Return true if both the algorithms and the values are the same. */
+ /**
+ * Return true if both the algorithms and the values are the same.
+ *
+ * @param other the object to compare against.
+ * @return true if equal, false otherwise.
+ */
@Override
public boolean equals(Object other) {
if (other == this) {
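The equals() contract just documented (same algorithm name and same bytes) is what makes cross-file comparison work; a sketch with hypothetical paths, noting that getFileChecksum() may return null where checksums are unsupported:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileChecksum;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    public class ChecksumCompare {
      public static void main(String[] args) throws Exception {
        FileSystem fs = FileSystem.get(new Configuration());
        FileChecksum a = fs.getFileChecksum(new Path("/tmp/a.bin"));
        FileChecksum b = fs.getFileChecksum(new Path("/tmp/b.bin"));
        System.out.println(a != null && a.equals(b));
      }
    }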
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
index f3004ce7e03..298570bb55f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileContext.java
@@ -366,8 +366,8 @@ public class FileContext implements PathCapabilities {
* Create a FileContext with specified FS as default using the specified
* config.
*
- * @param defFS
- * @param aConf
+ * @param defFS default fs.
+ * @param aConf configuration.
* @return new FileContext with specified FS as default.
*/
public static FileContext getFileContext(final AbstractFileSystem defFS,
@@ -378,7 +378,7 @@ public class FileContext implements PathCapabilities {
/**
* Create a FileContext for specified file system using the default config.
*
- * @param defaultFS
+ * @param defaultFS default fs.
* @return a FileContext with the specified AbstractFileSystem
* as the default FS.
*/
@@ -411,6 +411,7 @@ public class FileContext implements PathCapabilities {
*
* @throws UnsupportedFileSystemException If the file system from the default
* configuration is not supported
+ * @return file context.
*/
public static FileContext getFileContext()
throws UnsupportedFileSystemException {
@@ -430,7 +431,7 @@ public class FileContext implements PathCapabilities {
/**
* Create a FileContext for specified URI using the default config.
*
- * @param defaultFsUri
+ * @param defaultFsUri the default fs URI.
* @return a FileContext with the specified URI as the default FS.
*
* @throws UnsupportedFileSystemException If the file system for
@@ -444,8 +445,8 @@ public class FileContext implements PathCapabilities {
/**
* Create a FileContext for specified default URI using the specified config.
*
- * @param defaultFsUri
- * @param aConf
+ * @param defaultFsUri the default fs URI.
+ * @param aConf configuration.
* @return new FileContext for specified uri
* @throws UnsupportedFileSystemException If the file system with specified is
* not supported
@@ -476,7 +477,7 @@ public class FileContext implements PathCapabilities {
* {@link #getFileContext(URI, Configuration)} instead of this one.
*
*
- * @param aConf
+ * @param aConf configuration.
* @return new FileContext
* @throws UnsupportedFileSystemException If file system in the config
* is not supported
@@ -554,6 +555,7 @@ public class FileContext implements PathCapabilities {
/**
* Gets the working directory for wd-relative names (such a "foo/bar").
+ * @return the working directory path.
*/
public Path getWorkingDirectory() {
return workingDir;
@@ -600,13 +602,14 @@ public class FileContext implements PathCapabilities {
* @throws FileNotFoundException If f does not exist
* @throws AccessControlException if access denied
* @throws IOException If an IO Error occurred
- *
+ * @throws UnresolvedLinkException If unresolved link occurred.
+ *
* Exceptions applicable to file systems accessed over RPC:
* @throws RpcClientException If an exception occurred in the RPC client
* @throws RpcServerException If an exception occurred in the RPC server
* @throws UnexpectedServerException If server implementation throws
* undeclared exception to RPC server
- *
+ *
* RuntimeExceptions:
* @throws InvalidPathException If path f is not valid
*/
@@ -620,7 +623,7 @@ public class FileContext implements PathCapabilities {
* A Fully-qualified path has scheme and authority specified and an absolute
* path.
* Use the default file system and working dir in this FileContext to qualify.
- * @param path
+ * @param path the path.
* @return qualified path
*/
public Path makeQualified(final Path path) {
@@ -759,6 +762,7 @@ public class FileContext implements PathCapabilities {
*
* Client should expect {@link FSDataOutputStreamBuilder#build()} throw the
* same exceptions as create(Path, EnumSet, CreateOpts...).
+ * @throws IOException If an I/O error occurred.
*/
public FSDataOutputStreamBuilder create(final Path f)
throws IOException {
@@ -832,6 +836,8 @@ public class FileContext implements PathCapabilities {
*
* RuntimeExceptions:
* @throws InvalidPathException If path f is invalid
+ *
+ * @return true if delete succeeded, false otherwise.
*/
public boolean delete(final Path f, final boolean recursive)
throws AccessControlException, FileNotFoundException,
@@ -862,6 +868,7 @@ public class FileContext implements PathCapabilities {
* @throws RpcServerException If an exception occurred in the RPC server
* @throws UnexpectedServerException If server implementation throws
* undeclared exception to RPC server
+ * @return input stream.
*/
public FSDataInputStream open(final Path f) throws AccessControlException,
FileNotFoundException, UnsupportedFileSystemException, IOException {
@@ -892,6 +899,7 @@ public class FileContext implements PathCapabilities {
* @throws RpcServerException If an exception occurred in the RPC server
* @throws UnexpectedServerException If server implementation throws
* undeclared exception to RPC server
+ * @return input stream.
*/
public FSDataInputStream open(final Path f, final int bufferSize)
throws AccessControlException, FileNotFoundException,
@@ -1001,6 +1009,7 @@ public class FileContext implements PathCapabilities {
*
* @param src path to be renamed
* @param dst new path after rename
+ * @param options rename options.
*
* @throws AccessControlException If access is denied
* @throws FileAlreadyExistsException If dst already exists and
@@ -1052,7 +1061,7 @@ public class FileContext implements PathCapabilities {
/**
* Set permission of a path.
- * @param f
+ * @param f the path.
* @param permission - the new absolute permission (umask is not applied)
*
* @throws AccessControlException If access is denied
@@ -1196,7 +1205,7 @@ public class FileContext implements PathCapabilities {
* Set the verify checksum flag for the file system denoted by the path.
* This is only applicable if the
* corresponding FileSystem supports checksum. By default doesn't do anything.
- * @param verifyChecksum
+ * @param verifyChecksum verify checksum flag.
* @param f set the verifyChecksum for the Filesystem containing this path
*
* @throws AccessControlException If access is denied
@@ -1251,8 +1260,9 @@ public class FileContext implements PathCapabilities {
/**
* Synchronize client metadata state.
*
- * @throws IOException
- * @throws UnsupportedOperationException
+ * @throws IOException If an I/O error occurred.
+ * @throws UnsupportedOperationException If the file system does not
+ * support this operation.
*/
public void msync() throws IOException, UnsupportedOperationException {
defaultFS.msync();
@@ -1613,9 +1623,12 @@ public class FileContext implements PathCapabilities {
}
/**
+ * List corrupt file blocks.
+ *
+ * @param path the path.
* @return an iterator over the corrupt files under the given path
* (may contain duplicates if a file has more than one corrupt block)
- * @throws IOException
+ * @throws IOException If an I/O error occurred.
*/
public RemoteIterator listCorruptFileBlocks(Path path)
throws IOException {
@@ -1739,6 +1752,7 @@ public class FileContext implements PathCapabilities {
* @throws RpcServerException If an exception occurred in the RPC server
* @throws UnexpectedServerException If server implementation throws
* undeclared exception to RPC server
+ * @return true if f exists, false otherwise.
*/
public boolean exists(final Path f) throws AccessControlException,
UnsupportedFileSystemException, IOException {
@@ -1799,6 +1813,12 @@ public class FileContext implements PathCapabilities {
/**
* See {@link #listStatus(Path[], PathFilter)}
+ *
+ * @param files files.
+ * @throws AccessControlException If access is denied.
+ * @throws FileNotFoundException If files does not exist.
+ * @throws IOException If an I/O error occurred.
+ * @return file status array.
*/
public FileStatus[] listStatus(Path[] files) throws AccessControlException,
FileNotFoundException, IOException {
@@ -2054,36 +2074,29 @@ public class FileContext implements PathCapabilities {
* ?             Matches any single character.
* *             Matches zero or more characters.
* [abc]         Matches a single character from character set {a,b,c}.
* [a-b]         Matches a single character from the character range {a...b}.
*               Note: character a must be lexicographically less than or
*               equal to character b.
* [^a]          Matches a single char that is not from character set or
*               range {a}. Note that the ^ character must occur immediately
*               to the right of the opening bracket.
* \c            Removes (escapes) any special meaning of character c.
* {ab,cd}       Matches a string from the string set {ab, cd}.
* {ab,c{de,fh}} Matches a string from string set {ab, cde, cfh}.
@@ -2144,6 +2157,18 @@ public class FileContext implements PathCapabilities {
/**
* Copy file from src to dest. See
* {@link #copy(Path, Path, boolean, boolean)}
+ *
+ * @param src the source path.
+ * @param dst the destination path.
+ * @throws AccessControlException If access is denied.
+ * @throws FileAlreadyExistsException If file src already exists.
+ * @throws FileNotFoundException if next file does not exist any more.
+ * @throws ParentNotDirectoryException If parent of src is not a directory.
+ * @throws UnsupportedFileSystemException If file system for src/dst is not
+ * supported.
+ * @throws IOException If an I/O error occurred.
+ * @return true if the copy succeeded, false otherwise.
*/
public boolean copy(final Path src, final Path dst)
throws AccessControlException, FileAlreadyExistsException,
@@ -2154,8 +2179,8 @@ public class FileContext implements PathCapabilities {
/**
* Copy from src to dst, optionally deleting src and overwriting dst.
- * @param src
- * @param dst
+ * @param src the source path.
+ * @param dst the destination path.
* @param deleteSource - delete src if true
* @param overwrite overwrite dst if true; throw IOException if dst exists
* and overwrite is false.
@@ -2276,7 +2301,7 @@ public class FileContext implements PathCapabilities {
* Are qualSrc and qualDst of the same file system?
* @param qualPath1 - fully qualified path
* @param qualPath2 - fully qualified path
- * @return
+ * @return true if both paths are on the same file system, false otherwise.
*/
private static boolean isSameFS(Path qualPath1, Path qualPath2) {
URI srcUri = qualPath1.toUri();
@@ -2299,6 +2324,13 @@ public class FileContext implements PathCapabilities {
/**
* Resolves all symbolic links in the specified path.
* Returns the new path object.
+ *
+ * @param f the path to resolve.
+ * @throws FileNotFoundException If f does not exist.
+ * @throws UnresolvedLinkException If a symlink in the path cannot be
+ *           resolved.
+ * @throws AccessControlException If access is denied.
+ * @throws IOException If an I/O error occurred.
+ * @return the resolved path.
*/
protected Path resolve(final Path f) throws FileNotFoundException,
UnresolvedLinkException, AccessControlException, IOException {
@@ -2316,6 +2348,7 @@ public class FileContext implements PathCapabilities {
* to, but not including the final path component.
* @param f path to resolve
* @return the new path object.
+ * @throws IOException If an I/O error occurred.
*/
protected Path resolveIntermediate(final Path f) throws IOException {
return new FSLinkResolver() {
@@ -2334,7 +2367,7 @@ public class FileContext implements PathCapabilities {
* @param f
* Path which needs to be resolved
* @return List of AbstractFileSystems accessed in the path
- * @throws IOException
+ * @throws IOException If an I/O error occurred.
*/
Set resolveAbstractFileSystems(final Path f)
throws IOException {
@@ -2395,7 +2428,7 @@ public class FileContext implements PathCapabilities {
* @param p Path for which delegations tokens are requested.
* @param renewer the account name that is allowed to renew the token.
* @return List of delegation tokens.
- * @throws IOException
+ * @throws IOException If an I/O error occurred.
*/
@InterfaceAudience.LimitedPrivate( { "HDFS", "MapReduce" })
public List> getDelegationTokens(
@@ -2547,7 +2580,7 @@ public class FileContext implements PathCapabilities {
* @param path Path to modify
* @param name xattr name.
* @param value xattr value.
- * @throws IOException
+ * @throws IOException If an I/O error occurred.
*/
public void setXAttr(Path path, String name, byte[] value)
throws IOException {
@@ -2566,7 +2599,7 @@ public class FileContext implements PathCapabilities {
* @param name xattr name.
* @param value xattr value.
* @param flag xattr set flag
- * @throws IOException
+ * @throws IOException If an I/O error occurred.
*/
public void setXAttr(Path path, final String name, final byte[] value,
final EnumSet flag) throws IOException {
@@ -2591,7 +2624,7 @@ public class FileContext implements PathCapabilities {
* @param path Path to get extended attribute
* @param name xattr name.
* @return byte[] xattr value.
- * @throws IOException
+ * @throws IOException If an I/O error occurred.
*/
public byte[] getXAttr(Path path, final String name) throws IOException {
final Path absF = fixRelativePart(path);
@@ -2614,7 +2647,7 @@ public class FileContext implements PathCapabilities {
* @param path Path to get extended attributes
* @return Map{@literal <}String, byte[]{@literal >} describing the XAttrs
* of the file or directory
- * @throws IOException
+ * @throws IOException If an I/O error occurred.
*/
public Map getXAttrs(Path path) throws IOException {
final Path absF = fixRelativePart(path);
@@ -2638,7 +2671,7 @@ public class FileContext implements PathCapabilities {
* @param names XAttr names.
* @return Map{@literal <}String, byte[]{@literal >} describing the XAttrs
* of the file or directory
- * @throws IOException
+ * @throws IOException If an I/O error occurred.
*/
public Map getXAttrs(Path path, final List names)
throws IOException {
@@ -2661,7 +2694,7 @@ public class FileContext implements PathCapabilities {
*
* @param path Path to remove extended attribute
* @param name xattr name
- * @throws IOException
+ * @throws IOException If an I/O error occurred.
*/
public void removeXAttr(Path path, final String name) throws IOException {
final Path absF = fixRelativePart(path);
@@ -2685,7 +2718,7 @@ public class FileContext implements PathCapabilities {
* @param path Path to get extended attributes
* @return List{@literal <}String{@literal >} of the XAttr names of the
* file or directory
- * @throws IOException
+ * @throws IOException If an I/O error occurred.
*/
public List listXAttrs(Path path) throws IOException {
final Path absF = fixRelativePart(path);
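Taken together, the xattr methods above form a simple round trip. A sketch, assuming an illustrative attribute in the "user." namespace and the usual java.nio.charset.StandardCharsets import:

    FileContext fc = FileContext.getFileContext(new Configuration());
    Path p = new Path("/data/file.bin");
    fc.setXAttr(p, "user.origin", "uploader-1".getBytes(StandardCharsets.UTF_8));
    byte[] origin = fc.getXAttr(p, "user.origin"); // value written above
    List<String> names = fc.listXAttrs(p);         // includes "user.origin"
    fc.removeXAttr(p, "user.origin");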
@@ -2802,7 +2835,7 @@ public class FileContext implements PathCapabilities {
/**
* Set the source path to satisfy storage policy.
* @param path The source path referring to either a directory or a file.
- * @throws IOException
+ * @throws IOException If an I/O error occurred.
*/
public void satisfyStoragePolicy(final Path path)
throws IOException {
@@ -2824,6 +2857,7 @@ public class FileContext implements PathCapabilities {
* @param policyName the name of the target storage policy. The list
* of supported Storage policies can be retrieved
* via {@link #getAllStoragePolicies}.
+ * @throws IOException If an I/O error occurred.
*/
public void setStoragePolicy(final Path path, final String policyName)
throws IOException {
@@ -2841,7 +2875,7 @@ public class FileContext implements PathCapabilities {
/**
* Unset the storage policy set for a given file or directory.
* @param src file or directory path.
- * @throws IOException
+ * @throws IOException If an I/O error occurred.
*/
public void unsetStoragePolicy(final Path src) throws IOException {
final Path absF = fixRelativePart(src);
@@ -2860,7 +2894,7 @@ public class FileContext implements PathCapabilities {
*
* @param path file or directory path.
* @return storage policy for give file.
- * @throws IOException
+ * @throws IOException If an I/O error occurred.
*/
public BlockStoragePolicySpi getStoragePolicy(Path path) throws IOException {
final Path absF = fixRelativePart(path);
@@ -2878,7 +2912,7 @@ public class FileContext implements PathCapabilities {
* Retrieve all the storage policies supported by this file system.
*
* @return all storage policies supported by this filesystem.
- * @throws IOException
+ * @throws IOException If an I/O error occurred.
*/
public Collection extends BlockStoragePolicySpi> getAllStoragePolicies()
throws IOException {
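A sketch tying the storage-policy calls together; the path and the policy name "COLD" are assumptions (valid names depend on the underlying filesystem, e.g. HDFS):

    FileContext fc = FileContext.getFileContext(new Configuration());
    Path path = new Path("/archive/2023");
    fc.setStoragePolicy(path, "COLD");
    BlockStoragePolicySpi current = fc.getStoragePolicy(path);
    for (BlockStoragePolicySpi p : fc.getAllStoragePolicies()) {
      System.out.println(p.getName());
    }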
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileEncryptionInfo.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileEncryptionInfo.java
index 9260b9a62c6..f50c06cec38 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileEncryptionInfo.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileEncryptionInfo.java
@@ -52,6 +52,7 @@ public class FileEncryptionInfo implements Serializable {
* @param keyName name of the key used for the encryption zone
* @param ezKeyVersionName name of the KeyVersion used to encrypt the
* encrypted data encryption key.
+ * @param version the crypto protocol version.
*/
public FileEncryptionInfo(final CipherSuite suite,
final CryptoProtocolVersion version, final byte[] edek,
@@ -134,6 +135,8 @@ public class FileEncryptionInfo implements Serializable {
*
* NOTE:
* Currently this method is used by CLI for backward compatibility.
+ *
+ * @return a stable string representation.
*/
public String toStringStable() {
StringBuilder builder = new StringBuilder("{")
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileStatus.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileStatus.java
index d7ca8f172f8..fcef578b072 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileStatus.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileStatus.java
@@ -116,6 +116,17 @@ public class FileStatus implements Writable, Comparable,
/**
* Constructor for file systems on which symbolic links are not supported
+ *
+ * @param length the number of bytes in the file.
+ * @param isdir whether the path is a directory.
+ * @param block_replication the block replication factor.
+ * @param blocksize the block size.
+ * @param modification_time the modification time.
+ * @param access_time the access time.
+ * @param permission the permission.
+ * @param owner the owner.
+ * @param group the group.
+ * @param path the path.
*/
public FileStatus(long length, boolean isdir,
int block_replication,
@@ -182,6 +193,7 @@ public class FileStatus implements Writable, Comparable,
* Copy constructor.
*
* @param other FileStatus to copy
+ * @throws IOException raised on errors performing I/O.
*/
public FileStatus(FileStatus other) throws IOException {
// It's important to call the getters here instead of directly accessing the
@@ -375,6 +387,8 @@ public class FileStatus implements Writable, Comparable,
/**
* @return The contents of the symbolic link.
+ *
+ * @throws IOException raised on errors performing I/O.
*/
public Path getSymlink() throws IOException {
if (!isSymlink()) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
index aa194e84a35..0bc419b0353 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystem.java
@@ -104,13 +104,13 @@ import static org.apache.hadoop.fs.impl.PathCapabilitiesSupport.validatePathCapa
* All user code that may potentially use the Hadoop Distributed
* File System should be written to use a FileSystem object or its
* successor, {@link FileContext}.
- *
+ *
*
* The local implementation is {@link LocalFileSystem} and distributed
* implementation is DistributedFileSystem. There are other implementations
* for object stores and (outside the Apache Hadoop codebase),
* third party filesystems.
- *
+ *
* Notes
*
* The behaviour of the filesystem is
@@ -133,13 +133,12 @@ import static org.apache.hadoop.fs.impl.PathCapabilitiesSupport.validatePathCapa
* New methods may be marked as Unstable or Evolving for their initial release,
* as a warning that they are new and may change based on the
* experience of use in applications.
- *
+ *
* Important note for developers
- *
+ *
* If you are making changes here to the public API or protected methods,
* you must review the following subclasses and make sure that
* they are filtering/passing through new methods as appropriate.
- *
*
* {@link FilterFileSystem}: methods are passed through. If not,
* then {@code TestFilterFileSystem.MustNotImplement} must be
@@ -148,21 +147,22 @@ import static org.apache.hadoop.fs.impl.PathCapabilitiesSupport.validatePathCapa
* {@link #hasPathCapability(Path, String)} then
* {@link FilterFileSystem#hasPathCapability(Path, String)}
* must return false, always.
- *
+ *
* {@link ChecksumFileSystem}: checksums are created and
* verified.
- *
+ *
* {@code TestHarFileSystem} will need its {@code MustNotImplement}
* interface updated.
- *
*
+ *
* There are some external places your changes will break things.
* Do co-ordinate changes here.
- *
+ *
*
* HBase: HBoss
- *
+ *
* Hive: HiveShim23
+ *
* {@code shims/0.23/src/main/java/org/apache/hadoop/hive/shims/Hadoop23Shims.java}
*
*****************************************************************/
@@ -281,6 +281,8 @@ public abstract class FileSystem extends Configured
/**
* Returns the configured FileSystem implementation.
* @param conf the configuration to use
+ * @return the configured FileSystem instance.
+ * @throws IOException If an I/O error occurred.
*/
public static FileSystem get(Configuration conf) throws IOException {
return get(getDefaultUri(conf), conf);
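A minimal sketch of obtaining filesystem instances (the file:/// URI is an assumption for illustration; java.net.URI is assumed imported):

    Configuration conf = new Configuration();
    FileSystem defaultFs = FileSystem.get(conf);   // uses fs.defaultFS
    FileSystem localFs = FileSystem.get(URI.create("file:///"), conf);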
@@ -375,6 +377,7 @@ public abstract class FileSystem extends Configured
* implement that method.
*
* @see #canonicalizeUri(URI)
+ * @return the URI of this filesystem.
*/
protected URI getCanonicalUri() {
return canonicalizeUri(getUri());
@@ -391,6 +394,7 @@ public abstract class FileSystem extends Configured
* not specified and if {@link #getDefaultPort()} returns a
* default port.
*
+ * @param uri the URI to canonicalize.
* @return URI
* @see NetUtils#getCanonicalUri(URI, int)
*/
@@ -454,11 +458,21 @@ public abstract class FileSystem extends Configured
: null;
}
- /** @deprecated call {@link #getUri()} instead.*/
+ /**
+ * @return the URI of this filesystem, as a string.
+ * @deprecated call {@link #getUri()} instead.
+ */
@Deprecated
public String getName() { return getUri().toString(); }
- /** @deprecated call {@link #get(URI, Configuration)} instead. */
+ /**
+ * @deprecated call {@link #get(URI, Configuration)} instead.
+ *
+ * @param name the URI string of the filesystem.
+ * @param conf the configuration to use.
+ * @return the filesystem instance.
+ * @throws IOException If an I/O error occurred.
+ */
@Deprecated
public static FileSystem getNamed(String name, Configuration conf)
throws IOException {
@@ -513,6 +527,9 @@ public abstract class FileSystem extends Configured
* configuration and URI, cached and returned to the caller.
*
*
+ * @param uri uri of the filesystem.
+ * @param conf the configuration to use.
+ * @return filesystem instance.
* @throws IOException if the FileSystem cannot be instantiated.
*/
public static FileSystem get(URI uri, Configuration conf) throws IOException {
@@ -542,7 +559,7 @@ public abstract class FileSystem extends Configured
/**
* Returns the FileSystem for this URI's scheme and authority and the
* given user. Internally invokes {@link #newInstance(URI, Configuration)}
- * @param uri of the filesystem
+ * @param uri uri of the filesystem.
* @param conf the configuration to use
* @param user to perform the get as
* @return filesystem instance
@@ -860,6 +877,7 @@ public abstract class FileSystem extends Configured
* @param start offset into the given file
* @param len length for which to get locations for
* @throws IOException IO failure
+ * @return block location array.
*/
public BlockLocation[] getFileBlockLocations(FileStatus file,
long start, long len) throws IOException {
@@ -900,6 +918,7 @@ public abstract class FileSystem extends Configured
* @param len length for which to get locations for
* @throws FileNotFoundException when the path does not exist
* @throws IOException IO failure
+ * @return block location array.
*/
public BlockLocation[] getFileBlockLocations(Path p,
long start, long len) throws IOException {
@@ -962,6 +981,7 @@ public abstract class FileSystem extends Configured
* @param f the file name to open
* @param bufferSize the size of the buffer to be used.
* @throws IOException IO failure
+ * @return input stream.
*/
public abstract FSDataInputStream open(Path f, int bufferSize)
throws IOException;
@@ -970,6 +990,7 @@ public abstract class FileSystem extends Configured
* Opens an FSDataInputStream at the indicated Path.
* @param f the file to open
* @throws IOException IO failure
+ * @return input stream.
*/
public FSDataInputStream open(Path f) throws IOException {
return open(f, getConf().getInt(IO_FILE_BUFFER_SIZE_KEY,
@@ -987,6 +1008,7 @@ public abstract class FileSystem extends Configured
* @throws IOException IO failure
* @throws UnsupportedOperationException If {@link #open(PathHandle, int)}
* not overridden by subclass
+ * @return input stream.
*/
public FSDataInputStream open(PathHandle fd) throws IOException {
return open(fd, getConf().getInt(IO_FILE_BUFFER_SIZE_KEY,
@@ -1004,6 +1026,7 @@ public abstract class FileSystem extends Configured
* not satisfied
* @throws IOException IO failure
* @throws UnsupportedOperationException If not overridden by subclass
+ * @return input stream.
*/
public FSDataInputStream open(PathHandle fd, int bufferSize)
throws IOException {
@@ -1021,6 +1044,7 @@ public abstract class FileSystem extends Configured
* not overridden by subclass.
* @throws UnsupportedOperationException If this FileSystem cannot enforce
* the specified constraints.
+ * @return path handle.
*/
public final PathHandle getPathHandle(FileStatus stat, HandleOpt... opt) {
// method is final with a default so clients calling getPathHandle(stat)
@@ -1036,6 +1060,7 @@ public abstract class FileSystem extends Configured
* @param stat Referent in the target FileSystem
* @param opt Constraints that determine the validity of the
* {@link PathHandle} reference.
+ * @return path handle.
*/
protected PathHandle createPathHandle(FileStatus stat, HandleOpt... opt) {
throw new UnsupportedOperationException();
@@ -1046,6 +1071,7 @@ public abstract class FileSystem extends Configured
* Files are overwritten by default.
* @param f the file to create
* @throws IOException IO failure
+ * @return output stream.
*/
public FSDataOutputStream create(Path f) throws IOException {
return create(f, true);
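A sketch of the simplest create/read cycle with this overload (fs is a FileSystem obtained as above, and the path is illustrative; create(f) overwrites by default):

    Path p = new Path("/tmp/example.txt");
    try (FSDataOutputStream out = fs.create(p)) {
      out.writeUTF("hello");
    }
    try (FSDataInputStream in = fs.open(p)) {
      String s = in.readUTF();   // "hello"
    }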
@@ -1057,6 +1083,7 @@ public abstract class FileSystem extends Configured
* @param overwrite if a file with this name already exists, then if true,
* the file will be overwritten, and if false an exception will be thrown.
* @throws IOException IO failure
+ * @return output stream.
*/
public FSDataOutputStream create(Path f, boolean overwrite)
throws IOException {
@@ -1074,6 +1101,7 @@ public abstract class FileSystem extends Configured
* @param f the file to create
* @param progress to report progress
* @throws IOException IO failure
+ * @return output stream.
*/
public FSDataOutputStream create(Path f, Progressable progress)
throws IOException {
@@ -1090,6 +1118,7 @@ public abstract class FileSystem extends Configured
* @param f the file to create
* @param replication the replication factor
* @throws IOException IO failure
+ * @return output stream.
*/
public FSDataOutputStream create(Path f, short replication)
throws IOException {
@@ -1108,6 +1137,7 @@ public abstract class FileSystem extends Configured
* @param replication the replication factor
* @param progress to report progress
* @throws IOException IO failure
+ * @return output stream.
*/
public FSDataOutputStream create(Path f, short replication,
Progressable progress) throws IOException {
@@ -1125,6 +1155,7 @@ public abstract class FileSystem extends Configured
* the file will be overwritten, and if false an error will be thrown.
* @param bufferSize the size of the buffer to be used.
* @throws IOException IO failure
+ * @return output stream.
*/
public FSDataOutputStream create(Path f,
boolean overwrite,
@@ -1144,7 +1175,9 @@ public abstract class FileSystem extends Configured
* @param overwrite if a file with this name already exists, then if true,
* the file will be overwritten, and if false an error will be thrown.
* @param bufferSize the size of the buffer to be used.
+ * @param progress to report progress.
* @throws IOException IO failure
+ * @return output stream.
*/
public FSDataOutputStream create(Path f,
boolean overwrite,
@@ -1164,7 +1197,9 @@ public abstract class FileSystem extends Configured
* the file will be overwritten, and if false an error will be thrown.
* @param bufferSize the size of the buffer to be used.
* @param replication required block replication for the file.
+ * @param blockSize the block size to be used.
* @throws IOException IO failure
+ * @return output stream.
*/
public FSDataOutputStream create(Path f,
boolean overwrite,
@@ -1182,7 +1217,10 @@ public abstract class FileSystem extends Configured
* the file will be overwritten, and if false an error will be thrown.
* @param bufferSize the size of the buffer to be used.
* @param replication required block replication for the file.
+ * @param blockSize the block size to be used.
+ * @param progress to report progress.
* @throws IOException IO failure
+ * @return output stream.
*/
public FSDataOutputStream create(Path f,
boolean overwrite,
@@ -1209,6 +1247,7 @@ public abstract class FileSystem extends Configured
* @param progress the progress reporter
* @throws IOException IO failure
* @see #setPermission(Path, FsPermission)
+ * @return output stream.
*/
public abstract FSDataOutputStream create(Path f,
FsPermission permission,
@@ -1230,6 +1269,7 @@ public abstract class FileSystem extends Configured
* @param progress the progress reporter
* @throws IOException IO failure
* @see #setPermission(Path, FsPermission)
+ * @return output stream.
*/
public FSDataOutputStream create(Path f,
FsPermission permission,
@@ -1256,6 +1296,7 @@ public abstract class FileSystem extends Configured
* found in conf will be used.
* @throws IOException IO failure
* @see #setPermission(Path, FsPermission)
+ * @return output stream.
*/
public FSDataOutputStream create(Path f,
FsPermission permission,
@@ -1277,6 +1318,16 @@ public abstract class FileSystem extends Configured
* the permission with umask before calling this method.
* This a temporary method added to support the transition from FileSystem
* to FileContext for user applications.
+ *
+ * @param f path.
+ * @param absolutePermission permission.
+ * @param flag create flag.
+ * @param bufferSize buffer size.
+ * @param replication replication.
+ * @param blockSize block size.
+ * @param progress progress.
+ * @param checksumOpt checksum option.
+ * @return output stream.
* @throws IOException IO failure
*/
@Deprecated
@@ -1331,6 +1382,11 @@ public abstract class FileSystem extends Configured
* with umask before calling this method.
* This a temporary method added to support the transition from FileSystem
* to FileContext for user applications.
+ *
+ * @param f the path.
+ * @param absolutePermission permission.
+ * @param createParent whether to create missing parent directories.
+ * @throws IOException IO failure.
*/
@Deprecated
protected void primitiveMkdir(Path f, FsPermission absolutePermission,
@@ -1370,6 +1426,7 @@ public abstract class FileSystem extends Configured
* @param progress the progress reporter
* @throws IOException IO failure
* @see #setPermission(Path, FsPermission)
+ * @return output stream.
*/
public FSDataOutputStream createNonRecursive(Path f,
boolean overwrite,
@@ -1393,6 +1450,7 @@ public abstract class FileSystem extends Configured
* @param progress the progress reporter
* @throws IOException IO failure
* @see #setPermission(Path, FsPermission)
+ * @return output stream.
*/
public FSDataOutputStream createNonRecursive(Path f, FsPermission permission,
boolean overwrite, int bufferSize, short replication, long blockSize,
@@ -1416,6 +1474,7 @@ public abstract class FileSystem extends Configured
* @param progress the progress reporter
* @throws IOException IO failure
* @see #setPermission(Path, FsPermission)
+ * @return output stream.
*/
public FSDataOutputStream createNonRecursive(Path f, FsPermission permission,
EnumSet flags, int bufferSize, short replication, long blockSize,
@@ -1430,6 +1489,7 @@ public abstract class FileSystem extends Configured
* Important: the default implementation is not atomic
* @param f path to use for create
* @throws IOException IO failure
+ * @return true if the file was created; false if it already exists.
*/
public boolean createNewFile(Path f) throws IOException {
if (exists(f)) {
@@ -1450,6 +1510,7 @@ public abstract class FileSystem extends Configured
* @throws IOException IO failure
* @throws UnsupportedOperationException if the operation is unsupported
* (default).
+ * @return output stream.
*/
public FSDataOutputStream append(Path f) throws IOException {
return append(f, getConf().getInt(IO_FILE_BUFFER_SIZE_KEY,
@@ -1464,6 +1525,7 @@ public abstract class FileSystem extends Configured
* @throws IOException IO failure
* @throws UnsupportedOperationException if the operation is unsupported
* (default).
+ * @return output stream.
*/
public FSDataOutputStream append(Path f, int bufferSize) throws IOException {
return append(f, bufferSize, null);
@@ -1477,6 +1539,7 @@ public abstract class FileSystem extends Configured
* @throws IOException IO failure
* @throws UnsupportedOperationException if the operation is unsupported
* (default).
+ * @return output stream.
*/
public abstract FSDataOutputStream append(Path f, int bufferSize,
Progressable progress) throws IOException;
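A sketch of appending, assuming a filesystem that supports it (many object stores throw UnsupportedOperationException here) and the usual StandardCharsets import:

    try (FSDataOutputStream out = fs.append(new Path("/logs/app.log"))) {
      out.write("one more line\n".getBytes(StandardCharsets.UTF_8));
    }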
@@ -1515,7 +1578,7 @@ public abstract class FileSystem extends Configured
* This is the default behavior.
* @param src file name
* @param replication new replication
- * @throws IOException
+ * @throws IOException an IO failure.
* @return true if successful, or the feature in unsupported;
* false if replication is supported but the file does not exist,
* or is a directory
@@ -1544,11 +1607,12 @@ public abstract class FileSystem extends Configured
*
* If OVERWRITE option is not passed as an argument, rename fails
* if the dst already exists.
+ *
*
* If OVERWRITE option is passed as an argument, rename overwrites
* the dst if it is a file or an empty directory. Rename fails if dst is
* a non-empty directory.
- *
+ *
* Note that atomicity of rename is dependent on the file system
* implementation. Please refer to the file system documentation for
* details. This default implementation is non atomic.
@@ -1556,9 +1620,11 @@ public abstract class FileSystem extends Configured
* This method is deprecated since it is a temporary method added to
* support the transition from FileSystem to FileContext for user
* applications.
+ *
*
* @param src path to be renamed
* @param dst new path after rename
+ * @param options rename options.
* @throws FileNotFoundException src path does not exist, or the parent
* path of dst does not exist.
* @throws FileAlreadyExistsException dest path exists and is a file
@@ -1653,6 +1719,9 @@ public abstract class FileSystem extends Configured
/**
* Delete a file/directory.
+ * @param f the path.
+ * @throws IOException IO failure.
+ * @return true if delete is successful; false otherwise.
* @deprecated Use {@link #delete(Path, boolean)} instead.
*/
@Deprecated
@@ -1769,6 +1838,7 @@ public abstract class FileSystem extends Configured
* @param f path to check
* @throws IOException IO failure
* @deprecated Use {@link #getFileStatus(Path)} instead
+ * @return true if f is a directory; false otherwise.
*/
@Deprecated
public boolean isDirectory(Path f) throws IOException {
@@ -1786,6 +1856,7 @@ public abstract class FileSystem extends Configured
* @param f path to check
* @throws IOException IO failure
* @deprecated Use {@link #getFileStatus(Path)} instead
+ * @return true if f is a regular file; false otherwise.
*/
@Deprecated
public boolean isFile(Path f) throws IOException {
@@ -1798,6 +1869,7 @@ public abstract class FileSystem extends Configured
/**
* The number of bytes in a file.
+ * @param f the path.
* @return the number of bytes; 0 for a directory
* @deprecated Use {@link #getFileStatus(Path)} instead.
* @throws FileNotFoundException if the path does not resolve
@@ -1812,6 +1884,7 @@ public abstract class FileSystem extends Configured
* @param f path to use
* @throws FileNotFoundException if the path does not resolve
* @throws IOException IO failure
+ * @return content summary.
*/
public ContentSummary getContentSummary(Path f) throws IOException {
FileStatus status = getFileStatus(f);
@@ -1946,9 +2019,9 @@ public abstract class FileSystem extends Configured
* @param f Path to list
* @param token opaque iteration token returned by previous call, or null
* if this is the first call.
- * @return
- * @throws FileNotFoundException
- * @throws IOException
+ * @return directory entries.
+ * @throws FileNotFoundException when the path does not exist.
+ * @throws IOException If an I/O error occurred.
*/
@InterfaceAudience.Private
protected DirectoryEntries listStatusBatch(Path f, byte[] token) throws
@@ -1979,6 +2052,8 @@ public abstract class FileSystem extends Configured
/**
* List corrupted file blocks.
+ *
+ * @param path the path.
* @return an iterator over the corrupt files under the given path
* (may contain duplicates if a file has more than one corrupt block)
* @throws UnsupportedOperationException if the operation is unsupported
@@ -2072,36 +2147,29 @@ public abstract class FileSystem extends Configured
 *    <dt> <tt> ? </tt>
 *    <dd> Matches any single character.
 *
- *    <p>
 *    <dt> <tt> * </tt>
 *    <dd> Matches zero or more characters.
 *
- *    <p>
 *    <dt> <tt> [abc] </tt>
 *    <dd> Matches a single character from character set
 *     <tt>{a,b,c}</tt>.
 *
- *    <p>
 *    <dt> <tt> [a-b] </tt>
 *    <dd> Matches a single character from the character range
 *     <tt>{a...b}</tt>. Note that character <tt>a</tt> must be
 *     lexicographically less than or equal to character <tt>b</tt>.
 *
- *    <p>
 *    <dt> <tt> [^a] </tt>
 *    <dd> Matches a single character that is not from character set or range
 *     <tt>{a}</tt>. Note that the <tt>^</tt> character must occur
 *     immediately to the right of the opening bracket.
 *
- *    <p>
 *    <dt> <tt> \c </tt>
 *    <dd> Removes (escapes) any special meaning of character <tt>c</tt>.
 *
- *    <p>
 *    <dt> <tt> {ab,cd} </tt>
 *    <dd> Matches a string from the string set <tt>{ab, cd}</tt>
 *
- *    <p>
 *    <dt> <tt> {ab,c{de,fh}} </tt>
 *    <dd> Matches a string from the string set <tt>{ab, cde, cfh}</tt>
 *
@@ -2332,6 +2400,7 @@ public abstract class FileSystem extends Configured
/** Return the current user's home directory in this FileSystem.
* The default implementation returns {@code "/user/$USER/"}.
+ * @return the path.
*/
public Path getHomeDirectory() {
String username;
@@ -2394,6 +2463,7 @@ public abstract class FileSystem extends Configured
* @param f path to create
* @param permission to apply to f
* @throws IOException IO failure
+ * @return true if the directory was created; false otherwise.
*/
public abstract boolean mkdirs(Path f, FsPermission permission
) throws IOException;
@@ -2441,6 +2511,7 @@ public abstract class FileSystem extends Configured
* @param delSrc whether to delete the src
* @param src path
* @param dst path
+ * @throws IOException IO failure.
*/
public void copyFromLocalFile(boolean delSrc, Path src, Path dst)
throws IOException {
@@ -2555,6 +2626,7 @@ public abstract class FileSystem extends Configured
* @param fsOutputFile path of output file
* @param tmpLocalFile path of local tmp file
* @throws IOException IO failure
+ * @return the path of the local file to write to.
*/
public Path startLocalOutput(Path fsOutputFile, Path tmpLocalFile)
throws IOException {
@@ -2602,6 +2674,7 @@ public abstract class FileSystem extends Configured
/**
* Return the total size of all files in the filesystem.
* @throws IOException IO failure
+ * @return the total size of all files, in bytes.
*/
public long getUsed() throws IOException {
Path path = new Path("/");
@@ -2610,7 +2683,9 @@ public abstract class FileSystem extends Configured
/**
* Return the total size of all files from a specified path.
+ * @param path the path.
* @throws IOException IO failure
+ * @return the total size of all files under the path, in bytes.
*/
public long getUsed(Path path) throws IOException {
return getContentSummary(path).getLength();
@@ -2633,6 +2708,7 @@ public abstract class FileSystem extends Configured
* Return the number of bytes that large input files should be optimally
* be split into to minimize I/O time.
* @deprecated use {@link #getDefaultBlockSize(Path)} instead
+ * @return default block size.
*/
@Deprecated
public long getDefaultBlockSize() {
@@ -2685,8 +2761,8 @@ public abstract class FileSystem extends Configured
* In some FileSystem implementations such as HDFS metadata
* synchronization is essential to guarantee consistency of read requests
* particularly in HA setting.
- * @throws IOException
- * @throws UnsupportedOperationException
+ * @throws IOException If an I/O error occurred.
+ * @throws UnsupportedOperationException if the operation is unsupported.
*/
public void msync() throws IOException, UnsupportedOperationException {
throw new UnsupportedOperationException(getClass().getCanonicalName() +
@@ -2762,6 +2838,8 @@ public abstract class FileSystem extends Configured
/**
* See {@link FileContext#fixRelativePart}.
+ * @param p the path.
+ * @return the path, made absolute if it was relative.
*/
protected Path fixRelativePart(Path p) {
if (p.isUriPathAbsolute()) {
@@ -2773,6 +2851,18 @@ public abstract class FileSystem extends Configured
/**
* See {@link FileContext#createSymlink(Path, Path, boolean)}.
+ *
+ * @param target the target of the symlink.
+ * @param link the path of the symlink to create.
+ * @param createParent whether to create missing parent directories.
+ * @throws AccessControlException if access is denied.
+ * @throws FileAlreadyExistsException when the link already exists.
+ * @throws FileNotFoundException when the path does not exist.
+ * @throws ParentNotDirectoryException if the parent path of dest is not
+ * a directory.
+ * @throws UnsupportedFileSystemException if there was no known implementation
+ * for the scheme.
+ * @throws IOException raised on errors performing I/O.
*/
public void createSymlink(final Path target, final Path link,
final boolean createParent) throws AccessControlException,
@@ -2786,8 +2876,14 @@ public abstract class FileSystem extends Configured
/**
* See {@link FileContext#getFileLinkStatus(Path)}.
- * @throws FileNotFoundException when the path does not exist
- * @throws IOException see specific implementation
+ *
+ * @param f the path.
+ * @throws AccessControlException if access is denied.
+ * @throws FileNotFoundException when the path does not exist.
+ * @throws IOException raised on errors performing I/O.
+ * @throws UnsupportedFileSystemException if there was no known implementation
+ * for the scheme.
+ * @return file status.
*/
public FileStatus getFileLinkStatus(final Path f)
throws AccessControlException, FileNotFoundException,
@@ -2798,6 +2894,7 @@ public abstract class FileSystem extends Configured
/**
* See {@link AbstractFileSystem#supportsSymlinks()}.
+ * @return true if symlinks are supported; false otherwise.
*/
public boolean supportsSymlinks() {
return false;
@@ -2805,8 +2902,11 @@ public abstract class FileSystem extends Configured
/**
* See {@link FileContext#getLinkTarget(Path)}.
+ * @param f the path.
* @throws UnsupportedOperationException if the operation is unsupported
* (default outcome).
+ * @throws IOException IO failure.
+ * @return the target of the symbolic link.
*/
public Path getLinkTarget(Path f) throws IOException {
// Supporting filesystems should override this method
@@ -2816,8 +2916,11 @@ public abstract class FileSystem extends Configured
/**
* See {@link AbstractFileSystem#getLinkTarget(Path)}.
+ * @param f the path.
* @throws UnsupportedOperationException if the operation is unsupported
* (default outcome).
+ * @throws IOException IO failure.
+ * @return the resolved target of the link.
*/
protected Path resolveLink(Path f) throws IOException {
// Supporting filesystems should override this method
@@ -3221,7 +3324,7 @@ public abstract class FileSystem extends Configured
/**
* Set the source path to satisfy storage policy.
* @param path The source path referring to either a directory or a file.
- * @throws IOException
+ * @throws IOException If an I/O error occurred.
*/
public void satisfyStoragePolicy(final Path path) throws IOException {
throw new UnsupportedOperationException(
@@ -3529,7 +3632,7 @@ public abstract class FileSystem extends Configured
* @param conf configuration
* @param key key to store/retrieve this FileSystem in the cache
* @return a cached or newly instantiated FileSystem.
- * @throws IOException
+ * @throws IOException If an I/O error occurred.
*/
private FileSystem getInternal(URI uri, Configuration conf, Key key)
throws IOException{
@@ -4024,6 +4127,7 @@ public abstract class FileSystem extends Configured
/**
* Get or create the thread-local data associated with the current thread.
+ * @return statistics data.
*/
public StatisticsData getThreadStatistics() {
StatisticsData data = threadData.get();
@@ -4382,6 +4486,7 @@ public abstract class FileSystem extends Configured
/**
* Return the FileSystem classes that have Statistics.
* @deprecated use {@link #getGlobalStorageStatistics()}
+ * @return the list of Statistics objects.
*/
@Deprecated
public static synchronized List getAllStatistics() {
@@ -4390,6 +4495,7 @@ public abstract class FileSystem extends Configured
/**
* Get the statistics for a particular file system.
+ * @param scheme the URI scheme of the filesystem.
* @param cls the class to lookup
* @return a statistics object
* @deprecated use {@link #getGlobalStorageStatistics()}
@@ -4424,6 +4530,7 @@ public abstract class FileSystem extends Configured
/**
* Print all statistics for all file systems to {@code System.out}
+ * @throws IOException If an I/O error occurred.
*/
public static synchronized
void printStatistics() throws IOException {
@@ -4464,6 +4571,7 @@ public abstract class FileSystem extends Configured
/**
* Get the global storage statistics.
+ * @return global storage statistics.
*/
public static GlobalStorageStatistics getGlobalStorageStatistics() {
return GlobalStorageStatistics.INSTANCE;
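A sketch of walking the global statistics, assuming the StorageStatistics getName()/getLongStatistics() accessors:

    Iterator<StorageStatistics> stats =
        FileSystem.getGlobalStorageStatistics().iterator();
    while (stats.hasNext()) {
      StorageStatistics s = stats.next();
      Iterator<StorageStatistics.LongStatistic> longs = s.getLongStatistics();
      while (longs.hasNext()) {
        StorageStatistics.LongStatistic l = longs.next();
        System.out.println(s.getName() + "." + l.getName()
            + " = " + l.getValue());
      }
    }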
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystemLinkResolver.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystemLinkResolver.java
index 7eec0eb7cec..593495a1daa 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystemLinkResolver.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileSystemLinkResolver.java
@@ -38,8 +38,8 @@ public abstract class FileSystemLinkResolver {
* an UnresolvedLinkException if called on an unresolved {@link Path}.
* @param p Path on which to perform an operation
* @return Generic type returned by operation
- * @throws IOException
- * @throws UnresolvedLinkException
+ * @throws IOException raised on errors performing I/O.
+ * @throws UnresolvedLinkException if the path contains an unresolved link.
*/
abstract public T doCall(final Path p) throws IOException,
UnresolvedLinkException;
@@ -54,7 +54,7 @@ public abstract class FileSystemLinkResolver {
* @param p
* Resolved Target of path
* @return Generic type determined by implementation
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
abstract public T next(final FileSystem fs, final Path p) throws IOException;
@@ -66,7 +66,7 @@ public abstract class FileSystemLinkResolver {
* @param filesys FileSystem with which to try call
* @param path Path with which to try call
* @return Generic type determined by implementation
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public T resolve(final FileSystem filesys, final Path path)
throws IOException {
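A sketch of a concrete resolver: doCall() runs the operation against the original path, and next() retries it on whichever filesystem a symlink resolved to. Here fs and path are assumed effectively-final locals:

    FileStatus st = new FileSystemLinkResolver<FileStatus>() {
      @Override
      public FileStatus doCall(Path p)
          throws IOException, UnresolvedLinkException {
        return fs.getFileStatus(p);
      }
      @Override
      public FileStatus next(FileSystem linkFs, Path p) throws IOException {
        // Retry the same operation on the resolved filesystem.
        return linkFs.getFileStatus(p);
      }
    }.resolve(fs, path);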
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
index 7400ca36daa..2af0a7b9e74 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FileUtil.java
@@ -162,6 +162,8 @@ public class FileUtil {
* (3) If dir is a normal file, it is deleted.
* (4) If dir is a normal directory, then dir and all its contents recursively
* are deleted.
+ * @param dir the file or directory to delete.
+ * @return true if the delete succeeded; false otherwise.
*/
public static boolean fullyDelete(final File dir) {
return fullyDelete(dir, false);
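In effect this is "rm -rf". A sketch with an illustrative directory; a false return means the tree may have been only partially removed:

    File scratch = new File("/tmp/scratch");
    if (!FileUtil.fullyDelete(scratch)) {
      // Partial deletion is possible at this point.
    }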
@@ -257,6 +259,9 @@ public class FileUtil {
* we return false, the directory may be partially-deleted.
* If dir is a symlink to a directory, all the contents of the actual
* directory pointed to by dir will be deleted.
+ *
+ * @param dir the directory whose contents are deleted.
+ * @return true if all contents were successfully deleted; false otherwise.
*/
public static boolean fullyDeleteContents(final File dir) {
return fullyDeleteContents(dir, false);
@@ -267,8 +272,11 @@ public class FileUtil {
* we return false, the directory may be partially-deleted.
* If dir is a symlink to a directory, all the contents of the actual
* directory pointed to by dir will be deleted.
+ *
+ * @param dir the directory whose contents are deleted.
* @param tryGrantPermissions if 'true', try grant +rwx permissions to this
* and all the underlying directories before trying to delete their contents.
+ * @return true if all contents were successfully deleted; false otherwise.
*/
public static boolean fullyDeleteContents(final File dir, final boolean tryGrantPermissions) {
if (tryGrantPermissions) {
@@ -311,7 +319,7 @@ public class FileUtil {
*
* @param fs {@link FileSystem} on which the path is present
* @param dir directory to recursively delete
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
* @deprecated Use {@link FileSystem#delete(Path, boolean)}
*/
@Deprecated
@@ -343,7 +351,17 @@ public class FileUtil {
}
}
- /** Copy files between FileSystems. */
+ /**
+ * Copy files between FileSystems.
+ * @param srcFS source filesystem.
+ * @param src source path.
+ * @param dstFS destination filesystem.
+ * @param dst destination path.
+ * @param deleteSource whether to delete the source after copying.
+ * @param conf configuration.
+ * @return true if the copy succeeded; false otherwise.
+ * @throws IOException raised on errors performing I/O.
+ */
public static boolean copy(FileSystem srcFS, Path src,
FileSystem dstFS, Path dst,
boolean deleteSource,
@@ -391,7 +409,19 @@ public class FileUtil {
return returnVal;
}
- /** Copy files between FileSystems. */
+ /**
+ * Copy files between FileSystems.
+ *
+ * @param srcFS source filesystem.
+ * @param src source path.
+ * @param dstFS destination filesystem.
+ * @param dst destination path.
+ * @param deleteSource whether to delete the source after copying.
+ * @param overwrite whether to overwrite an existing destination.
+ * @param conf configuration.
+ * @throws IOException raised on errors performing I/O.
+ * @return true if the operation succeeded.
+ */
public static boolean copy(FileSystem srcFS, Path src,
FileSystem dstFS, Path dst,
boolean deleteSource,
@@ -403,20 +433,21 @@ public class FileUtil {
/**
* Copy a file/directory tree within/between filesystems.
- *
+ *
* returns true if the operation succeeded. When deleteSource is true,
* this means "after the copy, delete(source) returned true"
* If the destination is a directory, and mkdirs (dest) fails,
* the operation will return false rather than raise any exception.
- *
+ *
* The overwrite flag is about overwriting files; it has no effect about
* handing an attempt to copy a file atop a directory (expect an IOException),
* or a directory over a path which contains a file (mkdir will fail, so
* "false").
- *
+ *
* The operation is recursive, and the deleteSource operation takes place
* as each subdirectory is copied. Therefore, if an operation fails partway
* through, the source tree may be partially deleted.
+ *
* @param srcFS source filesystem
* @param srcStatus status of source
* @param dstFS destination filesystem
@@ -471,7 +502,17 @@ public class FileUtil {
}
- /** Copy local files to a FileSystem. */
+ /**
+ * Copy local files to a FileSystem.
+ *
+ * @param src source file.
+ * @param dstFS destination filesystem.
+ * @param dst destination path.
+ * @param deleteSource whether to delete the source after copying.
+ * @param conf configuration.
+ * @throws IOException raised on errors performing I/O.
+ * @return true if the operation succeeded.
+ */
public static boolean copy(File src,
FileSystem dstFS, Path dst,
boolean deleteSource,
@@ -514,7 +555,17 @@ public class FileUtil {
}
}
- /** Copy FileSystem files to local files. */
+ /**
+ * Copy FileSystem files to local files.
+ *
+ * @param srcFS source filesystem.
+ * @param src source path.
+ * @param dst destination file.
+ * @param deleteSource whether to delete the source after copying.
+ * @param conf configuration.
+ * @throws IOException raised on errors performing I/O.
+ * @return true if the operation succeeded.
+ */
public static boolean copy(FileSystem srcFS, Path src,
File dst, boolean deleteSource,
Configuration conf) throws IOException {
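A sketch of pulling a file onto the local disk with this overload (paths are illustrative; deleteSource=false keeps the remote copy):

    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.get(conf);
    boolean ok = FileUtil.copy(fs, new Path("/data/in.bin"),
        new File("/tmp/in.bin"), false /* deleteSource */, conf);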
@@ -958,7 +1009,7 @@ public class FileUtil {
*
* @param inFile The tar file as input.
* @param untarDir The untar directory where to untar the tar file.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public static void unTar(File inFile, File untarDir) throws IOException {
if (!untarDir.mkdirs()) {
@@ -1169,6 +1220,7 @@ public class FileUtil {
* @param target the target for symlink
* @param linkname the symlink
* @return 0 on success
+ * @throws IOException raised on errors performing I/O.
*/
public static int symLink(String target, String linkname) throws IOException{
@@ -1230,8 +1282,8 @@ public class FileUtil {
* @param filename the name of the file to change
* @param perm the permission string
* @return the exit code from the command
- * @throws IOException
- * @throws InterruptedException
+ * @throws IOException raised on errors performing I/O.
+ * @throws InterruptedException command interrupted.
*/
public static int chmod(String filename, String perm
) throws IOException, InterruptedException {
@@ -1245,7 +1297,7 @@ public class FileUtil {
* @param perm permission string
* @param recursive true, if permissions should be changed recursively
* @return the exit code from the command.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public static int chmod(String filename, String perm, boolean recursive)
throws IOException {
@@ -1271,7 +1323,7 @@ public class FileUtil {
* @param file the file to change
* @param username the new user owner name
* @param groupname the new group owner name
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public static void setOwner(File file, String username,
String groupname) throws IOException {
@@ -1288,7 +1340,7 @@ public class FileUtil {
* Platform independent implementation for {@link File#setReadable(boolean)}
* File#setReadable does not work as expected on Windows.
* @param f input file
- * @param readable
+ * @param readable whether the file should be readable.
* @return true on success, false otherwise
*/
public static boolean setReadable(File f, boolean readable) {
@@ -1309,7 +1361,7 @@ public class FileUtil {
* Platform independent implementation for {@link File#setWritable(boolean)}
* File#setWritable does not work as expected on Windows.
* @param f input file
- * @param writable
+ * @param writable whether the file should be writable.
* @return true on success, false otherwise
*/
public static boolean setWritable(File f, boolean writable) {
@@ -1333,7 +1385,7 @@ public class FileUtil {
* behavior on Windows as on Unix platforms. Creating, deleting or renaming
* a file within that folder will still succeed on Windows.
* @param f input file
- * @param executable
+ * @param executable whether the file should be executable.
* @return true on success, false otherwise
*/
public static boolean setExecutable(File f, boolean executable) {
@@ -1412,7 +1464,7 @@ public class FileUtil {
* of forking if group == other.
* @param f the file to change
* @param permission the new permissions
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public static void setPermission(File f, FsPermission permission
) throws IOException {
@@ -1717,6 +1769,7 @@ public class FileUtil {
* wildcard path to return all jars from the directory to use in a classpath.
*
* @param path the path to the directory. The path may include the wildcard.
+ * @param useLocal whether the directory is on the local filesystem.
* @return the list of jars as URLs, or an empty list if there are no jars, or
* the directory does not exist
*/
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java
index 607aa263622..cdbe51e3307 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FilterFileSystem.java
@@ -233,7 +233,7 @@ public class FilterFileSystem extends FileSystem {
*
* @param src file name
* @param replication new replication
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
* @return true if successful;
* false if file does not exist or is a directory
*/
@@ -304,7 +304,7 @@ public class FilterFileSystem extends FileSystem {
* Set the current working directory for the given file system. All relative
* paths will be resolved relative to it.
*
- * @param newDir
+ * @param newDir new dir.
*/
@Override
public void setWorkingDirectory(Path newDir) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java
index 7275b70227f..73258661ec1 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsShell.java
@@ -130,7 +130,7 @@ public class FsShell extends Configured implements Tool {
* Returns the current trash location for the path specified
* @param path to be deleted
* @return path to the trash
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public Path getCurrentTrashDir(Path path) throws IOException {
return getTrash().getCurrentTrashDir(path);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsStatus.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsStatus.java
index d392c7d765d..c4bc341bf4f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsStatus.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/FsStatus.java
@@ -35,24 +35,39 @@ public class FsStatus implements Writable {
private long used;
private long remaining;
- /** Construct a FsStatus object, using the specified statistics */
+ /**
+ * Construct a FsStatus object, using the specified statistics.
+ *
+ * @param capacity total capacity in bytes.
+ * @param used number of bytes used.
+ * @param remaining number of bytes remaining.
+ */
public FsStatus(long capacity, long used, long remaining) {
this.capacity = capacity;
this.used = used;
this.remaining = remaining;
}
- /** Return the capacity in bytes of the file system */
+ /**
+ * Return the capacity in bytes of the file system.
+ * @return the capacity in bytes.
+ */
public long getCapacity() {
return capacity;
}
- /** Return the number of bytes used on the file system */
+ /**
+ * Return the number of bytes used on the file system.
+ * @return the number of bytes used.
+ */
public long getUsed() {
return used;
}
- /** Return the number of remaining bytes on the file system */
+ /**
+ * Return the number of remaining bytes on the file system.
+ * @return the number of remaining bytes.
+ */
public long getRemaining() {
return remaining;
}
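In practice an FsStatus is usually obtained from FileSystem#getStatus() rather than built directly; a sketch (fs is an existing FileSystem):

    FsStatus st = fs.getStatus();
    long free = st.getRemaining();
    double pctUsed = 100.0 * st.getUsed() / st.getCapacity();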
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobExpander.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobExpander.java
index cb430ed3f62..c87444c6c87 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobExpander.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobExpander.java
@@ -56,9 +56,9 @@ public class GlobExpander {
* {a,b}/{c/\d} - {a,b}/c/d
*
*
- * @param filePattern
+ * @param filePattern the file pattern to expand.
* @return expanded file patterns
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public static List expand(String filePattern) throws IOException {
List fullyExpanded = new ArrayList();
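Using the example from the javadoc above: a brace group containing a slash is flattened, while other groups are left for downstream glob matching. A sketch:

    List<String> expanded = GlobExpander.expand("{a,b}/{c/\\d}");
    // expanded holds the single pattern "{a,b}/c/d"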
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobalStorageStatistics.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobalStorageStatistics.java
index 30ce07a422e..d9433903444 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobalStorageStatistics.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/GlobalStorageStatistics.java
@@ -104,6 +104,8 @@ public enum GlobalStorageStatistics {
/**
* Get an iterator that we can use to iterate through all the global storage
* statistics objects.
+ *
+ * @return an iterator over all StorageStatistics objects.
*/
synchronized public Iterator iterator() {
Entry first = map.firstEntry();
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
index 7e12d0a11e9..1d64b0bcbe9 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HarFileSystem.java
@@ -463,7 +463,7 @@ public class HarFileSystem extends FileSystem {
* @param start the start of the desired range in the contained file
* @param len the length of the desired range
* @return block locations for this segment of file
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
@Override
public BlockLocation[] getFileBlockLocations(FileStatus file, long start,
@@ -525,7 +525,7 @@ public class HarFileSystem extends FileSystem {
* Combine the status stored in the index and the underlying status.
* @param h status stored in the index
* @return the combined file status
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
private FileStatus toFileStatus(HarStatus h) throws IOException {
final Path p = h.isDir ? archivePath : new Path(archivePath, h.partName);
@@ -635,7 +635,7 @@ public class HarFileSystem extends FileSystem {
* while creating a hadoop archive.
* @param f the path in har filesystem
* @return filestatus.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
@Override
public FileStatus getFileStatus(Path f) throws IOException {
@@ -1104,7 +1104,7 @@ public class HarFileSystem extends FileSystem {
* @param start the start position in the part file
* @param length the length of valid data in the part file
* @param bufsize the buffer size
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public HarFSDataInputStream(FileSystem fs, Path p, long start,
long length, int bufsize) throws IOException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HardLink.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HardLink.java
index 855fbb04e59..1624c5d395a 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HardLink.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HardLink.java
@@ -156,6 +156,7 @@ public class HardLink {
* Creates a hardlink.
* @param file - existing source file
* @param linkName - desired target link file
+ * @throws IOException raised on errors performing I/O.
*/
public static void createHardLink(File file, File linkName)
throws IOException {
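A sketch of creating a link and checking the resulting count (paths are illustrative; source and link must live on the same filesystem):

    File source = new File("/tmp/data.bin");
    File link = new File("/tmp/data.bin.link");
    HardLink.createHardLink(source, link);
    int links = HardLink.getLinkCount(source);   // now 2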
@@ -177,6 +178,7 @@ public class HardLink {
* @param fileBaseNames - list of path-less file names, as returned by
* parentDir.list()
* @param linkDir - where the hardlinks should be put. It must already exist.
+ * @throws IOException raised on errors performing I/O.
*/
public static void createHardLinkMult(File parentDir, String[] fileBaseNames,
File linkDir) throws IOException {
@@ -204,6 +206,10 @@ public class HardLink {
/**
* Retrieves the number of links to the specified file.
+ *
+ * @param fileName the file to inspect.
+ * @throws IOException raised on errors performing I/O.
+ * @return the number of hard links to the file.
*/
public static int getLinkCount(File fileName) throws IOException {
if (fileName == null) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HasFileDescriptor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HasFileDescriptor.java
index bcf325ceca5..a0e89d6aeac 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HasFileDescriptor.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/HasFileDescriptor.java
@@ -33,7 +33,7 @@ public interface HasFileDescriptor {
/**
* @return the FileDescriptor
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public FileDescriptor getFileDescriptor() throws IOException;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java
index 5f266a7b825..f6c9d3c7cb0 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalDirAllocator.java
@@ -78,8 +78,9 @@ public class LocalDirAllocator {
private final DiskValidator diskValidator;
- /**Create an allocator object
- * @param contextCfgItemName
+ /**
+ * Create an allocator object.
+ * @param contextCfgItemName the configuration item that names the context.
*/
public LocalDirAllocator(String contextCfgItemName) {
this.contextCfgItemName = contextCfgItemName;
@@ -123,7 +124,7 @@ public class LocalDirAllocator {
* available disk)
* @param conf the Configuration object
* @return the complete path to the file on a local disk
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public Path getLocalPathForWrite(String pathStr,
Configuration conf) throws IOException {
@@ -139,7 +140,7 @@ public class LocalDirAllocator {
* @param size the size of the file that is going to be written
* @param conf the Configuration object
* @return the complete path to the file on a local disk
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public Path getLocalPathForWrite(String pathStr, long size,
Configuration conf) throws IOException {
@@ -156,7 +157,7 @@ public class LocalDirAllocator {
* @param conf the Configuration object
* @param checkWrite ensure that the path is writable
* @return the complete path to the file on a local disk
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public Path getLocalPathForWrite(String pathStr, long size,
Configuration conf,
@@ -171,7 +172,7 @@ public class LocalDirAllocator {
* @param pathStr the requested file (this will be searched)
* @param conf the Configuration object
* @return the complete path to the file on a local disk
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public Path getLocalPathToRead(String pathStr,
Configuration conf) throws IOException {
@@ -184,7 +185,7 @@ public class LocalDirAllocator {
* @param pathStr the path underneath the roots
* @param conf the configuration to look up the roots in
* @return all of the paths that exist under any of the roots
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public Iterable<Path> getAllLocalPathsToRead(String pathStr,
Configuration conf
@@ -205,7 +206,7 @@ public class LocalDirAllocator {
* @param size the size of the file that is going to be written
* @param conf the Configuration object
* @return a unique temporary file
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public File createTmpFileForWrite(String pathStr, long size,
Configuration conf) throws IOException {
@@ -213,8 +214,9 @@ public class LocalDirAllocator {
return context.createTmpFileForWrite(pathStr, size, conf);
}
- /** Method to check whether a context is valid
- * @param contextCfgItemName
+ /**
+ * Method to check whether a context is valid.
+ * @param contextCfgItemName the configuration key naming the context.
* @return true/false
*/
public static boolean isContextValid(String contextCfgItemName) {
@@ -224,9 +226,9 @@ public class LocalDirAllocator {
}
/**
- * Removes the context from the context config items
+ * Removes the context from the context config items.
*
- * @param contextCfgItemName
+ * @param contextCfgItemName the configuration key naming the context.
*/
@Deprecated
@InterfaceAudience.LimitedPrivate({"MapReduce"})
@@ -236,8 +238,9 @@ public class LocalDirAllocator {
}
}
- /** We search through all the configured dirs for the file's existence
- * and return true when we find
+ /**
+ * We search through all the configured dirs for the file's existence
+ * and return true when we find it.
* @param pathStr the requested file (this will be searched)
* @param conf the Configuration object
* @return true if files exist. false otherwise
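
For context, a minimal sketch of the LocalDirAllocator write-path API documented above; the config key "example.local.dirs", the paths, and the size are illustrative assumptions, not Hadoop defaults:

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.LocalDirAllocator;
import org.apache.hadoop.fs.Path;

public class LocalDirAllocatorExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // The context config item names a comma-separated list of local dirs.
    conf.set("example.local.dirs", "/tmp/a,/tmp/b");
    LocalDirAllocator allocator = new LocalDirAllocator("example.local.dirs");
    // Round-robins across the dirs, skipping any without 1 MB of free space.
    Path p = allocator.getLocalPathForWrite("scratch/part-0000", 1 << 20, conf);
    System.out.println("writing to " + p);
  }
}
```
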
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalFileSystem.java
index c41190a7b36..590cbd9a49e 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/LocalFileSystem.java
@@ -71,7 +71,11 @@ public class LocalFileSystem extends ChecksumFileSystem {
super(rawLocalFileSystem);
}
- /** Convert a path to a File. */
+ /**
+ * Convert a path to a File.
+ * @param path the path.
+ * @return the corresponding local file.
+ */
public File pathToFile(Path path) {
return ((RawLocalFileSystem)fs).pathToFile(path);
}
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32CastagnoliFileChecksum.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32CastagnoliFileChecksum.java
index 5a4a6a97cc4..354e4a6b465 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32CastagnoliFileChecksum.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32CastagnoliFileChecksum.java
@@ -28,7 +28,13 @@ public class MD5MD5CRC32CastagnoliFileChecksum extends MD5MD5CRC32FileChecksum {
this(0, 0, null);
}
- /** Create a MD5FileChecksum */
+ /**
+ * Create an MD5FileChecksum.
+ *
+ * @param bytesPerCRC number of bytes covered by each CRC.
+ * @param crcPerBlock number of CRCs per block.
+ * @param md5 the aggregate MD5 hash.
+ */
public MD5MD5CRC32CastagnoliFileChecksum(int bytesPerCRC, long crcPerBlock, MD5Hash md5) {
super(bytesPerCRC, crcPerBlock, md5);
}
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32FileChecksum.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32FileChecksum.java
index 3fdb7e98262..c5ac381f782 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32FileChecksum.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32FileChecksum.java
@@ -44,7 +44,13 @@ public class MD5MD5CRC32FileChecksum extends FileChecksum {
this(0, 0, null);
}
- /** Create a MD5FileChecksum */
+ /**
+ * Create an MD5FileChecksum.
+ *
+ * @param bytesPerCRC number of bytes covered by each CRC.
+ * @param crcPerBlock number of CRCs per block.
+ * @param md5 the aggregate MD5 hash.
+ */
public MD5MD5CRC32FileChecksum(int bytesPerCRC, long crcPerBlock, MD5Hash md5) {
this.bytesPerCRC = bytesPerCRC;
this.crcPerBlock = crcPerBlock;
@@ -76,7 +82,10 @@ public class MD5MD5CRC32FileChecksum extends FileChecksum {
return WritableUtils.toByteArray(this);
}
- /** returns the CRC type */
+ /**
+ * Returns the CRC type.
+ * @return the data checksum type.
+ */
public DataChecksum.Type getCrcType() {
// default to the one that is understood by all releases.
return DataChecksum.Type.CRC32;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32GzipFileChecksum.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32GzipFileChecksum.java
index 5164d0200d2..f7996c86237 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32GzipFileChecksum.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MD5MD5CRC32GzipFileChecksum.java
@@ -28,7 +28,13 @@ public class MD5MD5CRC32GzipFileChecksum extends MD5MD5CRC32FileChecksum {
this(0, 0, null);
}
- /** Create a MD5FileChecksum */
+ /**
+ * Create an MD5FileChecksum.
+ *
+ * @param bytesPerCRC number of bytes covered by each CRC.
+ * @param crcPerBlock number of CRCs per block.
+ * @param md5 the aggregate MD5 hash.
+ */
public MD5MD5CRC32GzipFileChecksum(int bytesPerCRC, long crcPerBlock, MD5Hash md5) {
super(bytesPerCRC, crcPerBlock, md5);
}
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MultipartUploader.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MultipartUploader.java
index dcb76b50b34..5e4eda26c7f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MultipartUploader.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MultipartUploader.java
@@ -31,10 +31,11 @@ import org.apache.hadoop.fs.statistics.IOStatisticsSource;
/**
* MultipartUploader is an interface for copying files multipart and across
* multiple nodes.
- *
+ *
* The interface extends {@link IOStatisticsSource} so that there is no
* need to cast an instance to see if is a source of statistics.
* However, implementations MAY return null for their actual statistics.
+ *
*/
@InterfaceAudience.Public
@InterfaceStability.Unstable
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MultipartUploaderBuilder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MultipartUploaderBuilder.java
index 381bfaa07f6..e7b0865063e 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MultipartUploaderBuilder.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/MultipartUploaderBuilder.java
@@ -25,34 +25,43 @@ import org.apache.hadoop.fs.permission.FsPermission;
/**
* Builder interface for Multipart readers.
- * @param <S>
- * @param <B>
+ * @param <S> MultipartUploader Generic Type.
+ * @param <B> MultipartUploaderBuilder Generic Type.
*/
public interface MultipartUploaderBuilder<S extends MultipartUploader, B extends MultipartUploaderBuilder<S, B>>
extends FSBuilder<S, B> {
/**
* Set permission for the file.
+ * @param perm permission.
+ * @return the builder.
*/
B permission(@Nonnull FsPermission perm);
/**
* Set the size of the buffer to be used.
+ * @param bufSize buffer size.
+ * @return the builder.
*/
B bufferSize(int bufSize);
/**
* Set replication factor.
+ * @param replica replication factor.
+ * @return the builder.
*/
B replication(short replica);
/**
* Set block size.
+ * @param blkSize block size.
+ * @return the builder.
*/
B blockSize(long blkSize);
/**
* Create an FSDataOutputStream at the specified path.
+ * @return the builder.
*/
B create();
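
A hedged sketch of how this builder chain is meant to be used, assuming a FileSystem whose store supports multipart uploads; the path and buffer size are illustrative:

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.MultipartUploader;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;

public class MultipartUploaderExample {
  public static void main(String[] args) throws Exception {
    FileSystem fs = FileSystem.get(new Configuration());
    // build() comes from FSBuilder; each setter returns the builder (B).
    MultipartUploader uploader =
        fs.createMultipartUploader(new Path("/uploads"))
            .permission(FsPermission.getFileDefault())
            .bufferSize(4 * 1024 * 1024)
            .build();
    uploader.close();
  }
}
```
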
@@ -60,16 +69,21 @@ public interface MultipartUploaderBuilder
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/QuotaUsage.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/QuotaUsage.java
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/QuotaUsage.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/QuotaUsage.java
@@ -328,7 +368,7 @@ public class QuotaUsage {
/**
* return the header of with the StorageTypes.
*
- * @param storageTypes
+ * @param storageTypes storage types.
* @return storage header string
*/
public static String getStorageTypeHeader(List<StorageType> storageTypes) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java
index edcc4a8b99e..468b37a885d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/RawLocalFileSystem.java
@@ -100,7 +100,12 @@ public class RawLocalFileSystem extends FileSystem {
}
}
- /** Convert a path to a File. */
+ /**
+ * Convert a path to a File.
+ *
+ * @param path the path.
+ * @return the corresponding local file.
+ */
public File pathToFile(Path path) {
checkPath(path);
if (!path.isAbsolute()) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Seekable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Seekable.java
index 919c857ffa6..f7546d58e60 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Seekable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Seekable.java
@@ -32,17 +32,27 @@ public interface Seekable {
* Seek to the given offset from the start of the file.
* The next read() will be from that location. Can't
* seek past the end of the file.
+ *
+ * @param pos offset from the start of the file.
+ * @throws IOException raised on errors performing I/O.
*/
void seek(long pos) throws IOException;
-
+
/**
* Return the current offset from the start of the file
+ *
+ * @return offset from the start of the file.
+ * @throws IOException raised on errors performing I/O.
*/
long getPos() throws IOException;
/**
- * Seeks a different copy of the data. Returns true if
+ * Seeks a different copy of the data. Returns true if
* found a new source, false otherwise.
+ *
+ * @param targetPos target position.
+ * @return true if found a new source, false otherwise.
+ * @throws IOException raised on errors performing I/O.
*/
@InterfaceAudience.Private
boolean seekToNewSource(long targetPos) throws IOException;
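
For context, a minimal sketch of the Seekable contract via FSDataInputStream, which implements it; the file path and its minimum length are illustrative assumptions:

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class SeekableExample {
  public static void main(String[] args) throws Exception {
    FileSystem fs = FileSystem.getLocal(new Configuration());
    // Assumes /tmp/data.bin exists and is at least 129 bytes long.
    try (FSDataInputStream in = fs.open(new Path("/tmp/data.bin"))) {
      in.seek(128);                    // next read starts at offset 128
      System.out.println(in.getPos()); // prints 128
      int b = in.read();
    }
  }
}
```
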
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Stat.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Stat.java
index 07f05132900..72a45309b17 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Stat.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Stat.java
@@ -73,8 +73,8 @@ public class Stat extends Shell {
}
/**
- * Whether Stat is supported on the current platform
- * @return
+ * Whether Stat is supported on the current platform.
+ * @return true if Stat is available on the current platform, false otherwise.
*/
public static boolean isAvailable() {
if (Shell.LINUX || Shell.FREEBSD || Shell.MAC) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/StorageStatistics.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/StorageStatistics.java
index 2efe4566344..b4a86ab7812 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/StorageStatistics.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/StorageStatistics.java
@@ -127,6 +127,7 @@ public abstract class StorageStatistics {
/**
* Get the name of this StorageStatistics object.
+ * @return the name of this StorageStatistics object.
*/
public String getName() {
return name;
@@ -145,12 +146,15 @@ public abstract class StorageStatistics {
*
* The values returned will depend on the type of FileSystem or FileContext
* object. The values do not necessarily reflect a snapshot in time.
+ *
+ * @return an iterator over the long statistics.
*/
public abstract Iterator<LongStatistic> getLongStatistics();
/**
* Get the value of a statistic.
*
+ * @param key the statistic key.
* @return null if the statistic is not being tracked or is not a
* long statistic. The value of the statistic, otherwise.
*/
@@ -159,6 +163,7 @@ public abstract class StorageStatistics {
/**
* Return true if a statistic is being tracked.
*
+ * @param key the statistic key.
* @return True only if the statistic is being tracked.
*/
public abstract boolean isTracked(String key);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Trash.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Trash.java
index e29cb9a4e0e..a58a1a3cb8e 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Trash.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/Trash.java
@@ -43,6 +43,7 @@ public class Trash extends Configured {
/**
* Construct a trash can accessor.
* @param conf a Configuration
+ * @throws IOException raised on errors performing I/O.
*/
public Trash(Configuration conf) throws IOException {
this(FileSystem.get(conf), conf);
@@ -52,6 +53,7 @@ public class Trash extends Configured {
* Construct a trash can accessor for the FileSystem provided.
* @param fs the FileSystem
* @param conf a Configuration
+ * @throws IOException raised on errors performing I/O.
*/
public Trash(FileSystem fs, Configuration conf) throws IOException {
super(conf);
@@ -97,47 +99,74 @@ public class Trash extends Configured {
}
/**
- * Returns whether the trash is enabled for this filesystem
+ * Returns whether the trash is enabled for this filesystem.
+ *
+ * @return true if trash is enabled, false otherwise.
*/
public boolean isEnabled() {
return trashPolicy.isEnabled();
}
/** Move a file or directory to the current trash directory.
+ *
+ * @param path the file or directory to move to the trash.
* @return false if the item is already in the trash or trash is disabled
+ * @throws IOException raised on errors performing I/O.
*/
public boolean moveToTrash(Path path) throws IOException {
return trashPolicy.moveToTrash(path);
}
- /** Create a trash checkpoint. */
+ /**
+ * Create a trash checkpoint.
+ * @throws IOException raised on errors performing I/O.
+ */
public void checkpoint() throws IOException {
trashPolicy.createCheckpoint();
}
- /** Delete old checkpoint(s). */
+ /**
+ * Delete old checkpoint(s).
+ * @throws IOException raised on errors performing I/O.
+ */
public void expunge() throws IOException {
trashPolicy.deleteCheckpoint();
}
- /** Delete all trash immediately. */
+ /**
+ * Delete all trash immediately.
+ * @throws IOException raised on errors performing I/O.
+ */
public void expungeImmediately() throws IOException {
trashPolicy.createCheckpoint();
trashPolicy.deleteCheckpointsImmediately();
}
- /** get the current working directory */
+ /**
+ * get the current working directory.
+ *
+ * @return the current trash directory.
+ * @throws IOException raised on errors performing I/O.
+ */
Path getCurrentTrashDir() throws IOException {
return trashPolicy.getCurrentTrashDir();
}
- /** get the configured trash policy */
+ /**
+ * get the configured trash policy.
+ *
+ * @return the configured TrashPolicy.
+ */
TrashPolicy getTrashPolicy() {
return trashPolicy;
}
- /** Return a {@link Runnable} that periodically empties the trash of all
+ /**
+ * Return a {@link Runnable} that periodically empties the trash of all
* users, intended to be run by the superuser.
+ *
+ * @return a {@link Runnable} which empties the trash.
+ * @throws IOException raised on errors performing I/O.
*/
public Runnable getEmptier() throws IOException {
return trashPolicy.getEmptier();
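
For reference, a minimal sketch (not part of the patch) of the Trash API documented above; the path and the 60-minute interval are illustrative:

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.Trash;

public class TrashExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Trash is disabled unless fs.trash.interval (minutes) is positive.
    conf.setLong("fs.trash.interval", 60);
    Trash trash = new Trash(conf);
    if (trash.isEnabled()) {
      // Returns false if the item is already in the trash.
      boolean moved = trash.moveToTrash(new Path("/user/alice/tmp.txt"));
      System.out.println("moved: " + moved);
    }
  }
}
```
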
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicy.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicy.java
index 64fb81be99e..35e51f9e1cf 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicy.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/TrashPolicy.java
@@ -60,27 +60,34 @@ public abstract class TrashPolicy extends Configured {
/**
* Returns whether the Trash Policy is enabled for this filesystem.
+ *
+ * @return true if trash is enabled, false otherwise.
*/
public abstract boolean isEnabled();
/**
* Move a file or directory to the current trash directory.
+ * @param path the file or directory to move to the trash.
* @return false if the item is already in the trash or trash is disabled
+ * @throws IOException raised on errors performing I/O.
*/
public abstract boolean moveToTrash(Path path) throws IOException;
/**
- * Create a trash checkpoint.
+ * Create a trash checkpoint.
+ * @throws IOException raised on errors performing I/O.
*/
public abstract void createCheckpoint() throws IOException;
/**
* Delete old trash checkpoint(s).
+ * @throws IOException raised on errors performing I/O.
*/
public abstract void deleteCheckpoint() throws IOException;
/**
* Delete all checkpoints immediately, ie empty trash.
+ * @throws IOException raised on errors performing I/O.
*/
public abstract void deleteCheckpointsImmediately() throws IOException;
@@ -94,6 +101,8 @@ public abstract class TrashPolicy extends Configured {
* TrashPolicy#getCurrentTrashDir(Path path).
* It returns the trash location correctly for the path specified no matter
* the path is in encryption zone or not.
+ *
+ * @return the current trash directory.
*/
public abstract Path getCurrentTrashDir();
@@ -102,7 +111,7 @@ public abstract class TrashPolicy extends Configured {
* Policy
* @param path path to be deleted
* @return current trash directory for the path to be deleted
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public Path getCurrentTrashDir(Path path) throws IOException {
throw new UnsupportedOperationException();
@@ -111,6 +120,9 @@ public abstract class TrashPolicy extends Configured {
/**
* Return a {@link Runnable} that periodically empties the trash of all
* users, intended to be run by the superuser.
+ *
+ * @return a {@link Runnable} which empties the trash.
+ * @throws IOException raised on errors performing I/O.
*/
public abstract Runnable getEmptier() throws IOException;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/XAttrCodec.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/XAttrCodec.java
index 3d65275e673..df878d99870 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/XAttrCodec.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/XAttrCodec.java
@@ -67,7 +67,7 @@ public enum XAttrCodec {
* the given string is treated as text.
* @param value string representation of the value.
* @return byte[] the value
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public static byte[] decodeValue(String value) throws IOException {
byte[] result = null;
@@ -102,9 +102,9 @@ public enum XAttrCodec {
* while strings encoded as hexadecimal and base64 are prefixed with
* 0x and 0s, respectively.
* @param value byte[] value
- * @param encoding
+ * @param encoding the XAttrCodec encoding to use.
* @return String string representation of value
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public static String encodeValue(byte[] value, XAttrCodec encoding)
throws IOException {
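
A short sketch of the codec round trip described by these javadocs; the hex string is an illustrative value ("hello"):

```java
import org.apache.hadoop.fs.XAttrCodec;

public class XAttrCodecExample {
  public static void main(String[] args) throws Exception {
    // A 0x prefix selects hex decoding; "68656c6c6f" is "hello".
    byte[] raw = XAttrCodec.decodeValue("0x68656c6c6f");
    // Re-encode the same bytes: base64 is prefixed 0s, text is quoted.
    System.out.println(XAttrCodec.encodeValue(raw, XAttrCodec.BASE64));
    System.out.println(XAttrCodec.encodeValue(raw, XAttrCodec.TEXT));
  }
}
```
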
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/AbstractFSBuilderImpl.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/AbstractFSBuilderImpl.java
index 9d3a46d6332..4256522b2a3 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/AbstractFSBuilderImpl.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/AbstractFSBuilderImpl.java
@@ -340,12 +340,14 @@ public abstract class
/**
* Get all the keys that are set as mandatory keys.
+ * @return mandatory keys.
*/
public Set<String> getMandatoryKeys() {
return Collections.unmodifiableSet(mandatoryKeys);
}
/**
* Get all the keys that are set as optional keys.
+ * @return optional keys.
*/
public Set<String> getOptionalKeys() {
return Collections.unmodifiableSet(optionalKeys);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/AbstractMultipartUploader.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/AbstractMultipartUploader.java
index 416924e18d8..f9ae9f55cc1 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/AbstractMultipartUploader.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/AbstractMultipartUploader.java
@@ -127,7 +127,7 @@ public abstract class AbstractMultipartUploader implements MultipartUploader {
* {@inheritDoc}.
* @param path path to abort uploads under.
* @return a future to -1.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public CompletableFuture<Integer> abortUploadsUnderPath(Path path)
throws IOException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/FutureDataInputStreamBuilderImpl.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/FutureDataInputStreamBuilderImpl.java
index 70e39de7388..833c21ec1a6 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/FutureDataInputStreamBuilderImpl.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/FutureDataInputStreamBuilderImpl.java
@@ -126,6 +126,9 @@ public abstract class FutureDataInputStreamBuilderImpl
/**
* Set the size of the buffer to be used.
+ *
+ * @param bufSize buffer size.
+ * @return FutureDataInputStreamBuilder.
*/
public FutureDataInputStreamBuilder bufferSize(int bufSize) {
bufferSize = bufSize;
@@ -137,6 +140,8 @@ public abstract class FutureDataInputStreamBuilderImpl
* This must be used after the constructor has been invoked to create
* the actual builder: it allows for subclasses to do things after
* construction.
+ *
+ * @return FutureDataInputStreamBuilder.
*/
public FutureDataInputStreamBuilder builder() {
return getThisBuilder();
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/FutureIOSupport.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/FutureIOSupport.java
index f47e5f4fbfb..0a080426c2b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/FutureIOSupport.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/FutureIOSupport.java
@@ -75,6 +75,8 @@ public final class FutureIOSupport {
* See {@link FutureIO#awaitFuture(Future, long, TimeUnit)}.
* @param future future to evaluate
* @param <T> type of the result.
+ * @param timeout timeout duration.
+ * @param unit time unit of the timeout.
* @return the result, if all went well.
* @throws InterruptedIOException future was interrupted
* @throws IOException if something went wrong
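
For context, a minimal sketch of the timeout-aware await that this helper delegates to (FutureIO.awaitFuture); the supplier and 30-second timeout are illustrative:

```java
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.TimeUnit;

import org.apache.hadoop.util.functional.FutureIO;

public class AwaitFutureExample {
  public static void main(String[] args) throws Exception {
    CompletableFuture<String> future =
        CompletableFuture.supplyAsync(() -> "done");
    // Blocks for up to 30s; unwraps ExecutionException into the
    // underlying IOException/RuntimeException where possible.
    String result = FutureIO.awaitFuture(future, 30, TimeUnit.SECONDS);
    System.out.println(result);
  }
}
```
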
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/MultipartUploaderBuilderImpl.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/MultipartUploaderBuilderImpl.java
index 5584e647849..665bcc6a956 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/MultipartUploaderBuilderImpl.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/impl/MultipartUploaderBuilderImpl.java
@@ -88,6 +88,9 @@ public abstract class MultipartUploaderBuilderImpl
/**
* Constructor.
+ *
+ * @param fileSystem the owning FileSystem.
+ * @param p path.
*/
protected MultipartUploaderBuilderImpl(@Nonnull FileSystem fileSystem,
@Nonnull Path p) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclStatus.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclStatus.java
index 25b9ba65904..260ee7e570c 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclStatus.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/AclStatus.java
@@ -185,7 +185,8 @@ public class AclStatus {
/**
* Sets the permission for the file.
- * @param permission
+ * @param permission the permission to set.
+ * @return this builder.
*/
public Builder setPermission(FsPermission permission) {
this.permission = permission;
@@ -224,6 +225,7 @@ public class AclStatus {
/**
* Get the effective permission for the AclEntry
* @param entry AclEntry to get the effective action
+ * @return the effective FsAction.
*/
public FsAction getEffectivePermission(AclEntry entry) {
return getEffectivePermission(entry, permission);
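
For reference, a minimal sketch of the AclStatus builder whose setters are documented above; the owner, group, and permission values are illustrative:

```java
import org.apache.hadoop.fs.permission.AclStatus;
import org.apache.hadoop.fs.permission.FsPermission;

public class AclStatusExample {
  public static void main(String[] args) {
    AclStatus status = new AclStatus.Builder()
        .owner("alice")
        .group("staff")
        .setPermission(FsPermission.valueOf("-rwxr-x---"))
        .build();
    System.out.println(status);
  }
}
```
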
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsAction.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsAction.java
index 97dcf816c16..746e0e1e238 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsAction.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsAction.java
@@ -48,7 +48,8 @@ public enum FsAction {
/**
* Return true if this action implies that action.
- * @param that
+ * @param that the other FsAction.
+ * @return true if this action implies that action, false otherwise.
*/
public boolean implies(FsAction that) {
if (that != null) {
@@ -57,15 +58,26 @@ public enum FsAction {
return false;
}
- /** AND operation. */
+ /**
+ * AND operation.
+ * @param that the other FsAction.
+ * @return the intersection of the two actions.
+ */
public FsAction and(FsAction that) {
return vals[ordinal() & that.ordinal()];
}
- /** OR operation. */
+ /**
+ * OR operation.
+ * @param that the other FsAction.
+ * @return the union of the two actions.
+ */
public FsAction or(FsAction that) {
return vals[ordinal() | that.ordinal()];
}
- /** NOT operation. */
+ /**
+ * NOT operation.
+ * @return the complement of this action.
+ */
public FsAction not() {
return vals[7 - ordinal()];
}
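
Since the enum ordinals encode rwx bits, the operations above are plain bitwise set algebra; a short illustration:

```java
import org.apache.hadoop.fs.permission.FsAction;

public class FsActionExample {
  public static void main(String[] args) {
    FsAction rw = FsAction.READ_WRITE;                 // bits 110
    System.out.println(rw.implies(FsAction.READ));     // true
    System.out.println(rw.and(FsAction.READ_EXECUTE)); // READ (110 & 101)
    System.out.println(rw.or(FsAction.EXECUTE));       // ALL  (110 | 001)
    System.out.println(rw.not());                      // EXECUTE (111 - 110)
  }
}
```
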
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsCreateModes.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsCreateModes.java
index 2bd6f1f3b91..ff3b4f6d65a 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsCreateModes.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsCreateModes.java
@@ -35,7 +35,10 @@ public final class FsCreateModes extends FsPermission {
/**
* Create from unmasked mode and umask.
*
- * If the mode is already an FsCreateModes object, return it.
+ * @param mode the unmasked create mode.
+ * @param umask the umask to apply.
+ * @return the masked mode; if the mode is already
+ * an FsCreateModes object, it is returned unchanged.
*/
public static FsPermission applyUMask(FsPermission mode,
FsPermission umask) {
@@ -47,6 +50,10 @@ public final class FsCreateModes extends FsPermission {
/**
* Create from masked and unmasked modes.
+ *
+ * @param masked the masked mode.
+ * @param unmasked the unmasked mode.
+ * @return a new FsCreateModes instance.
*/
public static FsCreateModes create(FsPermission masked,
FsPermission unmasked) {
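
A brief sketch of the umask application documented above; the 0777 mode and 0022 umask are illustrative values:

```java
import org.apache.hadoop.fs.permission.FsCreateModes;
import org.apache.hadoop.fs.permission.FsPermission;

public class FsCreateModesExample {
  public static void main(String[] args) {
    FsPermission mode = new FsPermission((short) 0777);
    FsPermission umask = new FsPermission((short) 0022);
    // Returns an FsCreateModes carrying both views of the mode.
    FsPermission masked = FsCreateModes.applyUMask(mode, umask);
    System.out.println(masked.getMasked());   // rwxr-xr-x
    System.out.println(masked.getUnmasked()); // rwxrwxrwx
  }
}
```
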
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java
index 51c113af270..33fed1d3039 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/FsPermission.java
@@ -56,7 +56,11 @@ public class FsPermission implements Writable, Serializable,
/** Maximum acceptable length of a permission string to parse */
public static final int MAX_PERMISSION_LENGTH = 10;
- /** Create an immutable {@link FsPermission} object. */
+ /**
+ * Create an immutable {@link FsPermission} object.
+ * @param permission the permission as a short.
+ * @return an immutable FsPermission.
+ */
public static FsPermission createImmutable(short permission) {
return new ImmutableFsPermission(permission);
}
@@ -85,7 +89,7 @@ public class FsPermission implements Writable, Serializable,
/**
* Construct by the given mode.
- * @param mode
+ * @param mode mode.
* @see #toShort()
*/
public FsPermission(short mode) { fromShort(mode); }
@@ -145,13 +149,19 @@ public class FsPermission implements Writable, Serializable,
this(new RawParser(mode).getPermission());
}
- /** Return user {@link FsAction}. */
+ /**
+ * @return the user {@link FsAction}.
+ */
public FsAction getUserAction() {return useraction;}
- /** Return group {@link FsAction}. */
+ /**
+ * @return the group {@link FsAction}.
+ */
public FsAction getGroupAction() {return groupaction;}
- /** Return other {@link FsAction}. */
+ /**
+ * @return the other {@link FsAction}.
+ */
public FsAction getOtherAction() {return otheraction;}
private void set(FsAction u, FsAction g, FsAction o, boolean sb) {
@@ -180,6 +190,7 @@ public class FsPermission implements Writable, Serializable,
/**
* Get masked permission if exists.
+ * @return the masked permission if it exists, null otherwise.
*/
public FsPermission getMasked() {
return null;
@@ -187,6 +198,7 @@ public class FsPermission implements Writable, Serializable,
/**
* Get unmasked permission if exists.
+ * @return the unmasked permission if it exists, null otherwise.
*/
public FsPermission getUnmasked() {
return null;
@@ -194,6 +206,10 @@ public class FsPermission implements Writable, Serializable,
/**
* Create and initialize a {@link FsPermission} from {@link DataInput}.
+ *
+ * @param in data input.
+ * @return the FsPermission read.
+ * @throws IOException raised on errors performing I/O.
*/
public static FsPermission read(DataInput in) throws IOException {
FsPermission p = new FsPermission();
@@ -203,6 +219,7 @@ public class FsPermission implements Writable, Serializable,
/**
* Encode the object to a short.
+ * @return the object encoded as a short.
*/
public short toShort() {
int s = (stickyBit ? 1 << 9 : 0) |
@@ -301,6 +318,9 @@ public class FsPermission implements Writable, Serializable,
* '-' sets bits in the mask.
*
* Octal umask, the specified bits are set in the file mode creation mask.
+ *
+ * @param conf configuration.
+ * @return the umask as an FsPermission.
*/
public static FsPermission getUMask(Configuration conf) {
int umask = DEFAULT_UMASK;
@@ -346,7 +366,11 @@ public class FsPermission implements Writable, Serializable,
}
/**
- * Returns true if the file is encrypted or directory is in an encryption zone
+ * Returns true if the file is encrypted or the directory is in an encryption zone.
+ *
+ * @return true if the file is encrypted or the directory
+ * is in an encryption zone, false otherwise.
+ *
* @deprecated Get encryption bit from the
* {@link org.apache.hadoop.fs.FileStatus} object.
*/
@@ -357,6 +381,9 @@ public class FsPermission implements Writable, Serializable,
/**
* Returns true if the file or directory is erasure coded.
+ *
+ * @return true if the file or directory is
+ * erasure coded, false otherwise.
* @deprecated Get ec bit from the {@link org.apache.hadoop.fs.FileStatus}
* object.
*/
@@ -365,7 +392,11 @@ public class FsPermission implements Writable, Serializable,
return false;
}
- /** Set the user file creation mask (umask) */
+ /**
+ * Set the user file creation mask (umask).
+ * @param conf configuration.
+ * @param umask umask.
+ */
public static void setUMask(Configuration conf, FsPermission umask) {
conf.set(UMASK_LABEL, String.format("%1$03o", umask.toShort()));
}
@@ -379,6 +410,8 @@ public class FsPermission implements Writable, Serializable,
* {@link FsPermission#getDirDefault()} for directory, and use
* {@link FsPermission#getFileDefault()} for file.
* This method is kept for compatibility.
+ *
+ * @return the default FsPermission.
*/
public static FsPermission getDefault() {
return new FsPermission((short)00777);
@@ -386,6 +419,8 @@ public class FsPermission implements Writable, Serializable,
/**
* Get the default permission for directory.
+ *
+ * @return the default directory FsPermission.
*/
public static FsPermission getDirDefault() {
return new FsPermission((short)00777);
@@ -393,6 +428,8 @@ public class FsPermission implements Writable, Serializable,
/**
* Get the default permission for file.
+ *
+ * @return the default file FsPermission.
*/
public static FsPermission getFileDefault() {
return new FsPermission((short)00666);
@@ -400,6 +437,8 @@ public class FsPermission implements Writable, Serializable,
/**
* Get the default permission for cache pools.
+ *
+ * @return the default cache pool FsPermission.
*/
public static FsPermission getCachePoolDefault() {
return new FsPermission((short)00755);
@@ -408,6 +447,7 @@ public class FsPermission implements Writable, Serializable,
/**
* Create a FsPermission from a Unix symbolic permission string
* @param unixSymbolicPermission e.g. "-rw-rw-rw-"
+ * @return the parsed FsPermission.
*/
public static FsPermission valueOf(String unixSymbolicPermission) {
if (unixSymbolicPermission == null) {
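
For context, a minimal sketch tying together the FsPermission pieces documented above (symbolic parsing, umask application, short encoding); the values are illustrative:

```java
import org.apache.hadoop.fs.permission.FsPermission;

public class FsPermissionExample {
  public static void main(String[] args) {
    // Parse a Unix symbolic string back into a permission.
    FsPermission perm = FsPermission.valueOf("-rw-rw-rw-"); // 0666
    FsPermission umask = new FsPermission((short) 0022);
    System.out.println(perm.applyUMask(umask)); // rw-r--r--
    System.out.println(perm.toShort());         // 438, i.e. 0666 in octal
  }
}
```
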
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/PermissionStatus.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/PermissionStatus.java
index 3c3693f613b..be4beb506a6 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/PermissionStatus.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/permission/PermissionStatus.java
@@ -39,7 +39,13 @@ public class PermissionStatus implements Writable {
WritableFactories.setFactory(PermissionStatus.class, FACTORY);
}
- /** Create an immutable {@link PermissionStatus} object. */
+ /**
+ * Create an immutable {@link PermissionStatus} object.
+ * @param user user name.
+ * @param group group name.
+ * @param permission the FsPermission.
+ * @return an immutable PermissionStatus.
+ */
public static PermissionStatus createImmutable(
String user, String group, FsPermission permission) {
return new PermissionStatus(user, group, permission) {
@@ -56,20 +62,35 @@ public class PermissionStatus implements Writable {
private PermissionStatus() {}
- /** Constructor */
+ /**
+ * Constructor.
+ *
+ * @param user user name.
+ * @param group group name.
+ * @param permission the FsPermission.
+ */
public PermissionStatus(String user, String group, FsPermission permission) {
username = user;
groupname = group;
this.permission = permission;
}
- /** Return user name */
+ /**
+ * Return user name.
+ * @return user name.
+ */
public String getUserName() {return username;}
- /** Return group name */
+ /**
+ * Return group name.
+ * @return group name.
+ */
public String getGroupName() {return groupname;}
- /** Return permission */
+ /**
+ * Return permission.
+ * @return the FsPermission.
+ */
public FsPermission getPermission() {return permission;}
@Override
@@ -86,6 +107,9 @@ public class PermissionStatus implements Writable {
/**
* Create and initialize a {@link PermissionStatus} from {@link DataInput}.
+ * @param in data input.
+ * @return the PermissionStatus read.
+ * @throws IOException raised on errors performing I/O.
*/
public static PermissionStatus read(DataInput in) throws IOException {
PermissionStatus p = new PermissionStatus();
@@ -95,6 +119,11 @@ public class PermissionStatus implements Writable {
/**
* Serialize a {@link PermissionStatus} from its base components.
+ * @param out the data output to write to.
+ * @param username user name.
+ * @param groupname group name.
+ * @param permission FsPermission.
+ * @throws IOException raised on errors performing I/O.
*/
public static void write(DataOutput out,
String username,
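
For reference, a minimal sketch of the PermissionStatus accessors documented above; the user, group, and permission values are illustrative:

```java
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.fs.permission.PermissionStatus;

public class PermissionStatusExample {
  public static void main(String[] args) {
    PermissionStatus status = PermissionStatus.createImmutable(
        "alice", "staff", FsPermission.valueOf("-rwxr-x---"));
    System.out.println(status.getUserName() + ":" + status.getGroupName()
        + " " + status.getPermission());
  }
}
```
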
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java
index 0bdb47730a9..7858238ee71 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/Command.java
@@ -77,7 +77,11 @@ abstract public class Command extends Configured {
err = System.err;
}
- /** Constructor */
+ /**
+ * Constructor.
+ *
+ * @param conf configuration.
+ */
protected Command(Configuration conf) {
super(conf);
}
@@ -109,7 +113,7 @@ abstract public class Command extends Configured {
* Execute the command on the input path data. Commands can override to make
* use of the resolved filesystem.
* @param pathData The input path with resolved filesystem
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
protected void run(PathData pathData) throws IOException {
run(pathData.path);
@@ -136,11 +140,19 @@ abstract public class Command extends Configured {
return exitCode;
}
- /** sets the command factory for later use */
+ /**
+ * sets the command factory for later use.
+ * @param factory the command factory.
+ */
public void setCommandFactory(CommandFactory factory) {
this.commandFactory = factory;
}
- /** retrieves the command factory */
+
+ /**
+ * retrieves the command factory.
+ *
+ * @return command factory.
+ */
protected CommandFactory getCommandFactory() {
return this.commandFactory;
}
@@ -201,7 +213,7 @@ abstract public class Command extends Configured {
* IllegalArgumentException is thrown, the FsShell object will print the
* short usage of the command.
* @param args the command line arguments
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
protected void processOptions(LinkedList<String> args) throws IOException {}
@@ -211,7 +223,7 @@ abstract public class Command extends Configured {
* {@link #expandArguments(LinkedList)} and pass the resulting list to
* {@link #processArguments(LinkedList)}
* @param args the list of argument strings
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
protected void processRawArguments(LinkedList<String> args)
throws IOException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java
index 678225f81e0..69a418c1925 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/CommandWithDestination.java
@@ -119,6 +119,8 @@ abstract class CommandWithDestination extends FsCommand {
* owner, group and permission information of the source
* file will be preserved as far as target {@link FileSystem}
* implementation allows.
+ *
+ * @param preserve whether to preserve attributes.
*/
protected void setPreserve(boolean preserve) {
if (preserve) {
@@ -175,6 +177,7 @@ abstract class CommandWithDestination extends FsCommand {
* The last arg is expected to be a local path, if only one argument is
* given then the destination will be the current directory
* @param args is the list of arguments
+ * @throws IOException raised on errors performing I/O.
*/
protected void getLocalDestination(LinkedList<PathData> args)
throws IOException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java
index 2071a16799a..da99ac21256 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/PathData.java
@@ -610,10 +611,11 @@ public class PathData implements Comparable<PathData> {
/**
* Open a file for sequential IO.
- *
+ *
* This uses FileSystem.openFile() to request sequential IO;
* the file status is also passed in.
* Filesystems may use to optimize their IO.
+ *
* @return an input stream
* @throws IOException failure
*/
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/BaseExpression.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/BaseExpression.java
index 5069d2d34e5..cd9bbe2bc88 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/BaseExpression.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/BaseExpression.java
@@ -38,12 +38,18 @@ public abstract class BaseExpression implements Expression, Configurable {
private String[] usage = { "Not yet implemented" };
private String[] help = { "Not yet implemented" };
- /** Sets the usage text for this {@link Expression} */
+ /**
+ * Sets the usage text for this {@link Expression}.
+ * @param usage the usage text array.
+ */
protected void setUsage(String[] usage) {
this.usage = usage;
}
- /** Sets the help text for this {@link Expression} */
+ /**
+ * Sets the help text for this {@link Expression}.
+ * @param help the help text array.
+ */
protected void setHelp(String[] help) {
this.help = help;
}
@@ -92,7 +98,10 @@ public abstract class BaseExpression implements Expression, Configurable {
/** Children of this expression. */
private LinkedList<Expression> children = new LinkedList<Expression>();
- /** Return the options to be used by this expression. */
+ /**
+ * Return the options to be used by this expression.
+ * @return options.
+ */
protected FindOptions getOptions() {
return (this.options == null) ? new FindOptions() : this.options;
}
@@ -265,6 +274,7 @@ public abstract class BaseExpression implements Expression, Configurable {
* @param depth
* current depth in the process directories
* @return FileStatus
+ * @throws IOException raised on errors performing I/O.
*/
protected FileStatus getFileStatus(PathData item, int depth)
throws IOException {
@@ -285,6 +295,8 @@ public abstract class BaseExpression implements Expression, Configurable {
* @param item
* PathData
* @return Path
+ *
+ * @throws IOException raised on errors performing I/O.
*/
protected Path getPath(PathData item) throws IOException {
return item.path;
@@ -295,6 +307,7 @@ public abstract class BaseExpression implements Expression, Configurable {
*
* @param item PathData
* @return FileSystem
+ * @throws IOException raised on errors performing I/O.
*/
protected FileSystem getFileSystem(PathData item) throws IOException {
return item.fs;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Expression.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Expression.java
index ccad631028c..353fe685cc9 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Expression.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Expression.java
@@ -30,13 +30,15 @@ public interface Expression {
/**
* Set the options for this expression, called once before processing any
* items.
+ * @param options the find options.
+ * @throws IOException raised on errors performing I/O.
*/
public void setOptions(FindOptions options) throws IOException;
/**
* Prepares the expression for execution, called once after setting options
* and before processing any options.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public void prepare() throws IOException;
@@ -46,13 +48,14 @@ public interface Expression {
* @param item {@link PathData} item to be processed
* @param depth distance of the item from the command line argument
* @return {@link Result} of applying the expression to the item
+ * @throws IOException raised on errors performing I/O.
*/
public Result apply(PathData item, int depth) throws IOException;
/**
* Finishes the expression, called once after processing all items.
*
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public void finish() throws IOException;
@@ -76,15 +79,21 @@ public interface Expression {
/**
* Indicates whether this expression performs an action, i.e. provides output
* back to the user.
+ * @return true if this expression is an action, false otherwise.
*/
public boolean isAction();
- /** Identifies the expression as an operator rather than a primary. */
+ /**
+ * Identifies the expression as an operator rather than a primary.
+ * @return true if this expression is an operator, false otherwise.
+ */
public boolean isOperator();
/**
* Returns the precedence of this expression
* (only applicable to operators).
+ *
+ * @return precedence.
*/
public int getPrecedence();
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/FindOptions.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/FindOptions.java
index b0f1be5c35c..c6051862305 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/FindOptions.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/FindOptions.java
@@ -264,6 +264,7 @@ public class FindOptions {
/**
* Return the {@link Configuration} return configuration {@link Configuration}
+ * @return configuration.
*/
public Configuration getConfiguration() {
return this.configuration;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Result.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Result.java
index 2ef9cb4a801..a242681acd0 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Result.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/shell/find/Result.java
@@ -35,23 +35,36 @@ public final class Result {
this.descend = recurse;
}
- /** Should further directories be descended. */
+ /**
+ * Should further directories be descended.
+ * @return true if further directories should be descended, false otherwise.
+ */
public boolean isDescend() {
return this.descend;
}
- /** Should processing continue. */
+ /**
+ * Should processing continue.
+ * @return true if processing should continue, false otherwise.
+ */
public boolean isPass() {
return this.success;
}
- /** Returns the combination of this and another result. */
+ /**
+ * Returns the combination of this and another result.
+ * @param other the other result.
+ * @return the combined result.
+ */
public Result combine(Result other) {
return new Result(this.isPass() && other.isPass(), this.isDescend()
&& other.isDescend());
}
- /** Negate this result. */
+ /**
+ * Negate this result.
+ * @return the negated result.
+ */
public Result negate() {
return new Result(!this.isPass(), this.isDescend());
}
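
A short sketch of how these results compose, assuming the class's predefined PASS/STOP constants (pass-and-descend, pass-without-descend):

```java
import org.apache.hadoop.fs.shell.find.Result;

public class ResultExample {
  public static void main(String[] args) {
    // combine() ANDs both the pass flag and the descend flag.
    Result combined = Result.PASS.combine(Result.STOP);
    System.out.println(combined.isPass());          // true
    System.out.println(combined.isDescend());       // false
    System.out.println(combined.negate().isPass()); // false
  }
}
```
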
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/IOStatisticsSnapshot.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/IOStatisticsSnapshot.java
index 63d37e97c98..88606eb4b30 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/IOStatisticsSnapshot.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/IOStatisticsSnapshot.java
@@ -53,7 +53,7 @@ import static org.apache.hadoop.fs.statistics.impl.IOStatisticsBinding.snapshotM
* deserialized. If for some reason this is required, use
* {@link #requiredSerializationClasses()} to get the list of classes
* used when deserializing instances of this object.
- *
+ *
*
* It is annotated for correct serializations with jackson2.
*
@@ -238,6 +238,8 @@ public final class IOStatisticsSnapshot
/**
* Serialize by converting each map to a TreeMap, and saving that
* to the stream.
+ * @param s ObjectOutputStream.
+ * @throws IOException raised on errors performing I/O.
*/
private synchronized void writeObject(ObjectOutputStream s)
throws IOException {
@@ -253,6 +255,10 @@ public final class IOStatisticsSnapshot
/**
* Deserialize by loading each TreeMap, and building concurrent
* hash maps from them.
+ *
+ * @param s ObjectInputStream.
+ * @throws IOException raised on errors performing I/O.
+ * @throws ClassNotFoundException if a serialized class cannot be found.
*/
private void readObject(final ObjectInputStream s)
throws IOException, ClassNotFoundException {
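
For context, a minimal round-trip sketch of the Java serialization path these private writeObject/readObject hooks implement; an empty snapshot is used for illustration:

```java
import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;

import org.apache.hadoop.fs.statistics.IOStatisticsSnapshot;

public class SnapshotRoundTrip {
  public static void main(String[] args) throws Exception {
    IOStatisticsSnapshot snapshot = new IOStatisticsSnapshot();
    ByteArrayOutputStream bytes = new ByteArrayOutputStream();
    try (ObjectOutputStream oos = new ObjectOutputStream(bytes)) {
      oos.writeObject(snapshot); // saved as TreeMap copies
    }
    try (ObjectInputStream ois = new ObjectInputStream(
        new ByteArrayInputStream(bytes.toByteArray()))) {
      IOStatisticsSnapshot restored = (IOStatisticsSnapshot) ois.readObject();
      System.out.println(restored);
    }
  }
}
```
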
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/IOStatisticsSupport.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/IOStatisticsSupport.java
index 75977047c0f..bb4d9a44587 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/IOStatisticsSupport.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/IOStatisticsSupport.java
@@ -71,6 +71,7 @@ public final class IOStatisticsSupport {
* Returns null if the source isn't of the write type
* or the return value of
* {@link IOStatisticsSource#getIOStatistics()} was null.
+ * @param source the statistics source.
* @return an IOStatistics instance or null
*/
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/MeanStatistic.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/MeanStatistic.java
index d9ff0c25c6a..369db496543 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/MeanStatistic.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/MeanStatistic.java
@@ -207,6 +207,7 @@ public final class MeanStatistic implements Serializable, Cloneable {
/**
* Add another MeanStatistic.
* @param other other value
+ * @return the updated mean statistic.
*/
public synchronized MeanStatistic add(final MeanStatistic other) {
if (other.isEmpty()) {
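
A brief sketch of merging two means via add(); the sample counts and sums are illustrative:

```java
import org.apache.hadoop.fs.statistics.MeanStatistic;

public class MeanStatisticExample {
  public static void main(String[] args) {
    MeanStatistic a = new MeanStatistic(4, 100); // 4 samples summing to 100
    MeanStatistic b = new MeanStatistic(1, 50);
    a.add(b);                     // now 5 samples summing to 150
    System.out.println(a.mean()); // 30.0
  }
}
```
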
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/impl/IOStatisticsBinding.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/impl/IOStatisticsBinding.java
index c45dfc21a1b..6a5d01fb3b0 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/impl/IOStatisticsBinding.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/statistics/impl/IOStatisticsBinding.java
@@ -141,6 +141,7 @@ public final class IOStatisticsBinding {
/**
* Convert entry values to the string format used in logging.
*
+ * @param <E> type of values.
* @param name statistic name
* @param value stat value
* @return formatted string
@@ -178,6 +179,8 @@ public final class IOStatisticsBinding {
/**
* A passthrough copy operation suitable for immutable
* types, including numbers.
+ *
+ * @param <E> type of values.
* @param src source object
* @return the source object
*/
@@ -437,6 +440,7 @@ public final class IOStatisticsBinding {
* @param input input callable.
* @param <B> return type.
* @return the result of the operation.
+ * @throws IOException raised on errors performing I/O.
*/
public static <B> B trackDuration(
DurationTrackerFactory factory,
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/store/DataBlocks.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/store/DataBlocks.java
index d9d3850ef4e..c70d0ee91e1 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/store/DataBlocks.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/store/DataBlocks.java
@@ -107,6 +107,7 @@ public final class DataBlocks {
* @param len number of bytes to be written.
* @throws NullPointerException for a null buffer
* @throws IndexOutOfBoundsException if indices are out of range
+ * @throws IOException raised on errors performing I/O.
*/
public static void validateWriteArgs(byte[] b, int off, int len)
throws IOException {
@@ -287,6 +288,7 @@ public final class DataBlocks {
* @param limit limit of the block.
* @param statistics stats to work with
* @return a new block.
+ * @throws IOException raised on errors performing I/O.
*/
public abstract DataBlock create(long index, int limit,
BlockUploadStatistics statistics)
@@ -482,6 +484,8 @@ public final class DataBlocks {
/**
* Inner close logic for subclasses to implement.
+ *
+ * @throws IOException raised on errors performing I/O.
*/
protected void innerClose() throws IOException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/store/audit/AuditingFunctions.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/store/audit/AuditingFunctions.java
index acc82766be1..9ad727f5a2d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/store/audit/AuditingFunctions.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/store/audit/AuditingFunctions.java
@@ -86,6 +86,8 @@ public final class AuditingFunctions {
* activates and deactivates the span around the inner one.
* @param auditSpan audit span
* @param operation operation
+ * @param <T> Generics Type T.
+ * @param <R> Generics Type R.
* @return a new invocation.
*/
public static <T, R> FunctionRaisingIOE<T, R> withinAuditSpan(
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ConfigUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ConfigUtil.java
index ead2a365f3a..c9ee5e232d9 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ConfigUtil.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ConfigUtil.java
@@ -48,7 +48,7 @@ public class ConfigUtil {
/**
* Add a link to the config for the specified mount table
* @param conf - add the link to this conf
- * @param mountTableName
+ * @param mountTableName the mount table name.
* @param src - the src path name
* @param target - the target URI link
*/
@@ -71,9 +71,10 @@ public class ConfigUtil {
/**
* Add a LinkMergeSlash to the config for the specified mount table.
- * @param conf
- * @param mountTableName
- * @param target
+ *
+ * @param conf configuration.
+ * @param mountTableName the mount table name.
+ * @param target the target URI.
*/
public static void addLinkMergeSlash(Configuration conf,
final String mountTableName, final URI target) {
@@ -83,8 +84,9 @@ public class ConfigUtil {
/**
* Add a LinkMergeSlash to the config for the default mount table.
- * @param conf
- * @param target
+ *
+ * @param conf configuration.
+ * @param target the target URI.
*/
public static void addLinkMergeSlash(Configuration conf, final URI target) {
addLinkMergeSlash(conf, getDefaultMountTableName(conf), target);
@@ -92,9 +94,10 @@ public class ConfigUtil {
/**
* Add a LinkFallback to the config for the specified mount table.
- * @param conf
- * @param mountTableName
- * @param target
+ *
+ * @param conf configuration.
+ * @param mountTableName the mount table name.
+ * @param target the target URI.
*/
public static void addLinkFallback(Configuration conf,
final String mountTableName, final URI target) {
@@ -104,8 +107,9 @@ public class ConfigUtil {
/**
* Add a LinkFallback to the config for the default mount table.
- * @param conf
- * @param target
+ *
+ * @param conf configuration.
+ * @param target the target URI.
*/
public static void addLinkFallback(Configuration conf, final URI target) {
addLinkFallback(conf, getDefaultMountTableName(conf), target);
@@ -113,9 +117,10 @@ public class ConfigUtil {
/**
* Add a LinkMerge to the config for the specified mount table.
- * @param conf
- * @param mountTableName
- * @param targets
+ *
+ * @param conf configuration.
+ * @param mountTableName the mount table name.
+ * @param targets the target URIs.
*/
public static void addLinkMerge(Configuration conf,
final String mountTableName, final URI[] targets) {
@@ -125,8 +130,9 @@ public class ConfigUtil {
/**
* Add a LinkMerge to the config for the default mount table.
- * @param conf
- * @param targets
+ *
+ * @param conf configuration.
+ * @param targets the array of target URIs.
*/
public static void addLinkMerge(Configuration conf, final URI[] targets) {
addLinkMerge(conf, getDefaultMountTableName(conf), targets);
@@ -134,6 +140,12 @@ public class ConfigUtil {
/**
* Add nfly link to configuration for the given mount table.
+ *
+ * @param conf configuration.
+ * @param mountTableName the mount table name.
+ * @param src the source path.
+ * @param settings the nfly settings.
+ * @param targets the comma-separated target URIs.
*/
public static void addLinkNfly(Configuration conf, String mountTableName,
String src, String settings, final String targets) {
@@ -144,12 +156,13 @@ public class ConfigUtil {
}
/**
+ * Add nfly link to configuration for the given mount table.
*
- * @param conf
- * @param mountTableName
- * @param src
- * @param settings
- * @param targets
+ * @param conf configuration.
+ * @param mountTableName the mount table name.
+ * @param src the source path.
+ * @param settings the nfly settings.
+ * @param targets the target URIs.
*/
public static void addLinkNfly(Configuration conf, String mountTableName,
String src, String settings, final URI ... targets) {
@@ -202,6 +215,7 @@ public class ConfigUtil {
* Add config variable for homedir the specified mount table
* @param conf - add to this conf
* @param homedir - the home dir path starting with slash
+ * @param mountTableName - the mount table name.
*/
public static void setHomeDirConf(final Configuration conf,
final String mountTableName, final String homedir) {
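
For reference, a minimal sketch of wiring a viewfs mount table with ConfigUtil; the mount point and the hdfs://nn1 URIs are illustrative:

```java
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.viewfs.ConfigUtil;

public class ViewFsConfigExample {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    // Mount /data of the default mount table onto an HDFS directory.
    ConfigUtil.addLink(conf, "/data", new URI("hdfs://nn1/data"));
    // Paths matching no mount point fall back to this filesystem.
    ConfigUtil.addLinkFallback(conf, new URI("hdfs://nn1/"));
  }
}
```
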
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/FsGetter.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/FsGetter.java
index c72baac25fb..f723f238e19 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/FsGetter.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/FsGetter.java
@@ -34,6 +34,10 @@ public class FsGetter {
/**
* Gets new file system instance of given uri.
+ * @param uri uri.
+ * @param conf configuration.
+ * @throws IOException raised on errors performing I/O.
+ * @return file system.
*/
public FileSystem getNewInstance(URI uri, Configuration conf)
throws IOException {
@@ -42,6 +46,11 @@ public class FsGetter {
/**
* Gets file system instance of given uri.
+ *
+ * @param uri uri.
+ * @param conf configuration.
+ * @throws IOException raised on errors performing I/O.
+ * @return FileSystem.
*/
public FileSystem get(URI uri, Configuration conf) throws IOException {
return FileSystem.get(uri, conf);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/InodeTree.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/InodeTree.java
index a90084ad8f4..5360d55e106 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/InodeTree.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/InodeTree.java
@@ -59,7 +59,7 @@ import org.slf4j.LoggerFactory;
* @param <T> is AbstractFileSystem or FileSystem
*
* The two main methods are
- * {@link #InodeTree(Configuration, String)} // constructor
+ * {@link #InodeTree(Configuration, String, URI, boolean)} // constructor
* {@link #resolve(String, boolean)}
*/
@@ -325,8 +325,8 @@ public abstract class InodeTree {
* A merge dir link is a merge (junction) of links to dirs:
* example : merge of 2 dirs
- * /users -> hdfs:nn1//users
- * /users -> hdfs:nn2//users
+ * /users -&gt; hdfs:nn1//users
+ * /users -&gt; hdfs:nn2//users
*
* For a merge, each target is checked to be dir when created but if target
* is changed later it is then ignored (a dir with null entries)
@@ -364,6 +364,8 @@ public abstract class InodeTree {
/**
* Get the target of the link. If a merge link then it returned
* as "," separated URI list.
+ *
+ * @return the path.
*/
public Path getTargetLink() {
StringBuilder result = new StringBuilder(targetDirLinkList[0].toString());
@@ -387,7 +389,7 @@ public abstract class InodeTree {
/**
* Get the instance of FileSystem to use, creating one if needed.
* @return An Initialized instance of T
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public T getTargetFileSystem() throws IOException {
if (targetFileSystem != null) {
@@ -500,7 +502,7 @@ public abstract class InodeTree {
/**
* The user of this class must subclass and implement the following
* 3 abstract methods.
- * @throws IOException
+ * @return Function.
*/
protected abstract Function<URI, T> initAndGetTargetFs();
@@ -591,14 +593,21 @@ public abstract class InodeTree {
}
/**
- * Create Inode Tree from the specified mount-table specified in Config
- * @param config - the mount table keys are prefixed with
- * FsConstants.CONFIG_VIEWFS_PREFIX
- * @param viewName - the name of the mount table - if null use defaultMT name
- * @throws UnsupportedFileSystemException
- * @throws URISyntaxException
- * @throws FileAlreadyExistsException
- * @throws IOException
+ * Create Inode Tree from the specified mount-table specified in Config.
+ *
+ * @param config the mount table keys are prefixed with
+ * FsConstants.CONFIG_VIEWFS_PREFIX.
+ * @param viewName the name of the mount table
+ * if null use defaultMT name.
+ * @param theUri the URI of the file system.
+ * @param initingUriAsFallbackOnNoMounts whether to use theUri as the
+ * fallback target when no mount points are configured.
+ * @throws UnsupportedFileSystemException file system for uri is
+ * not found.
+ * @throws URISyntaxException if the URI does not have an authority
+ * it is badly formed.
+ * @throws FileAlreadyExistsException there is a file at the path specified
+ * or is discovered on one of its ancestors.
+ * @throws IOException raised on errors performing I/O.
*/
protected InodeTree(final Configuration config, final String viewName,
final URI theUri, boolean initingUriAsFallbackOnNoMounts)
@@ -872,9 +881,9 @@ public abstract class InodeTree {
/**
* Resolve the pathname p relative to root InodeDir.
* @param p - input path
- * @param resolveLastComponent
+ * @param resolveLastComponent whether to resolve the last path component.
* @return ResolveResult which allows further resolution of the remaining path
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public ResolveResult<T> resolve(final String p, final boolean resolveLastComponent)
throws IOException {
@@ -996,14 +1005,14 @@ public abstract class InodeTree {
/**
* Walk through all regex mount points to see
* whether the path match any regex expressions.
- * E.g. link: ^/user/(?<username>\\w+) => s3://$user.apache.com/_${user}
+ * E.g. link: ^/user/(?&lt;username&gt;\\w+) => s3://$user.apache.com/_${user}
* srcPath: is /user/hadoop/dir1
* resolveLastComponent: true
* then return value is s3://hadoop.apache.com/_hadoop
*
- * @param srcPath
- * @param resolveLastComponent
- * @return
+ * @param srcPath the source path to resolve.
+ * @param resolveLastComponent whether to resolve the last path component.
+ * @return ResolveResult.
*/
protected ResolveResult<T> tryResolveInRegexMountpoint(final String srcPath,
final boolean resolveLastComponent) {
@@ -1021,7 +1030,7 @@ public abstract class InodeTree {
* Build resolve result.
* Here's an example
* Mountpoint: fs.viewfs.mounttable.mt
- * .linkRegex.replaceresolveddstpath:_:-#.^/user/(?<username>\w+)
+ * .linkRegex.replaceresolveddstpath:_:-#.^/user/(?&lt;username&gt;\w+)
* Value: /targetTestRoot/$username
* Dir path to test:
* viewfs://mt/user/hadoop_user1/hadoop_dir1
@@ -1030,6 +1039,10 @@ public abstract class InodeTree {
* targetOfResolvedPathStr: /targetTestRoot/hadoop-user1
* remainingPath: /hadoop_dir1
*
+ * @param resultKind resultKind.
+ * @param resolvedPathStr resolvedPathStr.
+ * @param targetOfResolvedPathStr targetOfResolvedPathStr.
+ * @param remainingPath remainingPath.
* @return targetFileSystem or null on exceptions.
*/
protected ResolveResult<T> buildResolveResultForRegexMountPoint(
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/MountTableConfigLoader.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/MountTableConfigLoader.java
index bc2c3ea93c5..5fcd77cd291 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/MountTableConfigLoader.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/MountTableConfigLoader.java
@@ -38,6 +38,7 @@ public interface MountTableConfigLoader {
* a directory in the case of multiple versions of mount-table
* files(Recommended option).
* @param conf - Configuration object to add mount table.
+ * @throws IOException raised on errors performing I/O.
*/
void load(String mountTableConfigPath, Configuration conf)
throws IOException;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java
index 8f4631b0e83..da3955b125e 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystem.java
@@ -107,6 +107,8 @@ public class ViewFileSystem extends FileSystem {
/**
* Gets file system creator instance.
+ *
+ * @return fs getter.
*/
protected FsGetter fsGetter() {
return new FsGetter();
@@ -273,7 +275,7 @@ public class ViewFileSystem extends FileSystem {
* {@link FileSystem#createFileSystem(URI, Configuration)}
*
* After this constructor is called initialize() is called.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public ViewFileSystem() throws IOException {
ugi = UserGroupInformation.getCurrentUser();
@@ -382,10 +384,10 @@ public class ViewFileSystem extends FileSystem {
}
/**
- * Convenience Constructor for apps to call directly
+ * Convenience Constructor for apps to call directly.
* @param theUri which must be that of ViewFileSystem
- * @param conf
- * @throws IOException
+ * @param conf configuration.
+ * @throws IOException raised on errors performing I/O.
*/
ViewFileSystem(final URI theUri, final Configuration conf)
throws IOException {
@@ -394,9 +396,9 @@ public class ViewFileSystem extends FileSystem {
}
/**
- * Convenience Constructor for apps to call directly
- * @param conf
- * @throws IOException
+ * Convenience Constructor for apps to call directly.
+ * @param conf configuration.
+ * @throws IOException raised on errors performing I/O.
*/
public ViewFileSystem(final Configuration conf) throws IOException {
this(FsConstants.VIEWFS_URI, conf);
@@ -1314,7 +1316,7 @@ public class ViewFileSystem extends FileSystem {
* Constants#CONFIG_VIEWFS_LINK_MERGE_SLASH} is supported and is a valid
* mount point. Else, throw NotInMountpointException.
*
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
@Override
public long getUsed() throws IOException {
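A sketch of consuming such a mount table through ViewFileSystem, using the
FsConstants.VIEWFS_URI root referenced by the convenience constructors above;
the hdfs target is illustrative:

    import java.net.URI;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileStatus;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.FsConstants;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.fs.viewfs.ConfigUtil;

    public class ViewFsClientSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        ConfigUtil.addLink(conf, "/user", URI.create("hdfs://nn1/user"));
        FileSystem viewFs = FileSystem.get(FsConstants.VIEWFS_URI, conf);
        for (FileStatus st : viewFs.listStatus(new Path("/"))) {
          System.out.println(st.getPath()); // mount points show as top-level dirs
        }
      }
    }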
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystemOverloadScheme.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystemOverloadScheme.java
index e91b66512d5..1c25a9536e1 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystemOverloadScheme.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystemOverloadScheme.java
@@ -139,6 +139,8 @@ public class ViewFileSystemOverloadScheme extends ViewFileSystem {
/**
* Sets whether to add fallback automatically when no mount points found.
+ *
+ * @param addAutoFallbackOnNoMounts addAutoFallbackOnNoMounts.
*/
public void setSupportAutoAddingFallbackOnNoMounts(
boolean addAutoFallbackOnNoMounts) {
@@ -320,7 +322,8 @@ public class ViewFileSystemOverloadScheme extends ViewFileSystem {
*
* @param path - fs uri path
* @param conf - configuration
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
+ * @return file system.
*/
public FileSystem getRawFileSystem(Path path, Configuration conf)
throws IOException {
@@ -339,6 +342,11 @@ public class ViewFileSystemOverloadScheme extends ViewFileSystem {
/**
* Gets the mount path info, which contains the target file system and
* remaining path to pass to the target file system.
+ *
+ * @param path the path.
+ * @param conf configuration.
+ * @return mount path info.
+ * @throws IOException raised on errors performing I/O.
*/
public MountPathInfo<FileSystem> getMountPathInfo(Path path,
Configuration conf) throws IOException {
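A sketch of the resolution flow getMountPathInfo documents. The accessor
names getTargetFs()/getPathOnTarget() are recalled from the MountPathInfo
class rather than from this hunk, so treat them as assumptions:

    import java.io.IOException;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.fs.viewfs.ViewFileSystemOverloadScheme;

    class MountPathInfoSketch {
      static void resolve(ViewFileSystemOverloadScheme fs, Configuration conf)
          throws IOException {
        ViewFileSystemOverloadScheme.MountPathInfo<FileSystem> info =
            fs.getMountPathInfo(new Path("/data/file"), conf);
        FileSystem target = info.getTargetFs();    // fs backing the mount point
        String remaining = info.getPathOnTarget(); // path to hand to that fs
        System.out.println(target.getUri() + " " + remaining);
      }
    }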
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystemUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystemUtil.java
index f486a10b4c8..c9c6767097b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystemUtil.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFileSystemUtil.java
@@ -44,7 +44,7 @@ public final class ViewFileSystemUtil {
/**
* Check if the FileSystem is a ViewFileSystem.
*
- * @param fileSystem
+ * @param fileSystem file system.
* @return true if the fileSystem is ViewFileSystem
*/
public static boolean isViewFileSystem(final FileSystem fileSystem) {
@@ -54,7 +54,7 @@ public final class ViewFileSystemUtil {
/**
* Check if the FileSystem is a ViewFileSystemOverloadScheme.
*
- * @param fileSystem
+ * @param fileSystem file system.
* @return true if the fileSystem is ViewFileSystemOverloadScheme
*/
public static boolean isViewFileSystemOverloadScheme(
@@ -101,6 +101,7 @@ public final class ViewFileSystemUtil {
* @param fileSystem - ViewFileSystem on which mount point exists
* @param path - URI for which FsStatus is requested
* @return Map of ViewFsMountPoint and FsStatus
+ * @throws IOException raised on errors performing I/O.
*/
public static Map<MountPoint, FsStatus> getStatus(
FileSystem fileSystem, Path path) throws IOException {
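The getStatus contract above in use; MountPoint.getMountedOnPath() and
FsStatus.getUsed() are the usual accessors but are assumptions here:

    import java.io.IOException;
    import java.util.Map;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.FsStatus;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.fs.viewfs.ViewFileSystem;
    import org.apache.hadoop.fs.viewfs.ViewFileSystemUtil;

    class MountStatusSketch {
      static void printUsage(FileSystem fs) throws IOException {
        if (!ViewFileSystemUtil.isViewFileSystem(fs)) {
          return; // the guard documented above
        }
        Map<ViewFileSystem.MountPoint, FsStatus> statuses =
            ViewFileSystemUtil.getStatus(fs, new Path("/"));
        for (Map.Entry<ViewFileSystem.MountPoint, FsStatus> e
            : statuses.entrySet()) {
          System.out.println(e.getKey().getMountedOnPath()
              + " used=" + e.getValue().getUsed());
        }
      }
    }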
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFs.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFs.java
index d98082fe5c1..5f54c9cdd06 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFs.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/fs/viewfs/ViewFs.java
@@ -909,7 +909,7 @@ public class ViewFs extends AbstractFileSystem {
*
* @param src file or directory path.
* @return storage policy for give file.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public BlockStoragePolicySpi getStoragePolicy(final Path src)
throws IOException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java
index 041f8cab49c..edd15af534a 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ActiveStandbyElector.java
@@ -91,6 +91,8 @@ public class ActiveStandbyElector implements StatCallback, StringCallback {
*
* Callback implementations are expected to manage their own
* timeouts (e.g. when making an RPC to a remote node).
+ *
+ * @throws ServiceFailedException if the service fails to become active.
*/
void becomeActive() throws ServiceFailedException;
@@ -119,6 +121,8 @@ public class ActiveStandbyElector implements StatCallback, StringCallback {
* If there is any fatal error (e.g. wrong ACL's, unexpected Zookeeper
* errors or Zookeeper persistent unavailability) then notifyFatalError is
* called to notify the app about it.
+ *
+ * @param errorMessage error message.
*/
void notifyFatalError(String errorMessage);
@@ -204,8 +208,12 @@ public class ActiveStandbyElector implements StatCallback, StringCallback {
* ZK connection
* @param app
* reference to callback interface object
- * @throws IOException
+ * @param maxRetryNum the maximum number of retries.
+ * @throws IOException raised on errors performing I/O.
* @throws HadoopIllegalArgumentException
+ * if valid data is not supplied.
+ * @throws KeeperException
+ * other zookeeper operation errors.
*/
public ActiveStandbyElector(String zookeeperHostPorts,
int zookeeperSessionTimeout, String parentZnodeName, List<ACL> acl,
@@ -245,8 +253,13 @@ public class ActiveStandbyElector implements StatCallback, StringCallback {
* reference to callback interface object
* @param failFast
* whether need to add the retry when establishing ZK connection.
+ * @param maxRetryNum the maximum number of retries.
* @throws IOException
+ * raised on errors performing I/O.
* @throws HadoopIllegalArgumentException
+ * if valid data is not supplied.
+ * @throws KeeperException
+ * other zookeeper operation errors.
*/
public ActiveStandbyElector(String zookeeperHostPorts,
int zookeeperSessionTimeout, String parentZnodeName, List<ACL> acl,
@@ -312,6 +325,8 @@ public class ActiveStandbyElector implements StatCallback, StringCallback {
/**
* @return true if the configured parent znode exists
+ * @throws IOException raised on errors performing I/O.
+ * @throws InterruptedException interrupted exception.
*/
public synchronized boolean parentZNodeExists()
throws IOException, InterruptedException {
@@ -327,6 +342,10 @@ public class ActiveStandbyElector implements StatCallback, StringCallback {
/**
* Utility function to ensure that the configured base znode exists.
* This recursively creates the znode as well as all of its parents.
+ *
+ * @throws IOException raised on errors performing I/O.
+ * @throws InterruptedException interrupted exception.
+ * @throws KeeperException other zookeeper operation errors.
*/
public synchronized void ensureParentZNode()
throws IOException, InterruptedException, KeeperException {
@@ -371,6 +390,9 @@ public class ActiveStandbyElector implements StatCallback, StringCallback {
* This recursively deletes everything within the znode as well as the
* parent znode itself. It should only be used when it's certain that
* no electors are currently participating in the election.
+ *
+ * @throws IOException raised on errors performing I/O.
+ * @throws InterruptedException interrupted exception.
*/
public synchronized void clearParentZNode()
throws IOException, InterruptedException {
@@ -435,6 +457,7 @@ public class ActiveStandbyElector implements StatCallback, StringCallback {
* @throws KeeperException
* other zookeeper operation errors
* @throws InterruptedException
+ * interrupted exception.
* @throws IOException
* when ZooKeeper connection could not be established
*/
@@ -684,7 +707,7 @@ public class ActiveStandbyElector implements StatCallback, StringCallback {
* inherit and mock out the zookeeper instance
*
* @return new zookeeper client instance
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
* @throws KeeperException zookeeper connectionloss exception
*/
protected synchronized ZooKeeper connectToZooKeeper() throws IOException,
@@ -714,7 +737,7 @@ public class ActiveStandbyElector implements StatCallback, StringCallback {
* inherit and pass in a mock object for zookeeper
*
* @return new zookeeper client instance
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
protected ZooKeeper createZooKeeper() throws IOException {
return new ZooKeeper(zkHostPort, zkSessionTimeout, watcher);
@@ -781,6 +804,8 @@ public class ActiveStandbyElector implements StatCallback, StringCallback {
* Sleep for the given number of milliseconds.
* This is non-static, and separated out, so that unit tests
* can override the behavior not to sleep.
+ *
+ * @param sleepMs sleep ms.
*/
@VisibleForTesting
protected void sleepFor(int sleepMs) {
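The callback contract spread across the hunks above (becomeActive may throw
ServiceFailedException, notifyFatalError signals unrecoverable errors) reads
more easily as a skeleton. A sketch only; the full method set of
ActiveStandbyElectorCallback is recalled from the interface, not this hunk:

    import org.apache.hadoop.ha.ActiveStandbyElector;
    import org.apache.hadoop.ha.ServiceFailedException;

    class ElectorCallbackSketch
        implements ActiveStandbyElector.ActiveStandbyElectorCallback {
      @Override
      public void becomeActive() throws ServiceFailedException {
        // Start serving as active; manage your own RPC timeouts per the javadoc.
      }
      @Override
      public void becomeStandby() {
        // Stop active duties.
      }
      @Override
      public void enterNeutralMode() {
        // ZK contact lost: hold current state until the session settles.
      }
      @Override
      public void notifyFatalError(String errorMessage) {
        // Wrong ACLs or persistent ZK outage: abort or alert.
      }
      @Override
      public void fenceOldActive(byte[] oldActiveData) {
        // Fence the previous active before taking over.
      }
    }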
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java
index c6949e561e2..9eeaacd76bc 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAAdmin.java
@@ -326,6 +326,9 @@ public abstract class HAAdmin extends Configured implements Tool {
/**
* Return the serviceId as is, we are assuming it was
* given as a service address of form {@literal <}host:ipcport{@literal >}.
+ *
+ * @param serviceId serviceId.
+ * @return service addr.
*/
protected String getServiceAddr(String serviceId) {
return serviceId;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceProtocol.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceProtocol.java
index 74a3d121a1a..56c848617ff 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceProtocol.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceProtocol.java
@@ -118,7 +118,8 @@ public interface HAServiceProtocol {
/**
* Request service to transition to active state. No operation, if the
* service is already in active state.
- *
+ *
+ * @param reqInfo reqInfo.
* @throws ServiceFailedException
* if transition from standby to active fails.
* @throws AccessControlException
@@ -135,7 +136,8 @@ public interface HAServiceProtocol {
/**
* Request service to transition to standby state. No operation, if the
* service is already in standby state.
- *
+ *
+ * @param reqInfo reqInfo.
* @throws ServiceFailedException
* if transition from active to standby fails.
* @throws AccessControlException
@@ -153,6 +155,7 @@ public interface HAServiceProtocol {
* Request service to transition to observer state. No operation, if the
* service is already in observer state.
*
+ * @param reqInfo reqInfo.
* @throws ServiceFailedException
* if transition from standby to observer fails.
* @throws AccessControlException
@@ -176,6 +179,7 @@ public interface HAServiceProtocol {
* @throws IOException
* if other errors happen
* @see HAServiceStatus
+ * @return HAServiceStatus.
*/
@Idempotent
public HAServiceStatus getServiceStatus() throws AccessControlException,
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceTarget.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceTarget.java
index 2e6b1fe1134..288a9dcbe0e 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceTarget.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HAServiceTarget.java
@@ -93,6 +93,9 @@ public abstract class HAServiceTarget {
/**
* @return a proxy to connect to the target HA Service.
+ * @param conf Configuration.
+ * @param timeoutMs timeout in milliseconds.
+ * @throws IOException raised on errors performing I/O.
*/
public HAServiceProtocol getProxy(Configuration conf, int timeoutMs)
throws IOException {
@@ -115,7 +118,7 @@ public abstract class HAServiceTarget {
* returned proxy defaults to using {@link #getAddress()}, which means this
* method's behavior is identical to {@link #getProxy(Configuration, int)}.
*
- * @param conf Configuration
+ * @param conf configuration.
* @param timeoutMs timeout in milliseconds
* @return a proxy to connect to the target HA service for health monitoring
* @throws IOException if there is an error
@@ -154,6 +157,9 @@ public abstract class HAServiceTarget {
/**
* @return a proxy to the ZKFC which is associated with this HA service.
+ * @param conf configuration.
+ * @param timeoutMs timeout in milliseconds.
+ * @throws IOException raised on errors performing I/O.
*/
public ZKFCProtocol getZKFCProxy(Configuration conf, int timeoutMs)
throws IOException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HealthMonitor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HealthMonitor.java
index 7e90fb77a07..d222d52e373 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HealthMonitor.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/HealthMonitor.java
@@ -184,6 +184,9 @@ public class HealthMonitor {
/**
* Connect to the service to be monitored. Stubbed out for easier testing.
+ *
+ * @throws IOException raised on errors performing I/O.
+ * @return HAServiceProtocol.
*/
protected HAServiceProtocol createProxy() throws IOException {
return targetToMonitor.getHealthMonitorProxy(conf, rpcTimeout, rpcConnectRetries);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFailoverController.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFailoverController.java
index 87a80b868cd..d24d5630c59 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFailoverController.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ha/ZKFailoverController.java
@@ -153,6 +153,8 @@ public abstract class ZKFailoverController {
* the ZKFC will do all of its work. This is so that multiple federated
* nameservices can run on the same ZK quorum without having to manually
* configure them to separate subdirectories.
+ *
+ * @return the scope inside the parent znode.
*/
protected abstract String getScopeInsideParentNode();
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HtmlQuoting.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HtmlQuoting.java
index 51db21c185f..5f47ddb3392 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HtmlQuoting.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HtmlQuoting.java
@@ -80,6 +80,7 @@ public class HtmlQuoting {
* @param buffer the byte array to take the characters from
* @param off the index of the first byte to quote
* @param len the number of bytes to quote
+ * @throws IOException raised on errors performing I/O.
*/
public static void quoteHtmlChars(OutputStream output, byte[] buffer,
int off, int len) throws IOException {
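The stream overload of quoteHtmlChars documented above, in a self-contained
round trip:

    import java.io.ByteArrayOutputStream;
    import java.io.IOException;
    import java.nio.charset.StandardCharsets;
    import org.apache.hadoop.http.HtmlQuoting;

    class QuoteSketch {
      public static void main(String[] args) throws IOException {
        byte[] raw = "<b>5 & 6</b>".getBytes(StandardCharsets.UTF_8);
        ByteArrayOutputStream out = new ByteArrayOutputStream();
        HtmlQuoting.quoteHtmlChars(out, raw, 0, raw.length);
        // Prints &lt;b&gt;5 &amp; 6&lt;/b&gt;
        System.out.println(out.toString(StandardCharsets.UTF_8.name()));
      }
    }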
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java
index 49807ac4b45..2928f885982 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/http/HttpServer2.java
@@ -270,6 +270,7 @@ public final class HttpServer2 implements FilterContainer {
* specifies the binding address, and the port specifies the
* listening port. Unspecified or zero port means that the server
* can listen to any port.
+ * @return Builder.
*/
public Builder addEndpoint(URI endpoint) {
endpoints.add(endpoint);
@@ -280,6 +281,9 @@ public final class HttpServer2 implements FilterContainer {
* Set the hostname of the http server. The host name is used to resolve the
* _HOST field in Kerberos principals. The hostname of the first listener
* will be used if the name is unspecified.
+ *
+ * @param hostName hostName.
+ * @return Builder.
*/
public Builder hostName(String hostName) {
this.hostName = hostName;
@@ -308,6 +312,9 @@ public final class HttpServer2 implements FilterContainer {
/**
* Specify whether the server should authorize the client in SSL
* connections.
+ *
+ * @param value value.
+ * @return Builder.
*/
public Builder needsClientAuth(boolean value) {
this.needsClientAuth = value;
@@ -332,6 +339,9 @@ public final class HttpServer2 implements FilterContainer {
/**
* Specify the SSL configuration to load. This API provides an alternative
* to keyStore/keyPassword/trustStore.
+ *
+ * @param sslCnf sslCnf.
+ * @return Builder.
*/
public Builder setSSLConf(Configuration sslCnf) {
this.sslConf = sslCnf;
@@ -898,8 +908,11 @@ public final class HttpServer2 implements FilterContainer {
/**
* Add default apps.
+ *
+ * @param parent the parent ContextHandlerCollection.
* @param appDir The application directory
- * @throws IOException
+ * @param conf configuration.
+ * @throws IOException raised on errors performing I/O.
*/
protected void addDefaultApps(ContextHandlerCollection parent,
final String appDir, Configuration conf) throws IOException {
@@ -1180,6 +1193,12 @@ public final class HttpServer2 implements FilterContainer {
/**
* Define a filter for a context and set up default url mappings.
+ *
+ * @param ctx ctx.
+ * @param name name.
+ * @param classname classname.
+ * @param parameters parameters.
+ * @param urls urls.
*/
public static void defineFilter(ServletContextHandler ctx, String name,
String classname, Map<String, String> parameters, String[] urls) {
@@ -1290,6 +1309,7 @@ public final class HttpServer2 implements FilterContainer {
/**
* Get the address that corresponds to a particular connector.
*
+ * @param index index.
* @return the corresponding address for the connector, or null if there's no
* such connector or the connector is not bounded or was closed.
*/
@@ -1309,6 +1329,9 @@ public final class HttpServer2 implements FilterContainer {
/**
* Set the min, max number of worker threads (simultaneous connections).
+ *
+ * @param min min.
+ * @param max max.
*/
public void setThreads(int min, int max) {
QueuedThreadPool pool = (QueuedThreadPool) webServer.getThreadPool();
@@ -1335,6 +1358,8 @@ public final class HttpServer2 implements FilterContainer {
/**
* Start the server. Does not wait for the server to start.
+ *
+ * @throws IOException raised on errors performing I/O.
*/
public void start() throws IOException {
try {
@@ -1509,7 +1534,9 @@ public final class HttpServer2 implements FilterContainer {
}
/**
- * stop the server
+ * stop the server.
+ *
+ * @throws Exception exception.
*/
public void stop() throws Exception {
MultiException exception = null;
@@ -1610,6 +1637,7 @@ public final class HttpServer2 implements FilterContainer {
* @param request the servlet request.
* @param response the servlet response.
* @return TRUE/FALSE based on the logic described above.
+ * @throws IOException raised on errors performing I/O.
*/
public static boolean isInstrumentationAccessAllowed(
ServletContext servletContext, HttpServletRequest request,
@@ -1631,9 +1659,11 @@ public final class HttpServer2 implements FilterContainer {
* Does the user sending the HttpServletRequest has the administrator ACLs? If
* it isn't the case, response will be modified to send an error to the user.
*
+ * @param servletContext servletContext.
+ * @param request request.
* @param response used to send the error response if user does not have admin access.
* @return true if admin-authorized, false otherwise
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public static boolean hasAdministratorAccess(
ServletContext servletContext, HttpServletRequest request,
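The Builder methods patched above chain in the usual way. A sketch;
setName and setFindPort are recalled from common test usage, not this hunk:

    import java.net.InetSocketAddress;
    import java.net.URI;
    import org.apache.hadoop.http.HttpServer2;

    class HttpServerSketch {
      public static void main(String[] args) throws Exception {
        HttpServer2 server = new HttpServer2.Builder()
            .setName("example")                            // assumed helper
            .hostName("localhost")
            .addEndpoint(URI.create("http://localhost:0")) // port 0: any free port
            .setFindPort(true)                             // assumed helper
            .build();
        server.start();
        InetSocketAddress addr = server.getConnectorAddress(0);
        System.out.println("listening on " + addr);
        server.stop(); // declared to throw Exception, per the javadoc fix above
      }
    }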
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/AbstractMapWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/AbstractMapWritable.java
index eef74628e16..8cf82f42509 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/AbstractMapWritable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/AbstractMapWritable.java
@@ -84,7 +84,10 @@ public abstract class AbstractMapWritable implements Writable, Configurable {
idToClassMap.put(id, clazz);
}
- /** Add a Class to the maps if it is not already present. */
+ /**
+ * Add a Class to the maps if it is not already present.
+ * @param clazz clazz.
+ */
protected synchronized void addToMap(Class<?> clazz) {
if (classToIdMap.containsKey(clazz)) {
return;
@@ -97,17 +100,28 @@ public abstract class AbstractMapWritable implements Writable, Configurable {
addToMap(clazz, id);
}
- /** @return the Class class for the specified id */
+ /**
+ * the Class class for the specified id.
+ * @param id id.
+ * @return the Class class for the specified id.
+ */
protected Class<?> getClass(byte id) {
return idToClassMap.get(id);
}
- /** @return the id for the specified Class */
+ /**
+ * Get the id for the specified Class.
+ * @param clazz clazz.
+ * @return the id for the specified Class.
+ */
protected byte getId(Class<?> clazz) {
return classToIdMap.containsKey(clazz) ? classToIdMap.get(clazz) : -1;
}
- /** Used by child copy constructors. */
+ /**
+ * Used by child copy constructors.
+ * @param other other.
+ */
protected synchronized void copy(Writable other) {
if (other != null) {
try {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayFile.java
index bee5fd2cb43..313caa63608 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayFile.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayFile.java
@@ -38,7 +38,15 @@ public class ArrayFile extends MapFile {
public static class Writer extends MapFile.Writer {
private LongWritable count = new LongWritable(0);
- /** Create the named file for values of the named class. */
+ /**
+ * Create the named file for values of the named class.
+ *
+ * @param conf configuration.
+ * @param fs file system.
+ * @param file file.
+ * @param valClass valClass.
+ * @throws IOException raised on errors performing I/O.
+ */
public Writer(Configuration conf, FileSystem fs,
String file, Class<? extends Writable> valClass)
throws IOException {
@@ -46,7 +54,17 @@ public class ArrayFile extends MapFile {
valueClass(valClass));
}
- /** Create the named file for values of the named class. */
+ /**
+ * Create the named file for values of the named class.
+ *
+ * @param conf configuration.
+ * @param fs file system.
+ * @param file file.
+ * @param valClass valClass.
+ * @param compress compress.
+ * @param progress progress.
+ * @throws IOException raised on errors performing I/O.
+ */
public Writer(Configuration conf, FileSystem fs,
String file, Class<? extends Writable> valClass,
CompressionType compress, Progressable progress)
@@ -58,7 +76,11 @@ public class ArrayFile extends MapFile {
progressable(progress));
}
- /** Append a value to the file. */
+ /**
+ * Append a value to the file.
+ * @param value value.
+ * @throws IOException raised on errors performing I/O.
+ */
public synchronized void append(Writable value) throws IOException {
super.append(count, value); // add to map
count.set(count.get()+1); // increment count
@@ -69,31 +91,59 @@ public class ArrayFile extends MapFile {
public static class Reader extends MapFile.Reader {
private LongWritable key = new LongWritable();
- /** Construct an array reader for the named file.*/
+ /**
+ * Construct an array reader for the named file.
+ * @param fs FileSystem.
+ * @param file file.
+ * @param conf configuration.
+ * @throws IOException raised on errors performing I/O.
+ */
public Reader(FileSystem fs, String file,
Configuration conf) throws IOException {
super(new Path(file), conf);
}
- /** Positions the reader before its <code>n</code>th value. */
+ /**
+ * Positions the reader before its <code>n</code>th value.
+ *
+ * @param n n key.
+ * @throws IOException raised on errors performing I/O.
+ */
public synchronized void seek(long n) throws IOException {
key.set(n);
seek(key);
}
- /** Read and return the next value in the file. */
+ /**
+ * Read and return the next value in the file.
+ *
+ * @param value value.
+ * @throws IOException raised on errors performing I/O.
+ * @return Writable.
+ */
public synchronized Writable next(Writable value) throws IOException {
return next(key, value) ? value : null;
}
- /** Returns the key associated with the most recent call to {@link
+ /**
+ * Returns the key associated with the most recent call to {@link
* #seek(long)}, {@link #next(Writable)}, or {@link
- * #get(long,Writable)}. */
+ * #get(long,Writable)}.
+ *
+ * @return the key.
+ * @throws IOException raised on errors performing I/O.
+ */
public synchronized long key() throws IOException {
return key.get();
}
- /** Return the <code>n</code>th value in the file. */
+ /**
+ * Return the <code>n</code>th value in the file.
+ * @param n n key.
+ * @param value value.
+ * @throws IOException raised on errors performing I/O.
+ * @return writable.
+ */
public synchronized Writable get(long n, Writable value)
throws IOException {
key.set(n);
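The Writer/Reader pair documented above in one round trip; the local file
system and /tmp path are illustrative:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.io.ArrayFile;
    import org.apache.hadoop.io.IntWritable;

    class ArrayFileSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.getLocal(conf);
        String file = "/tmp/array-file-sketch";

        ArrayFile.Writer writer =
            new ArrayFile.Writer(conf, fs, file, IntWritable.class);
        writer.append(new IntWritable(42)); // stored under key 0
        writer.close();

        ArrayFile.Reader reader = new ArrayFile.Reader(fs, file, conf);
        IntWritable value = new IntWritable();
        reader.get(0, value);               // reads the 0th value: 42
        reader.close();
      }
    }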
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayPrimitiveWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayPrimitiveWritable.java
index 2b6f3166bc2..ce7813e7483 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayPrimitiveWritable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ArrayPrimitiveWritable.java
@@ -106,7 +106,9 @@ public class ArrayPrimitiveWritable implements Writable {
/**
* Construct an instance of known type but no value yet
- * for use with type-specific wrapper classes
+ * for use with type-specific wrapper classes.
+ *
+ * @param componentType componentType.
*/
public ArrayPrimitiveWritable(Class<?> componentType) {
checkPrimitive(componentType);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BinaryComparable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BinaryComparable.java
index a32c44c8e50..a78ff8b6c58 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BinaryComparable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BinaryComparable.java
@@ -31,11 +31,15 @@ public abstract class BinaryComparable implements Comparable {
/**
* Return n st bytes 0..n-1 from {#getBytes()} are valid.
+ *
+ * @return length.
*/
public abstract int getLength();
/**
* Return representative byte array for this instance.
+ *
+ * @return getBytes.
*/
public abstract byte[] getBytes();
@@ -53,6 +57,11 @@ public abstract class BinaryComparable implements Comparable {
/**
* Compare bytes from {#getBytes()} to those provided.
+ *
+ * @param other other.
+ * @param off off.
+ * @param len len.
+ * @return compareBytes.
*/
public int compareTo(byte[] other, int off, int len) {
return WritableComparator.compareBytes(getBytes(), 0, getLength(),
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BloomMapFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BloomMapFile.java
index 519fcd74cbb..91ea07d5de4 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BloomMapFile.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BloomMapFile.java
@@ -259,7 +259,7 @@ public class BloomMapFile {
* probability of false positives.
* @param key key to check
* @return false iff key doesn't exist, true if key probably exists.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public boolean probablyHasKey(WritableComparable key) throws IOException {
if (bloomFilter == null) {
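probablyHasKey is a cheap membership pre-check before a disk lookup. A
sketch assuming an existing bloom map file directory of Text/Text pairs:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.io.BloomMapFile;
    import org.apache.hadoop.io.Text;

    class BloomLookupSketch {
      static Text lookup(FileSystem fs, String dir, Configuration conf,
          String k) throws Exception {
        BloomMapFile.Reader reader = new BloomMapFile.Reader(fs, dir, conf);
        try {
          Text key = new Text(k);
          if (!reader.probablyHasKey(key)) {
            return null; // definitely absent: no disk seek needed
          }
          // May still be null: the filter allows false positives.
          return (Text) reader.get(key, new Text());
        } finally {
          reader.close();
        }
      }
    }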
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BooleanWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BooleanWritable.java
index 0079079a792..789b866255b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BooleanWritable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BooleanWritable.java
@@ -35,21 +35,24 @@ public class BooleanWritable implements WritableComparable {
*/
public BooleanWritable() {};
- /**
+ /**
+ * @param value value.
*/
public BooleanWritable(boolean value) {
set(value);
}
/**
- * Set the value of the BooleanWritable
+ * Set the value of the BooleanWritable.
+ * @param value value.
*/
public void set(boolean value) {
this.value = value;
}
/**
- * Returns the value of the BooleanWritable
+ * Returns the value of the BooleanWritable.
+ * @return the value of the BooleanWritable.
*/
public boolean get() {
return value;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BoundedByteArrayOutputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BoundedByteArrayOutputStream.java
index c27449d3618..542721f318d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BoundedByteArrayOutputStream.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BoundedByteArrayOutputStream.java
@@ -114,20 +114,28 @@ public class BoundedByteArrayOutputStream extends OutputStream {
this.currentPointer = startOffset;
}
- /** Return the current limit */
+ /**
+ * Return the current limit.
+ * @return limit.
+ */
public int getLimit() {
return limit;
}
- /** Returns the underlying buffer.
+ /**
+ * Returns the underlying buffer.
* Data is only valid to {@link #size()}.
+ * @return the underlying buffer.
*/
public byte[] getBuffer() {
return buffer;
}
- /** Returns the length of the valid data
+ /**
+ * Returns the length of the valid data
* currently in the buffer.
+ *
+ * @return the length of the valid data.
*/
public int size() {
return currentPointer - startOffset;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ByteWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ByteWritable.java
index ffcdea2c9a3..c4b88f4b5c9 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ByteWritable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ByteWritable.java
@@ -33,10 +33,16 @@ public class ByteWritable implements WritableComparable {
public ByteWritable(byte value) { set(value); }
- /** Set the value of this ByteWritable. */
+ /**
+ * Set the value of this ByteWritable.
+ * @param value value.
+ */
public void set(byte value) { this.value = value; }
- /** Return the value of this ByteWritable. */
+ /**
+ * Return the value of this ByteWritable.
+ * @return the value of this ByteWritable.
+ */
public byte get() { return value; }
@Override
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BytesWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BytesWritable.java
index c5538c9e56e..80a23f86ce8 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BytesWritable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/BytesWritable.java
@@ -77,6 +77,8 @@ public class BytesWritable extends BinaryComparable
/**
* Get a copy of the bytes that is exactly the length of the data.
* See {@link #getBytes()} for faster access to the underlying array.
+ *
+ * @return copyBytes.
*/
public byte[] copyBytes() {
return Arrays.copyOf(bytes, size);
@@ -95,6 +97,7 @@ public class BytesWritable extends BinaryComparable
/**
* Get the data from the BytesWritable.
* @deprecated Use {@link #getBytes()} instead.
+ * @return data from the BytesWritable.
*/
@Deprecated
public byte[] get() {
@@ -112,6 +115,7 @@ public class BytesWritable extends BinaryComparable
/**
* Get the current size of the buffer.
* @deprecated Use {@link #getLength()} instead.
+ * @return current size of the buffer.
*/
@Deprecated
public int getSize() {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/CompressedWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/CompressedWritable.java
index 6550e1f2fde..c0315ab828c 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/CompressedWritable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/CompressedWritable.java
@@ -67,7 +67,11 @@ public abstract class CompressedWritable implements Writable {
}
}
- /** Subclasses implement this instead of {@link #readFields(DataInput)}. */
+ /**
+ * Subclasses implement this instead of {@link #readFields(DataInput)}.
+ * @param in data input.
+ * @throws IOException raised on errors performing I/O.
+ */
protected abstract void readFieldsCompressed(DataInput in)
throws IOException;
@@ -87,7 +91,12 @@ public abstract class CompressedWritable implements Writable {
out.write(compressed);
}
- /** Subclasses implement this instead of {@link #write(DataOutput)}. */
+ /**
+ * Subclasses implement this instead of {@link #write(DataOutput)}.
+ *
+ * @param out data output.
+ * @throws IOException raised on errors performing I/O.
+ */
protected abstract void writeCompressed(DataOutput out) throws IOException;
}
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DataInputBuffer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DataInputBuffer.java
index 63c41c2e750..85e905d8700 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DataInputBuffer.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DataInputBuffer.java
@@ -140,12 +140,23 @@ public class DataInputBuffer extends DataInputStream {
this.buffer = buffer;
}
- /** Resets the data that the buffer reads. */
+ /**
+ * Resets the data that the buffer reads.
+ *
+ * @param input input.
+ * @param length length.
+ */
public void reset(byte[] input, int length) {
buffer.reset(input, 0, length);
}
- /** Resets the data that the buffer reads. */
+ /**
+ * Resets the data that the buffer reads.
+ *
+ * @param input input.
+ * @param start start.
+ * @param length length.
+ */
public void reset(byte[] input, int start, int length) {
buffer.reset(input, start, length);
}
@@ -154,12 +165,18 @@ public class DataInputBuffer extends DataInputStream {
return buffer.getData();
}
- /** Returns the current position in the input. */
+ /**
+ * Returns the current position in the input.
+ *
+ * @return position.
+ */
public int getPosition() { return buffer.getPosition(); }
/**
* Returns the index one greater than the last valid character in the input
* stream buffer.
+ *
+ * @return length.
*/
public int getLength() { return buffer.getLength(); }
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DataOutputBuffer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DataOutputBuffer.java
index 1d86b89701c..4c1fa41e149 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DataOutputBuffer.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/DataOutputBuffer.java
@@ -99,27 +99,45 @@ public class DataOutputBuffer extends DataOutputStream {
this.buffer = buffer;
}
- /** Returns the current contents of the buffer.
+ /**
+ * Returns the current contents of the buffer.
* Data is only valid to {@link #getLength()}.
+ *
+ * @return the data bytes.
*/
public byte[] getData() { return buffer.getData(); }
- /** Returns the length of the valid data currently in the buffer. */
+ /**
+ * Returns the length of the valid data currently in the buffer.
+ * @return length.
+ */
public int getLength() { return buffer.getLength(); }
- /** Resets the buffer to empty. */
+ /**
+ * Resets the buffer to empty.
+ * @return DataOutputBuffer.
+ */
public DataOutputBuffer reset() {
this.written = 0;
buffer.reset();
return this;
}
- /** Writes bytes from a DataInput directly into the buffer. */
+ /**
+ * Writes bytes from a DataInput directly into the buffer.
+ * @param in data input.
+ * @param length length.
+ * @throws IOException raised on errors performing I/O.
+ */
public void write(DataInput in, int length) throws IOException {
buffer.write(in, length);
}
- /** Write to a file stream */
+ /**
+ * Write to a file stream.
+ * @param out OutputStream.
+ * @throws IOException raised on errors performing I/O.
+ */
public void writeTo(OutputStream out) throws IOException {
buffer.writeTo(out);
}
@@ -128,6 +146,10 @@ public class DataOutputBuffer extends DataOutputStream {
* Overwrite an integer into the internal buffer. Note that this call can only
* be used to overwrite existing data in the buffer, i.e., buffer#count cannot
* be increased, and DataOutputStream#written cannot be increased.
+ *
+ * @param v the value to overwrite with.
+ * @param offset offset.
+ * @throws IOException raised on errors performing I/O.
*/
public void writeInt(int v, int offset) throws IOException {
Preconditions.checkState(offset + 4 <= buffer.getLength());
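The buffer methods above combine into the standard Writable round trip;
as the getData() javadoc now notes, only bytes up to getLength() are valid:

    import org.apache.hadoop.io.DataInputBuffer;
    import org.apache.hadoop.io.DataOutputBuffer;
    import org.apache.hadoop.io.IntWritable;

    class BufferRoundTripSketch {
      public static void main(String[] args) throws Exception {
        DataOutputBuffer out = new DataOutputBuffer();
        new IntWritable(7).write(out);            // serialize into the buffer

        DataInputBuffer in = new DataInputBuffer();
        in.reset(out.getData(), out.getLength()); // valid bytes only

        IntWritable copy = new IntWritable();
        copy.readFields(in);
        System.out.println(copy.get());           // 7
      }
    }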
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/EnumSetWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/EnumSetWritable.java
index be86159519b..4b1dc7513d0 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/EnumSetWritable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/EnumSetWritable.java
@@ -64,8 +64,8 @@ public class EnumSetWritable<E extends Enum<E>> extends AbstractCollection<E>
* the argument <code>value</code>'s size is bigger than zero, the argument
* elementType is not be used.
*
- * @param value
- * @param elementType
+ * @param value enumSet value.
+ * @param elementType elementType.
*/
public EnumSetWritable(EnumSet<E> value, Class<E> elementType) {
set(value, elementType);
@@ -75,7 +75,7 @@ public class EnumSetWritable> extends AbstractCollection
* Construct a new EnumSetWritable. Argument value should not be null
* or empty.
*
- * @param value
+ * @param value enumSet value.
*/
public EnumSetWritable(EnumSet<E> value) {
this(value, null);
@@ -88,8 +88,8 @@ public class EnumSetWritable> extends AbstractCollection
* null. If the argument <code>value</code>'s size is bigger than zero, the
* argument elementType is not be used.
*
- * @param value
- * @param elementType
+ * @param value enumSet Value.
+ * @param elementType elementType.
*/
public void set(EnumSet<E> value, Class<E> elementType) {
if ((value == null || value.size() == 0)
@@ -106,7 +106,10 @@ public class EnumSetWritable> extends AbstractCollection
}
}
- /** Return the value of this EnumSetWritable. */
+ /**
+ * Return the value of this EnumSetWritable.
+ * @return EnumSet.
+ */
public EnumSet<E> get() {
return value;
}
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FloatWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FloatWritable.java
index 367fc946da1..864bb8752f5 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FloatWritable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/FloatWritable.java
@@ -33,10 +33,16 @@ public class FloatWritable implements WritableComparable {
public FloatWritable(float value) { set(value); }
- /** Set the value of this FloatWritable. */
+ /**
+ * Set the value of this FloatWritable.
+ * @param value value.
+ */
public void set(float value) { this.value = value; }
- /** Return the value of this FloatWritable. */
+ /**
+ * Return the value of this FloatWritable.
+ * @return value.
+ */
public float get() { return value; }
@Override
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/GenericWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/GenericWritable.java
index 7cfeed7f931..6de927467e4 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/GenericWritable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/GenericWritable.java
@@ -90,7 +90,7 @@ public abstract class GenericWritable implements Writable, Configurable {
/**
* Set the instance that is wrapped.
*
- * @param obj
+ * @param obj input obj.
*/
public void set(Writable obj) {
instance = obj;
@@ -109,6 +109,7 @@ public abstract class GenericWritable implements Writable, Configurable {
/**
* Return the wrapped instance.
+ * @return the wrapped instance.
*/
public Writable get() {
return instance;
@@ -145,6 +146,7 @@ public abstract class GenericWritable implements Writable, Configurable {
/**
* Return all classes that may be wrapped. Subclasses should implement this
* to return a constant array of classes.
+ * @return all classes that may be wrapped.
*/
abstract protected Class<? extends Writable>[] getTypes();
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java
index 121af64b011..f0a9b0b6952 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IOUtils.java
@@ -59,7 +59,8 @@ public class IOUtils {
* @param out OutputStream to write to
* @param buffSize the size of the buffer
* @param close whether or not close the InputStream and
- * OutputStream at the end. The streams are closed in the finally clause.
+ * OutputStream at the end. The streams are closed in the finally clause.
+ * @throws IOException raised on errors performing I/O.
*/
public static void copyBytes(InputStream in, OutputStream out,
int buffSize, boolean close)
@@ -85,7 +86,8 @@ public class IOUtils {
*
* @param in InputStream to read from
* @param out OutputStream to write to
- * @param buffSize the size of the buffer
+ * @param buffSize the size of the buffer.
+ * @throws IOException raised on errors performing I/O.
*/
public static void copyBytes(InputStream in, OutputStream out, int buffSize)
throws IOException {
@@ -107,7 +109,8 @@ public class IOUtils {
*
* @param in InputStream to read from
* @param out OutputStream to write to
- * @param conf the Configuration object
+ * @param conf the Configuration object.
+ * @throws IOException raised on errors performing I/O.
*/
public static void copyBytes(InputStream in, OutputStream out, Configuration conf)
throws IOException {
@@ -123,6 +126,7 @@ public class IOUtils {
* @param conf the Configuration object
* @param close whether or not close the InputStream and
* OutputStream at the end. The streams are closed in the finally clause.
+ * @throws IOException raised on errors performing I/O.
*/
public static void copyBytes(InputStream in, OutputStream out, Configuration conf, boolean close)
throws IOException {
@@ -181,6 +185,7 @@ public class IOUtils {
* @param off - offset within buf
* @param len - amount of data to be read
* @return number of bytes read
+ * @throws IOException raised on errors performing I/O.
*/
public static int wrappedReadForCompressedData(InputStream is, byte[] buf,
int off, int len) throws IOException {
@@ -407,6 +412,7 @@ public class IOUtils {
* once the sync is done.
* Borrowed from Uwe Schindler in LUCENE-5588
* @param fileToSync the file to fsync
+ * @throws IOException raised on errors performing I/O.
*/
public static void fsync(File fileToSync) throws IOException {
if (!fileToSync.exists()) {
@@ -440,7 +446,7 @@ public class IOUtils {
* @param isDir if true, the given file is a directory (Channel should be
* opened for read and ignore IOExceptions, because not all file
* systems and operating systems allow to fsync on a directory)
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public static void fsync(FileChannel channel, boolean isDir)
throws IOException {
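A sketch of the three-argument copyBytes overload documented above: it only
copies, so the caller owns both streams and try-with-resources closes them;
the file names are illustrative:

    import java.io.FileInputStream;
    import java.io.FileOutputStream;
    import java.io.IOException;
    import java.io.InputStream;
    import java.io.OutputStream;
    import org.apache.hadoop.io.IOUtils;

    class CopySketch {
      public static void main(String[] args) throws IOException {
        try (InputStream in = new FileInputStream("src.txt");
             OutputStream out = new FileOutputStream("dst.txt")) {
          IOUtils.copyBytes(in, out, 4096); // copy only; streams closed by try
        }
      }
    }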
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/InputBuffer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/InputBuffer.java
index 0d084b8396f..686b359f57d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/InputBuffer.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/InputBuffer.java
@@ -75,20 +75,35 @@ public class InputBuffer extends FilterInputStream {
this.buffer = buffer;
}
- /** Resets the data that the buffer reads. */
+ /**
+ * Resets the data that the buffer reads.
+ * @param input input.
+ * @param length length.
+ */
public void reset(byte[] input, int length) {
buffer.reset(input, 0, length);
}
- /** Resets the data that the buffer reads. */
+ /**
+ * Resets the data that the buffer reads.
+ * @param input input.
+ * @param start start.
+ * @param length length.
+ */
public void reset(byte[] input, int start, int length) {
buffer.reset(input, start, length);
}
- /** Returns the current position in the input. */
+ /**
+ * Returns the current position in the input.
+ * @return the current position in the input.
+ */
public int getPosition() { return buffer.getPosition(); }
- /** Returns the length of the input. */
+ /**
+ * Returns the length of the input.
+ * @return length of the input.
+ */
public int getLength() { return buffer.getLength(); }
}
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IntWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IntWritable.java
index f656d028cb0..ffcf93946d0 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IntWritable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/IntWritable.java
@@ -36,10 +36,16 @@ public class IntWritable implements WritableComparable {
public IntWritable(int value) { set(value); }
- /** Set the value of this IntWritable. */
+ /**
+ * Set the value of this IntWritable.
+ * @param value input value.
+ */
public void set(int value) { this.value = value; }
- /** Return the value of this IntWritable. */
+ /**
+ * Return the value of this IntWritable.
+ * @return value of this IntWritable.
+ */
public int get() { return value; }
@Override
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/LongWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/LongWritable.java
index b77ca6781a6..9262af87bc2 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/LongWritable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/LongWritable.java
@@ -36,10 +36,16 @@ public class LongWritable implements WritableComparable {
public LongWritable(long value) { set(value); }
- /** Set the value of this LongWritable. */
+ /**
+ * Set the value of this LongWritable.
+ * @param value value.
+ */
public void set(long value) { this.value = value; }
- /** Return the value of this LongWritable. */
+ /**
+ * Return the value of this LongWritable.
+ * @return value of this LongWritable.
+ */
public long get() { return value; }
@Override
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MD5Hash.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MD5Hash.java
index 99c17acdd43..edfcf6e1e77 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MD5Hash.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MD5Hash.java
@@ -54,12 +54,18 @@ public class MD5Hash implements WritableComparable {
this.digest = new byte[MD5_LEN];
}
- /** Constructs an MD5Hash from a hex string. */
+ /**
+ * Constructs an MD5Hash from a hex string.
+ * @param hex the hex digest string.
+ */
public MD5Hash(String hex) {
setDigest(hex);
}
- /** Constructs an MD5Hash with a specified value. */
+ /**
+ * Constructs an MD5Hash with a specified value.
+ * @param digest the digest bytes; must be 16 bytes long.
+ */
public MD5Hash(byte[] digest) {
if (digest.length != MD5_LEN)
throw new IllegalArgumentException("Wrong length: " + digest.length);
@@ -72,7 +78,12 @@ public class MD5Hash implements WritableComparable {
in.readFully(digest);
}
- /** Constructs, reads and returns an instance. */
+ /**
+ * Constructs, reads and returns an instance.
+ * @param in the DataInput to read from.
+ * @throws IOException raised on errors performing I/O.
+ * @return the newly read MD5Hash.
+ */
public static MD5Hash read(DataInput in) throws IOException {
MD5Hash result = new MD5Hash();
result.readFields(in);
@@ -85,21 +96,32 @@ public class MD5Hash implements WritableComparable {
out.write(digest);
}
- /** Copy the contents of another instance into this instance. */
+ /**
+ * Copy the contents of another instance into this instance.
+ * @param that the instance to copy from.
+ */
public void set(MD5Hash that) {
System.arraycopy(that.digest, 0, this.digest, 0, MD5_LEN);
}
- /** Returns the digest bytes. */
+ /**
+ * Returns the digest bytes.
+ * @return the digest bytes.
+ */
public byte[] getDigest() { return digest; }
- /** Construct a hash value for a byte array. */
+ /**
+ * Construct a hash value for a byte array.
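+ *
+ * <p>A minimal usage sketch (input chosen for illustration):
+ * <pre>{@code
+ * byte[] data = "abc".getBytes(StandardCharsets.UTF_8);
+ * MD5Hash hash = MD5Hash.digest(data);
+ * String hex = hash.toString();  // 32-character hex digest
+ * }</pre>
+ *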
+ * @param data the byte array to hash.
+ * @return the MD5Hash of the data.
+ */
public static MD5Hash digest(byte[] data) {
return digest(data, 0, data.length);
}
/**
- * Create a thread local MD5 digester
+ * Create a thread local MD5 digester.
+ * @return MessageDigest.
*/
public static MessageDigest getDigester() {
MessageDigest digester = DIGESTER_FACTORY.get();
@@ -107,7 +129,12 @@ public class MD5Hash implements WritableComparable {
return digester;
}
- /** Construct a hash value for the content from the InputStream. */
+ /**
+ * Construct a hash value for the content from the InputStream.
+ * @param in input stream.
+ * @return MD5Hash.
+ * @throws IOException raised on errors performing I/O.
+ */
public static MD5Hash digest(InputStream in) throws IOException {
final byte[] buffer = new byte[4*1024];
@@ -119,7 +146,13 @@ public class MD5Hash implements WritableComparable {
return new MD5Hash(digester.digest());
}
- /** Construct a hash value for a byte array. */
+ /**
+ * Construct a hash value for a byte array.
+ * @param data the byte array to hash.
+ * @param start the offset of the first byte to hash.
+ * @param len the number of bytes to hash.
+ * @return the MD5Hash of the range.
+ */
public static MD5Hash digest(byte[] data, int start, int len) {
byte[] digest;
MessageDigest digester = getDigester();
@@ -128,7 +161,13 @@ public class MD5Hash implements WritableComparable {
return new MD5Hash(digest);
}
- /** Construct a hash value for an array of byte array. */
+ /**
+ * Construct a hash value for an array of byte array.
+ * @param dataArr the array of byte arrays to hash.
+ * @param start the start offset.
+ * @param len the number of bytes to hash.
+ * @return the MD5Hash of the data.
+ */
public static MD5Hash digest(byte[][] dataArr, int start, int len) {
byte[] digest;
MessageDigest digester = getDigester();
@@ -139,17 +178,28 @@ public class MD5Hash implements WritableComparable {
return new MD5Hash(digest);
}
- /** Construct a hash value for a String. */
+ /**
+ * Construct a hash value for a String.
+ * @param string the string to hash.
+ * @return the MD5Hash of the string's UTF8 bytes.
+ */
public static MD5Hash digest(String string) {
return digest(UTF8.getBytes(string));
}
- /** Construct a hash value for a String. */
+ /**
+ * Construct a hash value for a String.
+ * @param utf8 the UTF8 instance to hash.
+ * @return the MD5Hash of the UTF8 bytes.
+ */
public static MD5Hash digest(UTF8 utf8) {
return digest(utf8.getBytes(), 0, utf8.getLength());
}
- /** Construct a half-sized version of this MD5. Fits in a long **/
+ /**
+ * Construct a half-sized version of this MD5. Fits in a long.
+ * @return the first eight bytes of the digest, packed into a long.
+ */
public long halfDigest() {
long value = 0;
for (int i = 0; i < 8; i++)
@@ -226,7 +276,10 @@ public class MD5Hash implements WritableComparable {
return buf.toString();
}
- /** Sets the digest value from a hex string. */
+ /**
+ * Sets the digest value from a hex string.
+ * @param hex the hex digest string.
+ */
public void setDigest(String hex) {
if (hex.length() != MD5_LEN*2)
throw new IllegalArgumentException("Wrong length: " + hex.length());
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java
index 51db0b3f0af..7b3cd78e3cc 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MapFile.java
@@ -98,8 +98,16 @@ public class MapFile {
private long lastIndexKeyCount = Long.MIN_VALUE;
- /** Create the named map for keys of the named class.
+ /**
+ * Create the named map for keys of the named class.
* @deprecated Use Writer(Configuration, Path, Option...) instead.
+ *
+ * @param conf configuration.
+ * @param fs filesystem.
+ * @param dirName the name of the map directory.
+ * @param keyClass the class of the keys.
+ * @param valClass the class of the values.
+ * @throws IOException raised on errors performing I/O.
*/
@Deprecated
public Writer(Configuration conf, FileSystem fs, String dirName,
@@ -108,8 +116,18 @@ public class MapFile {
this(conf, new Path(dirName), keyClass(keyClass), valueClass(valClass));
}
- /** Create the named map for keys of the named class.
+ /**
+ * Create the named map for keys of the named class.
* @deprecated Use Writer(Configuration, Path, Option...) instead.
+ *
+ * @param conf configuration.
+ * @param fs filesystem.
+ * @param dirName the name of the map directory.
+ * @param keyClass the class of the keys.
+ * @param valClass the class of the values.
+ * @param compress the compression type.
+ * @param progress the progress reporter.
+ * @throws IOException raised on errors performing I/O.
*/
@Deprecated
public Writer(Configuration conf, FileSystem fs, String dirName,
@@ -120,8 +138,19 @@ public class MapFile {
compression(compress), progressable(progress));
}
- /** Create the named map for keys of the named class.
+ /**
+ * Create the named map for keys of the named class.
* @deprecated Use Writer(Configuration, Path, Option...) instead.
+ *
+ * @param conf configuration.
+ * @param fs filesystem.
+ * @param dirName the name of the map directory.
+ * @param keyClass the class of the keys.
+ * @param valClass the class of the values.
+ * @param compress the compression type.
+ * @param codec the compression codec.
+ * @param progress the progress reporter.
+ * @throws IOException raised on errors performing I/O.
*/
@Deprecated
public Writer(Configuration conf, FileSystem fs, String dirName,
@@ -132,8 +161,16 @@ public class MapFile {
compression(compress, codec), progressable(progress));
}
- /** Create the named map for keys of the named class.
+ /**
+ * Create the named map for keys of the named class.
* @deprecated Use Writer(Configuration, Path, Option...) instead.
+ * @param conf configuration.
+ * @param fs filesystem.
+ * @param dirName the name of the map directory.
+ * @param keyClass the class of the keys.
+ * @param valClass the class of the values.
+ * @param compress the compression type.
+ * @throws IOException raised on errors performing I/O.
*/
@Deprecated
public Writer(Configuration conf, FileSystem fs, String dirName,
@@ -145,6 +182,12 @@ public class MapFile {
/** Create the named map using the named key comparator.
* @deprecated Use Writer(Configuration, Path, Option...) instead.
+ * @param conf configuration.
+ * @param fs filesystem.
+ * @param dirName the name of the map directory.
+ * @param comparator the key comparator.
+ * @param valClass the class of the values.
+ * @throws IOException raised on errors performing I/O.
*/
@Deprecated
public Writer(Configuration conf, FileSystem fs, String dirName,
@@ -154,7 +197,14 @@ public class MapFile {
valueClass(valClass));
}
- /** Create the named map using the named key comparator.
+ /** Create the named map using the named key comparator.
+ * @param conf configuration.
+ * @param fs filesystem.
+ * @param dirName the name of the map directory.
+ * @param comparator the key comparator.
+ * @param valClass the class of the values.
+ * @param compress the compression type.
+ * @throws IOException raised on errors performing I/O.
* @deprecated Use Writer(Configuration, Path, Option...) instead.
*/
@Deprecated
@@ -165,8 +215,18 @@ public class MapFile {
valueClass(valClass), compression(compress));
}
- /** Create the named map using the named key comparator.
+ /**
+ * Create the named map using the named key comparator.
* @deprecated Use Writer(Configuration, Path, Option...) instead.
+ *
+ * @param conf configuration.
+ * @param fs filesystem.
+ * @param dirName the name of the map directory.
+ * @param comparator the key comparator.
+ * @param valClass the class of the values.
+ * @param compress the compression type.
+ * @param progress the progress reporter.
+ * @throws IOException raised on errors performing I/O.
*/
@Deprecated
public Writer(Configuration conf, FileSystem fs, String dirName,
@@ -178,8 +238,19 @@ public class MapFile {
progressable(progress));
}
- /** Create the named map using the named key comparator.
+ /**
+ * Create the named map using the named key comparator.
* @deprecated Use Writer(Configuration, Path, Option...) instead.
+ *
+ * @param conf configuration.
+ * @param fs filesystem.
+ * @param dirName the name of the map directory.
+ * @param comparator the key comparator.
+ * @param valClass the class of the values.
+ * @param compress the compression type.
+ * @param codec the compression codec.
+ * @param progress the progress reporter.
+ * @throws IOException raised on errors performing I/O.
*/
@Deprecated
public Writer(Configuration conf, FileSystem fs, String dirName,
@@ -285,16 +356,26 @@ public class MapFile {
this.index = SequenceFile.createWriter(conf, indexOptions);
}
- /** The number of entries that are added before an index entry is added.*/
+ /**
+ * The number of entries that are added before an index entry is added.
+ * @return the index interval.
+ */
public int getIndexInterval() { return indexInterval; }
- /** Sets the index interval.
+ /**
+ * Sets the index interval.
* @see #getIndexInterval()
+ *
+ * @param interval the index interval.
*/
public void setIndexInterval(int interval) { indexInterval = interval; }
- /** Sets the index interval and stores it in conf
+ /**
+ * Sets the index interval and stores it in conf.
* @see #getIndexInterval()
+ *
+ * @param conf configuration.
+ * @param interval the index interval.
*/
public static void setIndexInterval(Configuration conf, int interval) {
conf.setInt(INDEX_INTERVAL, interval);
@@ -307,8 +388,14 @@ public class MapFile {
index.close();
}
- /** Append a key/value pair to the map. The key must be greater or equal
- * to the previous key added to the map. */
+ /**
+ * Append a key/value pair to the map. The key must be greater or equal
+ * to the previous key added to the map.
+ *
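+ * <p>For example, assuming an already constructed writer with
+ * IntWritable keys and Text values:
+ * <pre>{@code
+ * writer.append(new IntWritable(1), new Text("one"));
+ * writer.append(new IntWritable(2), new Text("two"));  // keys in order
+ * writer.close();
+ * }</pre>
+ *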
+ * @param key the key to append.
+ * @param val the value to append.
+ * @throws IOException raised on errors performing I/O.
+ */
public synchronized void append(WritableComparable key, Writable val)
throws IOException {
@@ -370,10 +457,18 @@ public class MapFile {
private WritableComparable[] keys;
private long[] positions;
- /** Returns the class of keys in this file. */
+ /**
+ * Returns the class of keys in this file.
+ *
+ * @return the class of keys in this file.
+ */
public Class<?> getKeyClass() { return data.getKeyClass(); }
- /** Returns the class of values in this file. */
+ /**
+ * Returns the class of values in this file.
+ *
+ * @return the class of values in this file.
+ */
public Class<?> getValueClass() { return data.getValueClass(); }
public static interface Option extends SequenceFile.Reader.Option {}
@@ -403,8 +498,14 @@ public class MapFile {
open(dir, comparator, conf, opts);
}
- /** Construct a map reader for the named map.
+ /**
+ * Construct a map reader for the named map.
* @deprecated
+ *
+ * @param fs FileSystem.
+ * @param dirName the name of the map directory.
+ * @param conf configuration.
+ * @throws IOException raised on errors performing I/O.
*/
@Deprecated
public Reader(FileSystem fs, String dirName,
@@ -412,8 +513,15 @@ public class MapFile {
this(new Path(dirName), conf);
}
- /** Construct a map reader for the named map using the named comparator.
+ /**
+ * Construct a map reader for the named map using the named comparator.
* @deprecated
+ *
+ * @param fs FileSystem.
+ * @param dirName the name of the map directory.
+ * @param comparator WritableComparator.
+ * @param conf Configuration.
+ * @throws IOException raised on errors performing I/O.
*/
@Deprecated
public Reader(FileSystem fs, String dirName, WritableComparator comparator,
@@ -450,6 +558,12 @@ public class MapFile {
/**
* Override this method to specialize the type of
* {@link SequenceFile.Reader} returned.
+ *
+ * @param dataFile data file.
+ * @param conf configuration.
+ * @param options options.
+ * @throws IOException raised on errors performing I/O.
+ * @return SequenceFile.Reader.
*/
protected SequenceFile.Reader
createDataFileReader(Path dataFile, Configuration conf,
@@ -516,13 +630,21 @@ public class MapFile {
}
}
- /** Re-positions the reader before its first key. */
+ /**
+ * Re-positions the reader before its first key.
+ *
+ * @throws IOException raised on errors performing I/O.
+ */
public synchronized void reset() throws IOException {
data.seek(firstPosition);
}
- /** Get the key at approximately the middle of the file. Or null if the
- * file is empty.
+ /**
+ * Get the key at approximately the middle of the file. Or null if the
+ * file is empty.
+ *
+ * @throws IOException raised on errors performing I/O.
+ * @return the middle key, or null if the file is empty.
*/
public synchronized WritableComparable midKey() throws IOException {
@@ -534,9 +656,11 @@ public class MapFile {
return keys[(count - 1) / 2];
}
- /** Reads the final key from the file.
+ /**
+ * Reads the final key from the file.
*
* @param key key to read into
+ * @throws IOException raised on errors performing I/O.
*/
public synchronized void finalKey(WritableComparable key)
throws IOException {
@@ -556,9 +680,14 @@ public class MapFile {
}
}
- /** Positions the reader at the named key, or if none such exists, at the
+ /**
+ * Positions the reader at the named key, or if none such exists, at the
* first entry after the named key. Returns true iff the named key exists
* in this map.
+ *
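+ * <p>A sketch, assuming a map with IntWritable keys (key value chosen
+ * for illustration):
+ * <pre>{@code
+ * boolean found = reader.seek(new IntWritable(42));
+ * }</pre>
+ *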
+ * @param key the key to position at.
+ * @return true iff the named key exists in this map.
+ * @throws IOException raised on errors performing I/O.
*/
public synchronized boolean seek(WritableComparable key) throws IOException {
return seekInternal(key) == 0;
@@ -669,15 +798,28 @@ public class MapFile {
return -(low + 1); // key not found.
}
- /** Read the next key/value pair in the map into <code>key</code> and
+ /**
+ * Read the next key/value pair in the map into <code>key</code> and
* <code>val</code>. Returns true if such a pair exists and false when at
- * the end of the map */
+ * the end of the map.
+ *
+ * @param key the key to read into.
+ * @param val the value to read into.
+ * @return true if such a pair exists, false at the end of the map.
+ * @throws IOException raised on errors performing I/O.
+ */
public synchronized boolean next(WritableComparable key, Writable val)
throws IOException {
return data.next(key, val);
}
- /** Return the value for the named key, or null if none exists. */
+ /**
+ * Return the value for the named key, or null if none exists.
+ * @param key the key to look up.
+ * @param val the value object to read into.
+ * @return the value for the named key, or null if none exists.
+ * @throws IOException raised on errors performing I/O.
+ */
public synchronized Writable get(WritableComparable key, Writable val)
throws IOException {
if (seek(key)) {
@@ -692,9 +834,10 @@ public class MapFile {
* Returns <code>key</code> or if it does not exist, at the first entry
* after the named key.
*
- * @param key - key that we're trying to find
- * @param val - data value if key is found
- * @return - the key that was the closest match or null if eof.
+ * @param key key that we're trying to find.
+ * @param val data value if key is found.
+ * @return the key that was the closest match or null if eof.
+ * @throws IOException raised on errors performing I/O.
*/
public synchronized WritableComparable getClosest(WritableComparable key,
Writable val)
@@ -711,6 +854,7 @@ public class MapFile {
* the first entry that falls just before the <code>key</code>. Otherwise,
* return the record that sorts just after.
* @return - the key that was the closest match or null if eof.
+ * @throws IOException raised on errors performing I/O.
*/
public synchronized WritableComparable getClosest(WritableComparable key,
Writable val, final boolean before)
@@ -730,7 +874,10 @@ public class MapFile {
return nextKey;
}
- /** Close the map. */
+ /**
+ * Close the map.
+ * @throws IOException raised on errors performing I/O.
+ */
@Override
public synchronized void close() throws IOException {
if (!indexClosed) {
@@ -741,7 +888,13 @@ public class MapFile {
}
- /** Renames an existing map directory. */
+ /**
+ * Renames an existing map directory.
+ * @param fs the FileSystem.
+ * @param oldName the current map directory name.
+ * @param newName the new map directory name.
+ * @throws IOException raised on errors performing I/O.
+ */
public static void rename(FileSystem fs, String oldName, String newName)
throws IOException {
Path oldDir = new Path(oldName);
@@ -751,7 +904,12 @@ public class MapFile {
}
}
- /** Deletes the named map file. */
+ /**
+ * Deletes the named map file.
+ * @param fs the FileSystem.
+ * @param name the name of the map directory to delete.
+ * @throws IOException raised on errors performing I/O.
+ */
public static void delete(FileSystem fs, String name) throws IOException {
Path dir = new Path(name);
Path data = new Path(dir, DATA_FILE_NAME);
@@ -769,8 +927,9 @@ public class MapFile {
* @param keyClass key class (has to be a subclass of Writable)
* @param valueClass value class (has to be a subclass of Writable)
* @param dryrun do not perform any changes, just report what needs to be done
+ * @param conf configuration.
* @return number of valid entries in this MapFile, or -1 if no fixing was needed
- * @throws Exception
+ * @throws Exception if fixing the map fails.
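+ *
+ * <p>A sketch of re-creating a missing index (arguments chosen for
+ * illustration):
+ * <pre>{@code
+ * long valid = MapFile.fix(fs, new Path("/tmp/map"),
+ *     IntWritable.class, Text.class, false, conf);
+ * }</pre>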
*/
public static long fix(FileSystem fs, Path dir,
Class<? extends Writable> keyClass,
@@ -870,11 +1029,12 @@ public class MapFile {
}
/**
- * Merge multiple MapFiles to one Mapfile
+ * Merge multiple MapFiles to one Mapfile.
*
- * @param inMapFiles
- * @param outMapFile
- * @throws IOException
+ * @param inMapFiles the input MapFiles to merge.
+ * @param deleteInputs whether to delete the input files after merging.
+ * @param outMapFile the merged output MapFile.
+ * @throws IOException raised on errors performing I/O.
*/
public void merge(Path[] inMapFiles, boolean deleteInputs,
Path outMapFile) throws IOException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MultipleIOException.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MultipleIOException.java
index c9d7ade4306..452965b7c82 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MultipleIOException.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/MultipleIOException.java
@@ -42,7 +42,11 @@ public class MultipleIOException extends IOException {
/** @return the underlying exceptions */
public List<IOException> getExceptions() {return exceptions;}
- /** A convenient method to create an {@link IOException}. */
+ /**
+ * A convenient method to create an {@link IOException}.
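+ *
+ * <p>A minimal sketch:
+ * <pre>{@code
+ * List<IOException> errors = new ArrayList<>();
+ * errors.add(new IOException("disk one failed"));
+ * errors.add(new IOException("disk two failed"));
+ * throw MultipleIOException.createIOException(errors);
+ * }</pre>
+ *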
+ * @param exceptions the list of exceptions to combine.
+ * @return a single IOException for the list, or null if it is empty.
+ */
public static IOException createIOException(List<IOException> exceptions) {
if (exceptions == null || exceptions.isEmpty()) {
return null;
@@ -60,7 +64,10 @@ public class MultipleIOException extends IOException {
public static class Builder {
private List<IOException> exceptions;
- /** Add the given {@link Throwable} to the exception list. */
+ /**
+ * Add the given {@link Throwable} to the exception list.
+ * @param t the Throwable to add to the exception list.
+ */
public void add(Throwable t) {
if (exceptions == null) {
exceptions = new ArrayList<>();
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/NullWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/NullWritable.java
index 77c590fdb63..d6e4846264f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/NullWritable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/NullWritable.java
@@ -32,7 +32,10 @@ public class NullWritable implements WritableComparable {
private NullWritable() {} // no public ctor
- /** Returns the single instance of this class. */
+ /**
+ * Returns the single instance of this class.
+ * @return the single instance of this class.
+ */
public static NullWritable get() { return THIS; }
@Override
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ObjectWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ObjectWritable.java
index b35a32f288b..29c06a01ad6 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ObjectWritable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ObjectWritable.java
@@ -54,13 +54,22 @@ public class ObjectWritable implements Writable, Configurable {
this.instance = instance;
}
- /** Return the instance, or null if none. */
+ /**
+ * Return the instance, or null if none.
+ * @return the instance, or null if none.
+ */
public Object get() { return instance; }
- /** Return the class this is meant to be. */
+ /**
+ * Return the class this is meant to be.
+ * @return the class this is meant to be.
+ */
public Class getDeclaredClass() { return declaredClass; }
- /** Reset the instance. */
+ /**
+ * Reset the instance.
+ * @param instance the instance to set.
+ */
public void set(Object instance) {
this.declaredClass = instance.getClass();
this.instance = instance;
@@ -120,8 +129,16 @@ public class ObjectWritable implements Writable, Configurable {
}
}
- /** Write a {@link Writable}, {@link String}, primitive type, or an array of
- * the preceding. */
+ /**
+ * Write a {@link Writable}, {@link String}, primitive type, or an array of
+ * the preceding.
+ *
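+ * <p>A minimal round-trip sketch using this package's in-memory buffers
+ * ({@code conf} is assumed to be an existing Configuration):
+ * <pre>{@code
+ * DataOutputBuffer out = new DataOutputBuffer();
+ * ObjectWritable.writeObject(out, new Text("hi"), Text.class, conf);
+ * DataInputBuffer in = new DataInputBuffer();
+ * in.reset(out.getData(), out.getLength());
+ * Object copy = ObjectWritable.readObject(in, conf);
+ * }</pre>
+ *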
+ * @param out the DataOutput to write to.
+ * @param instance the instance to write.
+ * @param declaredClass the declared class of the instance.
+ * @param conf the Configuration.
+ * @throws IOException raised on errors performing I/O.
+ */
public static void writeObject(DataOutput out, Object instance,
Class declaredClass,
Configuration conf) throws IOException {
@@ -137,6 +154,13 @@ public class ObjectWritable implements Writable, Configurable {
* usages, to preserve the ability to interchange files with other clusters
* that may not be running the same version of software. Sometime in ~2013
* we can consider removing this parameter and always using the compact format.
+ *
+ * @param out the DataOutput to write to.
+ * @param instance the instance to write.
+ * @param declaredClass the declared class of the instance.
+ * @param conf the Configuration.
+ * @param allowCompactArrays whether the compact array format may be used.
+ * @throws IOException raised on errors performing I/O.
*/
public static void writeObject(DataOutput out, Object instance,
Class declaredClass, Configuration conf, boolean allowCompactArrays)
@@ -210,15 +234,30 @@ public class ObjectWritable implements Writable, Configurable {
}
- /** Read a {@link Writable}, {@link String}, primitive type, or an array of
- * the preceding. */
+ /**
+ * Read a {@link Writable}, {@link String}, primitive type, or an array of
+ * the preceding.
+ *
+ * @param in the DataInput to read from.
+ * @param conf the Configuration.
+ * @return the instance that was read.
+ * @throws IOException raised on errors performing I/O.
+ */
public static Object readObject(DataInput in, Configuration conf)
throws IOException {
return readObject(in, null, conf);
}
- /** Read a {@link Writable}, {@link String}, primitive type, or an array of
- * the preceding. */
+ /**
+ * Read a {@link Writable}, {@link String}, primitive type, or an array of
+ * the preceding.
+ *
+ * @param in the DataInput to read from.
+ * @param objectWritable the ObjectWritable to reuse, or null.
+ * @param conf the Configuration.
+ * @return the instance that was read.
+ * @throws IOException raised on errors performing I/O.
+ */
@SuppressWarnings("unchecked")
public static Object readObject(DataInput in, ObjectWritable objectWritable, Configuration conf)
throws IOException {
@@ -365,6 +404,10 @@ public class ObjectWritable implements Writable, Configurable {
* Find and load the class with given name className by first finding
* it in the specified conf . If the specified conf is null,
* try load it directly.
+ *
+ * @param conf the Configuration.
+ * @param className the name of the class to load.
+ * @return the loaded Class.
*/
public static Class<?> loadClass(Configuration conf, String className) {
Class<?> declaredClass = null;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/OutputBuffer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/OutputBuffer.java
index 15a396dc2bf..f80c0a71883 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/OutputBuffer.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/OutputBuffer.java
@@ -77,21 +77,33 @@ public class OutputBuffer extends FilterOutputStream {
this.buffer = buffer;
}
- /** Returns the current contents of the buffer.
+ /**
+ * Returns the current contents of the buffer.
* Data is only valid to {@link #getLength()}.
+ *
+ * @return the current contents of the buffer.
*/
public byte[] getData() { return buffer.getData(); }
- /** Returns the length of the valid data currently in the buffer. */
+ /**
+ * Returns the length of the valid data currently in the buffer.
+ * @return the length of the valid data currently in the buffer.
+ */
public int getLength() { return buffer.getLength(); }
- /** Resets the buffer to empty. */
+ /**
+ * Resets the buffer to empty.
+ * @return this buffer.
+ */
public OutputBuffer reset() {
buffer.reset();
return this;
}
- /** Writes bytes from a InputStream directly into the buffer. */
+ /**
+ * Writes bytes from an InputStream directly into the buffer.
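+ *
+ * <p>A minimal sketch ({@code in} and {@code n} are assumed to be an
+ * open stream and a byte count):
+ * <pre>{@code
+ * OutputBuffer buffer = new OutputBuffer();
+ * buffer.write(in, n);             // copy n bytes into the buffer
+ * byte[] data = buffer.getData(); // valid up to getLength()
+ * }</pre>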
+ * @param in the InputStream to read from.
+ * @param length the number of bytes to write.
+ * @throws IOException raised on errors performing I/O.
+ */
public void write(InputStream in, int length) throws IOException {
buffer.write(in, length);
}
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/RawComparator.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/RawComparator.java
index a52190db5f4..354dda964e9 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/RawComparator.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/RawComparator.java
@@ -29,7 +29,7 @@ import org.apache.hadoop.io.serializer.DeserializerComparator;
* A {@link Comparator} that operates directly on byte representations of
* objects.
*
- * @param <T>
+ * @param <T> generic type.
* @see DeserializerComparator
*/
@InterfaceAudience.Public
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ReadaheadPool.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ReadaheadPool.java
index 65e751eca41..2a6fafce545 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ReadaheadPool.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ReadaheadPool.java
@@ -50,7 +50,7 @@ public class ReadaheadPool {
private static ReadaheadPool instance;
/**
- * Return the singleton instance for the current process.
+ * @return the singleton instance for the current process.
*/
public static ReadaheadPool getInstance() {
synchronized (ReadaheadPool.class) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SecureIOUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SecureIOUtils.java
index 016daf9f352..cddddcc6c9a 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SecureIOUtils.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SecureIOUtils.java
@@ -90,7 +90,7 @@ public class SecureIOUtils {
private final static FileSystem rawFilesystem;
/**
- * Open the given File for random read access, verifying the expected user/
+ * Open the given File for random read access, returning the opened
+ * RandomAccessFile, and verifying the expected user/
* group constraints if security is enabled.
*
* Note that this function provides no additional security checks if hadoop
@@ -114,8 +114,14 @@ public class SecureIOUtils {
}
/**
- * Same as openForRandomRead except that it will run even if security is off.
+ * Same as {@link #openForRandomRead} except that it will run even if security is off.
* This is used by unit tests.
+ *
+ * @param f the file to open.
+ * @param mode the access mode, as for RandomAccessFile.
+ * @param expectedOwner the expected user owner for the file.
+ * @param expectedGroup the expected group owner for the file.
+ * @return the opened RandomAccessFile.
+ * @throws IOException raised on errors performing I/O.
*/
@VisibleForTesting
protected static RandomAccessFile forceSecureOpenForRandomRead(File f,
@@ -145,6 +151,7 @@ public class SecureIOUtils {
* @param expectedGroup the expected group owner for the file
* @throws IOException if an IO Error occurred or the user/group does not
* match if security is enabled
+ * @return the opened FSDataInputStream.
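+ *
+ * <p>A sketch (path and principals chosen for illustration):
+ * <pre>{@code
+ * FSDataInputStream in = SecureIOUtils.openFSDataInputStream(
+ *     new File("/var/log/app.log"), "appuser", "appgroup");
+ * }</pre>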
*/
public static FSDataInputStream openFSDataInputStream(File file,
String expectedOwner, String expectedGroup) throws IOException {
@@ -157,6 +164,12 @@ public class SecureIOUtils {
/**
* Same as openFSDataInputStream except that it will run even if security is
* off. This is used by unit tests.
+ *
+ * @param file the file to open.
+ * @param expectedOwner the expected user owner for the file.
+ * @param expectedGroup the expected group owner for the file.
+ * @throws IOException raised on errors performing I/O.
+ * @return the opened FSDataInputStream.
*/
@VisibleForTesting
protected static FSDataInputStream forceSecureOpenFSDataInputStream(
@@ -182,7 +195,7 @@ public class SecureIOUtils {
* Open the given File for read access, verifying the expected user/group
* constraints if security is enabled.
*
- * Note that this function provides no additional checks if Hadoop
+ * <p>Note that this function provides no additional checks if Hadoop
* security is disabled, since doing the checks would be too expensive
* when native libraries are not available.
*
@@ -201,8 +214,12 @@ public class SecureIOUtils {
}
/**
- * Same as openForRead() except that it will run even if security is off.
+ * Same as openForRead() except that it will run even if security is off.
* This is used by unit tests.
+ * @param f the file to open.
+ * @param expectedOwner the expected user owner for the file.
+ * @param expectedGroup the expected group owner for the file.
+ * @return the opened FileInputStream.
+ * @throws IOException raised on errors performing I/O.
*/
@VisibleForTesting
protected static FileInputStream forceSecureOpenForRead(File f, String expectedOwner,
@@ -251,6 +268,7 @@ public class SecureIOUtils {
*
* @throws AlreadyExistsException if the file already exists
* @throws IOException if any other error occurred
+ * @return the opened FileOutputStream.
*/
public static FileOutputStream createForWrite(File f, int permissions)
throws IOException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java
index 890e7916ab0..a0b45814f1c 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SequenceFile.java
@@ -269,7 +269,7 @@ public class SequenceFile {
* @param conf the configuration to use
* @param opts the options to create the file with
* @return a new Writer
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
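+ *
+ * <p>A minimal sketch of the options-based API (path and classes chosen
+ * for illustration):
+ * <pre>{@code
+ * SequenceFile.Writer writer = SequenceFile.createWriter(conf,
+ *     SequenceFile.Writer.file(new Path("/tmp/data.seq")),
+ *     SequenceFile.Writer.keyClass(IntWritable.class),
+ *     SequenceFile.Writer.valueClass(Text.class));
+ * writer.append(new IntWritable(1), new Text("one"));
+ * writer.close();
+ * }</pre>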
*/
public static Writer createWriter(Configuration conf, Writer.Option... opts
) throws IOException {
@@ -301,7 +301,7 @@ public class SequenceFile {
* @param keyClass The 'key' type.
* @param valClass The 'value' type.
* @return Returns the handle to the constructed SequenceFile Writer.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
* @deprecated Use {@link #createWriter(Configuration, Writer.Option...)}
* instead.
*/
@@ -323,7 +323,7 @@ public class SequenceFile {
* @param valClass The 'value' type.
* @param compressionType The compression type.
* @return Returns the handle to the constructed SequenceFile Writer.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
* @deprecated Use {@link #createWriter(Configuration, Writer.Option...)}
* instead.
*/
@@ -348,7 +348,7 @@ public class SequenceFile {
* @param compressionType The compression type.
* @param progress The Progressable object to track progress.
* @return Returns the handle to the constructed SequenceFile Writer.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
* @deprecated Use {@link #createWriter(Configuration, Writer.Option...)}
* instead.
*/
@@ -375,7 +375,7 @@ public class SequenceFile {
* @param compressionType The compression type.
* @param codec The compression codec.
* @return Returns the handle to the constructed SequenceFile Writer.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
* @deprecated Use {@link #createWriter(Configuration, Writer.Option...)}
* instead.
*/
@@ -403,7 +403,7 @@ public class SequenceFile {
* @param progress The Progressable object to track progress.
* @param metadata The metadata of the file.
* @return Returns the handle to the constructed SequenceFile Writer.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
* @deprecated Use {@link #createWriter(Configuration, Writer.Option...)}
* instead.
*/
@@ -437,7 +437,7 @@ public class SequenceFile {
* @param progress The Progressable object to track progress.
* @param metadata The metadata of the file.
* @return Returns the handle to the constructed SequenceFile Writer.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
* @deprecated Use {@link #createWriter(Configuration, Writer.Option...)}
* instead.
*/
@@ -475,7 +475,7 @@ public class SequenceFile {
* @param codec The compression codec.
* @param metadata The metadata of the file.
* @return Returns the handle to the constructed SequenceFile Writer.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
@Deprecated
public static Writer
@@ -508,7 +508,7 @@ public class SequenceFile {
* @param createFlag gives the semantics of create: overwrite, append etc.
* @param opts file creation options; see {@link CreateOpts}.
* @return Returns the handle to the constructed SequenceFile Writer.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public static Writer
createWriter(FileContext fc, Configuration conf, Path name,
@@ -532,7 +532,7 @@ public class SequenceFile {
* @param codec The compression codec.
* @param progress The Progressable object to track progress.
* @return Returns the handle to the constructed SequenceFile Writer.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
* @deprecated Use {@link #createWriter(Configuration, Writer.Option...)}
* instead.
*/
@@ -560,7 +560,7 @@ public class SequenceFile {
* @param codec The compression codec.
* @param metadata The metadata of the file.
* @return Returns the handle to the constructed SequenceFile Writer.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
* @deprecated Use {@link #createWriter(Configuration, Writer.Option...)}
* instead.
*/
@@ -585,7 +585,7 @@ public class SequenceFile {
* @param compressionType The compression type.
* @param codec The compression codec.
* @return Returns the handle to the constructed SequenceFile Writer.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
* @deprecated Use {@link #createWriter(Configuration, Writer.Option...)}
* instead.
*/
@@ -603,22 +603,26 @@ public class SequenceFile {
/** The interface to 'raw' values of SequenceFiles. */
public static interface ValueBytes {
- /** Writes the uncompressed bytes to the outStream.
+ /**
+ * Writes the uncompressed bytes to the outStream.
* @param outStream : Stream to write uncompressed bytes into.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public void writeUncompressedBytes(DataOutputStream outStream)
throws IOException;
- /** Write compressed bytes to outStream.
+ /**
+ * Write compressed bytes to outStream.
* Note: that it will NOT compress the bytes if they are not compressed.
* @param outStream : Stream to write compressed bytes into.
+ * @throws IllegalArgumentException if an illegal or inappropriate argument is passed.
+ * @throws IOException raised on errors performing I/O.
*/
public void writeCompressedBytes(DataOutputStream outStream)
throws IllegalArgumentException, IOException;
/**
- * Size of stored data.
+ * @return Size of stored data.
*/
public int getSize();
}
@@ -1190,10 +1194,17 @@ public class SequenceFile {
codec, metadata, syncInterval);
}
- /** Create the named file.
+ /**
+ * Create the named file.
* @deprecated Use
* {@link SequenceFile#createWriter(Configuration, Writer.Option...)}
* instead.
+ * @param fs the FileSystem.
+ * @param conf the Configuration.
+ * @param name the file to create.
+ * @param keyClass the class of the keys.
+ * @param valClass the class of the values.
+ * @throws IOException raised on errors performing I/O.
*/
@Deprecated
public Writer(FileSystem fs, Configuration conf, Path name,
@@ -1203,10 +1214,19 @@ public class SequenceFile {
new Metadata(), SYNC_INTERVAL);
}
- /** Create the named file with write-progress reporter.
+ /**
+ * Create the named file with write-progress reporter.
* @deprecated Use
* {@link SequenceFile#createWriter(Configuration, Writer.Option...)}
* instead.
+ * @param fs the FileSystem.
+ * @param conf the Configuration.
+ * @param name the file to create.
+ * @param keyClass the class of the keys.
+ * @param valClass the class of the values.
+ * @param progress the progress reporter.
+ * @param metadata the file metadata.
+ * @throws IOException raised on errors performing I/O.
*/
@Deprecated
public Writer(FileSystem fs, Configuration conf, Path name,
@@ -1217,10 +1237,22 @@ public class SequenceFile {
null, metadata, SYNC_INTERVAL);
}
- /** Create the named file with write-progress reporter.
+ /**
+ * Create the named file with write-progress reporter.
* @deprecated Use
* {@link SequenceFile#createWriter(Configuration, Writer.Option...)}
* instead.
+ * @param fs the FileSystem.
+ * @param conf the Configuration.
+ * @param name the file to create.
+ * @param keyClass the class of the keys.
+ * @param valClass the class of the values.
+ * @param bufferSize the buffer size.
+ * @param replication the file replication factor.
+ * @param blockSize the file block size.
+ * @param progress the progress reporter.
+ * @param metadata the file metadata.
+ * @throws IOException raised on errors performing I/O.
*/
@Deprecated
public Writer(FileSystem fs, Configuration conf, Path name,
@@ -1321,16 +1353,19 @@ public class SequenceFile {
}
}
- /** Returns the class of keys in this file. */
+ /** @return the class of keys in this file. */
public Class getKeyClass() { return keyClass; }
- /** Returns the class of values in this file. */
+ /** @return the class of values in this file. */
public Class getValueClass() { return valClass; }
- /** Returns the compression codec of data in this file. */
+ /** @return the compression codec of data in this file. */
public CompressionCodec getCompressionCodec() { return codec; }
- /** create a sync point */
+ /**
+ * Create a sync point.
+ * @throws IOException raised on errors performing I/O.
+ */
public void sync() throws IOException {
if (sync != null && lastSyncPos != out.getPos()) {
out.writeInt(SYNC_ESCAPE); // mark the start of the sync
@@ -1340,8 +1375,9 @@ public class SequenceFile {
}
/**
- * flush all currently written data to the file system
+ * Flush all currently written data to the file system.
* @deprecated Use {@link #hsync()} or {@link #hflush()} instead
+ * @throws IOException raised on errors performing I/O.
*/
@Deprecated
public void syncFs() throws IOException {
@@ -1413,13 +1449,23 @@ public class SequenceFile {
}
}
- /** Append a key/value pair. */
+ /**
+ * Append a key/value pair.
+ * @param key the Writable key to append.
+ * @param val the Writable value to append.
+ * @throws IOException raised on errors performing I/O.
+ */
public void append(Writable key, Writable val)
throws IOException {
append((Object) key, (Object) val);
}
- /** Append a key/value pair. */
+ /**
+ * Append a key/value pair.
+ * @param key the key to append.
+ * @param val the value to append.
+ * @throws IOException raised on errors performing I/O.
+ */
@SuppressWarnings("unchecked")
public synchronized void append(Object key, Object val)
throws IOException {
@@ -1470,14 +1516,16 @@ public class SequenceFile {
val.writeUncompressedBytes(out); // value
}
- /** Returns the current length of the output file.
+ /** @return the current length of the output file.
*
* This always returns a synchronized position. In other words,
* immediately after calling {@link SequenceFile.Reader#seek(long)} with a position
* returned by this method, {@link SequenceFile.Reader#next(Writable)} may be called. However
* the key may be earlier in the file than key last written when this
* method was called (e.g., with block-compression, it may be the first key
- * in the block that was being written when this method was called).
+ * in the block that was being written when this method was called).
+ *
+ * @throws IOException raised on errors performing I/O.
*/
public synchronized long getLength() throws IOException {
return out.getPos();
@@ -1888,7 +1936,7 @@ public class SequenceFile {
* @param fs The file system used to open the file.
* @param file The file being read.
* @param conf Configuration
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
* @deprecated Use Reader(Configuration, Option...) instead.
*/
@Deprecated
@@ -1904,7 +1952,7 @@ public class SequenceFile {
* @param start The starting position.
* @param length The length being read.
* @param conf Configuration
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
* @deprecated Use Reader(Configuration, Reader.Option...) instead.
*/
@Deprecated
@@ -1949,7 +1997,7 @@ public class SequenceFile {
* @param length The length being read if it is {@literal >=} 0.
* Otherwise, the length is not available.
* @return The opened stream.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
protected FSDataInputStream openFile(FileSystem fs, Path file,
int bufferSize, long length) throws IOException {
@@ -2139,12 +2187,12 @@ public class SequenceFile {
in.close();
}
- /** Returns the name of the key class. */
+ /** @return the name of the key class. */
public String getKeyClassName() {
return keyClassName;
}
- /** Returns the class of keys in this file. */
+ /** @return the class of keys in this file. */
public synchronized Class<?> getKeyClass() {
if (null == keyClass) {
try {
@@ -2156,12 +2204,12 @@ public class SequenceFile {
return keyClass;
}
- /** Returns the name of the value class. */
+ /** @return the name of the value class. */
public String getValueClassName() {
return valClassName;
}
- /** Returns the class of values in this file. */
+ /** @return the class of values in this file. */
public synchronized Class<?> getValueClass() {
if (null == valClass) {
try {
@@ -2173,13 +2221,22 @@ public class SequenceFile {
return valClass;
}
- /** Returns true if values are compressed. */
+ /**
+ * Returns true if values are compressed.
+ * @return true if values are compressed.
+ */
public boolean isCompressed() { return decompress; }
- /** Returns true if records are block-compressed. */
+ /**
+ * Returns true if records are block-compressed.
+ * @return true if records are block-compressed.
+ */
public boolean isBlockCompressed() { return blockCompressed; }
- /** Returns the compression codec of data in this file. */
+ /**
+ * Returns the compression codec of data in this file.
+ * @return the compression codec of data in this file.
+ */
public CompressionCodec getCompressionCodec() { return codec; }
private byte[] getSync() {
@@ -2202,7 +2259,10 @@ public class SequenceFile {
}
}
- /** Returns the metadata object of the file */
+ /**
+ * Returns the metadata object of the file.
+ * @return the metadata object of the file.
+ */
public Metadata getMetadata() {
return this.metadata;
}
@@ -2311,7 +2371,7 @@ public class SequenceFile {
/**
* Get the 'value' corresponding to the last read 'key'.
* @param val : The 'value' to be read.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public synchronized void getCurrentValue(Writable val)
throws IOException {
@@ -2348,9 +2408,9 @@ public class SequenceFile {
}
/**
- * Get the 'value' corresponding to the last read 'key'.
+ * @return the 'value' corresponding to the last read 'key'.
* @param val : The 'value' to be read.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public synchronized Object getCurrentValue(Object val)
throws IOException {
@@ -2392,8 +2452,13 @@ public class SequenceFile {
return valDeserializer.deserialize(val);
}
- /** Read the next key in the file into <code>key</code>, skipping its
- * value. True if another entry exists, and false at end of file. */
+ /**
+ * Read the next key in the file into <code>key</code>, skipping its
+ * value.
+ *
+ * @param key the key to read into.
+ * @return true if another entry exists, and false at end of file.
+ * @throws IOException raised on errors performing I/O.
+ */
public synchronized boolean next(Writable key) throws IOException {
if (key.getClass() != getKeyClass())
throw new IOException("wrong key class: "+key.getClass().getName()
@@ -2440,9 +2505,16 @@ public class SequenceFile {
return true;
}
- /** Read the next key/value pair in the file into <code>key</code> and
- * <code>val</code>. Returns true if such a pair exists and false when at
- * end of file */
+ /**
+ * Read the next key/value pair in the file into <code>key</code> and
+ * <code>val</code>.
+ *
+ * @param key the key to read into.
+ * @param val the value to read into.
+ * @return true if such a pair exists and false when at end of file.
+ * @throws IOException raised on errors performing I/O.
+ */
public synchronized boolean next(Writable key, Writable val)
throws IOException {
if (val.getClass() != getValueClass())
@@ -2526,7 +2598,7 @@ public class SequenceFile {
* @param key - The buffer into which the key is read
* @param val - The 'raw' value
* @return Returns the total record length or -1 for end of file
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public synchronized int nextRaw(DataOutputBuffer key, ValueBytes val)
throws IOException {
@@ -2585,7 +2657,7 @@ public class SequenceFile {
* Read 'raw' keys.
* @param key - The buffer into which the key is read
* @return Returns the key length or -1 for end of file
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public synchronized int nextRawKey(DataOutputBuffer key)
throws IOException {
@@ -2624,8 +2696,14 @@ public class SequenceFile {
}
- /** Read the next key in the file, skipping its
- * value. Return null at end of file. */
+ /**
+ * Read the next key in the file, skipping its
+ * value.
+ *
+ * @param key the key to read into.
+ * @return the key that was read, or null at end of file.
+ * @throws IOException raised on errors performing I/O.
+ */
public synchronized Object next(Object key) throws IOException {
if (key != null && key.getClass() != getKeyClass()) {
throw new IOException("wrong key class: "+key.getClass().getName()
@@ -2682,7 +2760,7 @@ public class SequenceFile {
* Read 'raw' values.
* @param val - The 'raw' value
* @return Returns the value length
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public synchronized int nextRawValue(ValueBytes val)
throws IOException {
@@ -2722,16 +2800,20 @@ public class SequenceFile {
}
}
- /** disables sync. often invoked for tmp files */
+ /** Disables sync. Often invoked for tmp files. */
synchronized void ignoreSync() {
sync = null;
}
- /** Set the current byte position in the input file.
+ /**
+ * Set the current byte position in the input file.
*
* The position passed must be a position returned by {@link
* SequenceFile.Writer#getLength()} when writing this file. To seek to an arbitrary
- * position, use {@link SequenceFile.Reader#sync(long)}.
+ * position, use {@link SequenceFile.Reader#sync(long)}.
+ *
+ * @param position the byte position to seek to.
+ * @throws IOException raised on errors performing I/O.
*/
public synchronized void seek(long position) throws IOException {
in.seek(position);
@@ -2741,7 +2823,11 @@ public class SequenceFile {
}
}
- /** Seek to the next sync mark past a given position.*/
+ /**
+ * Seek to the next sync mark past a given position.
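+ *
+ * <p>For example, to start reading at an arbitrary offset, assuming
+ * Text keys and values (offset chosen for illustration):
+ * <pre>{@code
+ * reader.sync(1024L);  // advance to the next record boundary
+ * Text key = new Text();
+ * Text value = new Text();
+ * while (reader.next(key, value)) {
+ *   // records from the sync point onwards
+ * }
+ * }</pre>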
+ * @param position the position to start searching for a sync mark from.
+ * @throws IOException raised on errors performing I/O.
+ */
public synchronized void sync(long position) throws IOException {
if (position+SYNC_SIZE >= end) {
seek(end);
@@ -2777,10 +2863,13 @@ public class SequenceFile {
}
}
- /** Returns true iff the previous call to next passed a sync mark.*/
+ /** @return true iff the previous call to next passed a sync mark.*/
public synchronized boolean syncSeen() { return syncSeen; }
- /** Return the current byte position in the input file. */
+ /**
+ * @return the current byte position in the input file.
+ * @throws IOException raised on errors performing I/O.
+ */
public synchronized long getPosition() throws IOException {
return in.getPos();
}
@@ -2822,19 +2911,40 @@ public class SequenceFile {
private Progressable progressable = null;
- /** Sort and merge files containing the named classes. */
+ /**
+ * Sort and merge files containing the named classes.
+ * @param fs the FileSystem.
+ * @param keyClass the class of the keys.
+ * @param valClass the class of the values.
+ * @param conf the Configuration.
+ */
public Sorter(FileSystem fs, Class<? extends WritableComparable> keyClass,
Class valClass, Configuration conf) {
this(fs, WritableComparator.get(keyClass, conf), keyClass, valClass, conf);
}
- /** Sort and merge using an arbitrary {@link RawComparator}. */
+ /**
+ * Sort and merge using an arbitrary {@link RawComparator}.
+ * @param fs the FileSystem.
+ * @param comparator the RawComparator to order keys by.
+ * @param keyClass the class of the keys.
+ * @param valClass the class of the values.
+ * @param conf the Configuration.
+ */
public Sorter(FileSystem fs, RawComparator comparator, Class keyClass,
Class valClass, Configuration conf) {
this(fs, comparator, keyClass, valClass, conf, new Metadata());
}
- /** Sort and merge using an arbitrary {@link RawComparator}. */
+ /**
+ * Sort and merge using an arbitrary {@link RawComparator}.
+ * @param fs the FileSystem.
+ * @param comparator the RawComparator to order keys by.
+ * @param keyClass the class of the keys.
+ * @param valClass the class of the values.
+ * @param conf the Configuration.
+ * @param metadata the file metadata.
+ */
@SuppressWarnings("deprecation")
public Sorter(FileSystem fs, RawComparator comparator, Class keyClass,
Class valClass, Configuration conf, Metadata metadata) {
@@ -2863,19 +2973,28 @@ public class SequenceFile {
this.metadata = metadata;
}
- /** Set the number of streams to merge at once.*/
+ /**
+ * Set the number of streams to merge at once.
+ * @param factor the number of streams to merge at once.
+ */
public void setFactor(int factor) { this.factor = factor; }
- /** Get the number of streams to merge at once.*/
+ /** @return the number of streams to merge at once.*/
public int getFactor() { return factor; }
- /** Set the total amount of buffer memory, in bytes.*/
+ /**
+ * Set the total amount of buffer memory, in bytes.
+ * @param memory the total buffer memory, in bytes.
+ */
public void setMemory(int memory) { this.memory = memory; }
- /** Get the total amount of buffer memory, in bytes.*/
+ /** @return the total amount of buffer memory, in bytes.*/
public int getMemory() { return memory; }
- /** Set the progressable object in order to report progress. */
+ /**
+ * Set the progressable object in order to report progress.
+ * @param progressable the Progressable to report progress to.
+ */
public void setProgressable(Progressable progressable) {
this.progressable = progressable;
}
@@ -2885,6 +3004,7 @@ public class SequenceFile {
* @param inFiles the files to be sorted
* @param outFile the sorted output file
* @param deleteInput should the input files be deleted as they are read?
+ * @throws IOException raised on errors performing I/O.
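+ *
+ * <p>A minimal sketch (paths and classes chosen for illustration):
+ * <pre>{@code
+ * SequenceFile.Sorter sorter =
+ *     new SequenceFile.Sorter(fs, IntWritable.class, Text.class, conf);
+ * sorter.sort(new Path[] { new Path("/tmp/in.seq") },
+ *     new Path("/tmp/sorted.seq"), false);
+ * }</pre>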
*/
public void sort(Path[] inFiles, Path outFile,
boolean deleteInput) throws IOException {
@@ -2907,6 +3027,7 @@ public class SequenceFile {
* @param tempDir the directory where temp files are created during sort
* @param deleteInput should the input files be deleted as they are read?
* @return iterator the RawKeyValueIterator
+ * @throws IOException raised on errors performing I/O.
*/
public RawKeyValueIterator sortAndIterate(Path[] inFiles, Path tempDir,
boolean deleteInput) throws IOException {
@@ -2932,8 +3053,9 @@ public class SequenceFile {
/**
* The backwards compatible interface to sort.
- * @param inFile the input file to sort
- * @param outFile the sorted output file
+ * @param inFile the input file to sort.
+ * @param outFile the sorted output file.
+ * @throws IOException raised on errors performing I/O.
*/
public void sort(Path inFile, Path outFile) throws IOException {
sort(new Path[]{inFile}, outFile, false);
@@ -3151,27 +3273,32 @@ public class SequenceFile {
/** The interface to iterate over raw keys/values of SequenceFiles. */
public static interface RawKeyValueIterator {
- /** Gets the current raw key
+ /**
+ * Gets the current raw key.
* @return DataOutputBuffer
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
DataOutputBuffer getKey() throws IOException;
- /** Gets the current raw value
+ /**
+ * Gets the current raw value.
* @return ValueBytes
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
ValueBytes getValue() throws IOException;
- /** Sets up the current key and value (for getKey and getValue)
+ /**
+ * Sets up the current key and value (for getKey and getValue).
* @return true if there exists a key/value, false otherwise
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
boolean next() throws IOException;
- /** closes the iterator so that the underlying streams can be closed
- * @throws IOException
+ /**
+ * closes the iterator so that the underlying streams can be closed.
+ * @throws IOException raised on errors performing I/O.
*/
void close() throws IOException;
- /** Gets the Progress object; this has a float (0.0 - 1.0)
- * indicating the bytes processed by the iterator so far
+ /**
+ * @return the Progress object; this has a float (0.0 - 1.0)
+ * indicating the bytes processed by the iterator so far.
*/
Progress getProgress();
}
@@ -3181,7 +3308,7 @@ public class SequenceFile {
* @param segments the list of SegmentDescriptors
* @param tmpDir the directory to write temporary files into
* @return RawKeyValueIterator
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public RawKeyValueIterator merge(List<SegmentDescriptor> segments,
Path tmpDir)
@@ -3199,7 +3326,7 @@ public class SequenceFile {
* unnecessary
* @param tmpDir the directory to write temporary files into
* @return RawKeyValueIteratorMergeQueue
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public RawKeyValueIterator merge(Path [] inNames, boolean deleteInputs,
Path tmpDir)
@@ -3217,7 +3344,7 @@ public class SequenceFile {
* @param factor the factor that will be used as the maximum merge fan-in
* @param tmpDir the directory to write temporary files into
* @return RawKeyValueIteratorMergeQueue
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public RawKeyValueIterator merge(Path [] inNames, boolean deleteInputs,
int factor, Path tmpDir)
@@ -3243,7 +3370,7 @@ public class SequenceFile {
* @param deleteInputs true if the input files should be deleted when
* unnecessary
* @return RawKeyValueIteratorMergeQueue
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public RawKeyValueIterator merge(Path [] inNames, Path tempDir,
boolean deleteInputs)
@@ -3274,7 +3401,7 @@ public class SequenceFile {
* @param outputFile the path of the output file
* @param prog the Progressable to report status during the file write
* @return Writer
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public Writer cloneFileAttributes(Path inputFile, Path outputFile,
Progressable prog) throws IOException {
@@ -3296,10 +3423,10 @@ public class SequenceFile {
/**
* Writes records from RawKeyValueIterator into a file represented by the
- * passed writer
+ * passed writer.
* @param records the RawKeyValueIterator
* @param writer the Writer created earlier
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public void writeFile(RawKeyValueIterator records, Writer writer)
throws IOException {
@@ -3313,7 +3440,7 @@ public class SequenceFile {
/** Merge the provided files.
* @param inFiles the array of input path names
* @param outFile the final output file
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public void merge(Path[] inFiles, Path outFile) throws IOException {
if (fs.exists(outFile)) {
@@ -3649,10 +3776,13 @@ public class SequenceFile {
this.segmentPathName = segmentPathName;
}
- /** Do the sync checks */
+ /** Do the sync checks. */
public void doSync() {ignoreSync = false;}
- /** Whether to delete the files when no longer needed */
+ /**
+ * Whether to delete the files when no longer needed.
+ * @param preserve input boolean preserve.
+ */
public void preserveInput(boolean preserve) {
preserveInput = preserve;
}
@@ -3694,9 +3824,10 @@ public class SequenceFile {
return 37 * 17 + (int) (segmentOffset^(segmentOffset>>>32));
}
- /** Fills up the rawKey object with the key returned by the Reader
+ /**
+ * Fills up the rawKey object with the key returned by the Reader.
* @return true if there is a key returned; false, otherwise
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public boolean nextRawKey() throws IOException {
if (in == null) {
@@ -3725,18 +3856,19 @@ public class SequenceFile {
return (keyLength >= 0);
}
- /** Fills up the passed rawValue with the value corresponding to the key
- * read earlier
- * @param rawValue
+ /**
+ * Fills up the passed rawValue with the value corresponding to the key
+ * read earlier.
+ * @param rawValue input ValueBytes rawValue.
* @return the length of the value
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public int nextRawValue(ValueBytes rawValue) throws IOException {
int valLength = in.nextRawValue(rawValue);
return valLength;
}
- /** Returns the stored rawKey */
+ /** @return the stored rawKey. */
public DataOutputBuffer getKey() {
return rawKey;
}
@@ -3747,8 +3879,10 @@ public class SequenceFile {
this.in = null;
}
- /** The default cleanup. Subclasses can override this with a custom
- * cleanup
+ /**
+ * The default cleanup. Subclasses can override this with a custom
+ * cleanup.
+ * @throws IOException raised on errors performing I/O.
*/
public void cleanup() throws IOException {
close();
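
For orientation, the Sorter methods touched above compose end to end: merge() produces a RawKeyValueIterator, cloneFileAttributes() opens a Writer with matching compression settings, and writeFile() drains the iterator into it. A minimal sketch, assuming two pre-sorted local SequenceFiles with Text keys and values at the hypothetical paths part-0 and part-1:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.SequenceFile;
    import org.apache.hadoop.io.Text;

    public class MergeSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.getLocal(conf);
        SequenceFile.Sorter sorter =
            new SequenceFile.Sorter(fs, Text.class, Text.class, conf);
        // merge() returns a RawKeyValueIterator over the sorted inputs.
        SequenceFile.Sorter.RawKeyValueIterator it = sorter.merge(
            new Path[] {new Path("part-0"), new Path("part-1")},
            false, new Path("tmp"));
        // Clone the first input's attributes and stream the merged records out.
        SequenceFile.Writer out =
            sorter.cloneFileAttributes(new Path("part-0"), new Path("merged"), null);
        sorter.writeFile(it, out);
        out.close();
      }
    }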
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SetFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SetFile.java
index 118cce75136..de75810df0f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SetFile.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/SetFile.java
@@ -39,15 +39,29 @@ public class SetFile extends MapFile {
*/
public static class Writer extends MapFile.Writer {
- /** Create the named set for keys of the named class.
- * @deprecated pass a Configuration too
+ /**
+ * Create the named set for keys of the named class.
+ * @deprecated pass a Configuration too
+ * @param fs input FileSystem.
+ * @param dirName input dirName.
+ * @param keyClass input keyClass.
+ * @throws IOException raised on errors performing I/O.
*/
public Writer(FileSystem fs, String dirName,
Class<? extends WritableComparable> keyClass) throws IOException {
super(new Configuration(), fs, dirName, keyClass, NullWritable.class);
}
- /** Create a set naming the element class and compression type. */
+ /**
+ * Create a set naming the element class and compression type.
+ *
+ * @param conf input Configuration.
+ * @param fs input FileSystem.
+ * @param dirName input dirName.
+ * @param keyClass input keyClass.
+ * @param compress input compress.
+ * @throws IOException raised on errors performing I/O.
+ */
public Writer(Configuration conf, FileSystem fs, String dirName,
Class<? extends WritableComparable> keyClass,
SequenceFile.CompressionType compress)
@@ -55,7 +69,16 @@ public class SetFile extends MapFile {
this(conf, fs, dirName, WritableComparator.get(keyClass, conf), compress);
}
- /** Create a set naming the element comparator and compression type. */
+ /**
+ * Create a set naming the element comparator and compression type.
+ *
+ * @param conf input Configuration.
+ * @param fs input FileSystem.
+ * @param dirName input dirName.
+ * @param comparator input comparator.
+ * @param compress input compress.
+ * @throws IOException raised on errors performing I/O.
+ */
public Writer(Configuration conf, FileSystem fs, String dirName,
WritableComparator comparator,
SequenceFile.CompressionType compress) throws IOException {
@@ -65,8 +88,12 @@ public class SetFile extends MapFile {
compression(compress));
}
- /** Append a key to a set. The key must be strictly greater than the
- * previous key added to the set. */
+ /**
+ * Append a key to a set. The key must be strictly greater than the
+ * previous key added to the set.
+ * @param key input key.
+ * @throws IOException raised on errors performing I/O.
+ */
public void append(WritableComparable key) throws IOException{
append(key, NullWritable.get());
}
@@ -75,12 +102,25 @@ public class SetFile extends MapFile {
/** Provide access to an existing set file. */
public static class Reader extends MapFile.Reader {
- /** Construct a set reader for the named set.*/
+ /**
+ * Construct a set reader for the named set.
+ * @param fs input FileSystem.
+ * @param dirName input dirName.
+ * @param conf input Configuration.
+ * @throws IOException raised on errors performing I/O.
+ */
public Reader(FileSystem fs, String dirName, Configuration conf) throws IOException {
super(fs, dirName, conf);
}
- /** Construct a set reader for the named set using the named comparator.*/
+ /**
+ * Construct a set reader for the named set using the named comparator.
+ * @param fs input FileSystem.
+ * @param dirName input dirName.
+ * @param comparator input comparator.
+ * @param conf input Configuration.
+ * @throws IOException raised on errors performing I/O.
+ */
public Reader(FileSystem fs, String dirName, WritableComparator comparator, Configuration conf)
throws IOException {
super(new Path(dirName), conf, comparator(comparator));
@@ -93,15 +133,26 @@ public class SetFile extends MapFile {
return super.seek(key);
}
- /** Read the next key in a set into <code>key</code>. Returns
- * true if such a key exists and false when at the end of the set. */
+ /**
+ * Read the next key in a set into <code>key</code>.
+ *
+ * @param key input key.
+ * @return true if such a key exists
+ * and false when at the end of the set.
+ * @throws IOException raised on errors performing I/O.
+ */
public boolean next(WritableComparable key)
throws IOException {
return next(key, NullWritable.get());
}
- /** Read the matching key from a set into <code>key</code>.
- * Returns <code>key</code>, or null if no match exists. */
+ /**
+ * Read the matching key from a set into <code>key</code>.
+ *
+ * @param key input key.
+ * @return <code>key</code>, or null if no match exists.
+ * @throws IOException raised on errors performing I/O.
+ */
public WritableComparable get(WritableComparable key)
throws IOException {
if (seek(key)) {
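
A short sketch of the Writer/Reader pair documented above, assuming a writable local directory demo.set; keys must be appended in strictly increasing order, and next() returns false once the set is exhausted:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.io.SequenceFile;
    import org.apache.hadoop.io.SetFile;
    import org.apache.hadoop.io.Text;

    public class SetFileSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.getLocal(conf);
        SetFile.Writer w = new SetFile.Writer(conf, fs, "demo.set",
            Text.class, SequenceFile.CompressionType.NONE);
        w.append(new Text("alpha"));   // strictly increasing key order required
        w.append(new Text("beta"));
        w.close();

        SetFile.Reader r = new SetFile.Reader(fs, "demo.set", conf);
        Text key = new Text();
        while (r.next(key)) {          // false at the end of the set
          System.out.println(key);
        }
        r.close();
      }
    }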
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ShortWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ShortWritable.java
index be09df18017..96e6cacae87 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ShortWritable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/ShortWritable.java
@@ -38,12 +38,15 @@ public class ShortWritable implements WritableComparable<ShortWritable> {
set(value);
}
- /** Set the value of this ShortWritable. */
+ /**
+ * Set the value of this ShortWritable.
+ * @param value input value.
+ */
public void set(short value) {
this.value = value;
}
- /** Return the value of this ShortWritable. */
+ /** @return the value of this ShortWritable. */
public short get() {
return value;
}
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java
index 5ca7f3c84ca..86fb1ff9a54 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Text.java
@@ -90,6 +90,7 @@ public class Text extends BinaryComparable
/**
* Construct from a string.
+ * @param string input string.
*/
public Text(String string) {
set(string);
@@ -97,6 +98,7 @@ public class Text extends BinaryComparable
/**
* Construct from another text.
+ * @param utf8 input utf8.
*/
public Text(Text utf8) {
set(utf8);
@@ -104,13 +106,15 @@ public class Text extends BinaryComparable
/**
* Construct from a byte array.
+ *
+ * @param utf8 input utf8.
*/
public Text(byte[] utf8) {
set(utf8);
}
/**
- * Get a copy of the bytes that is exactly the length of the data.
+ * @return a copy of the bytes that is exactly the length of the data.
* See {@link #getBytes()} for faster access to the underlying array.
*/
public byte[] copyBytes() {
@@ -136,7 +140,7 @@ public class Text extends BinaryComparable
}
/**
- * Returns the length of this text. The length is equal to the number of
+ * @return the length of this text. The length is equal to the number of
* Unicode code units in the text.
*/
public int getTextLength() {
@@ -149,7 +153,9 @@ public class Text extends BinaryComparable
/**
* Returns the Unicode Scalar Value (32-bit integer value)
* for the character at <code>position</code>. Note that this
- * method avoids using the converter or doing String instantiation
+ * method avoids using the converter or doing String instantiation.
+ *
+ * @param position input position.
* @return the Unicode scalar value at position or -1
* if the position is invalid or points to a
* trailing byte
@@ -172,6 +178,9 @@ public class Text extends BinaryComparable
* position is measured in bytes and the return value is in
* terms of byte position in the buffer. The backing buffer is
* not converted to a string for this operation.
+ *
+ * @param what input what.
+ * @param start input start.
* @return byte position of the first occurrence of the search
* string in the UTF-8 buffer or -1 if not found
*/
@@ -213,6 +222,8 @@ public class Text extends BinaryComparable
/**
* Set to contain the contents of a string.
+ *
+ * @param string input string.
*/
public void set(String string) {
try {
@@ -229,6 +240,8 @@ public class Text extends BinaryComparable
* Set to a utf8 byte array. If the length of <code>utf8</code> is
* zero, actually clear {@link #bytes} and any existing
* data is lost.
+ *
+ * @param utf8 input utf8.
*/
public void set(byte[] utf8) {
if (utf8.length == 0) {
@@ -242,6 +255,7 @@ public class Text extends BinaryComparable
/**
* Copy a text.
+ * @param other other.
*/
public void set(Text other) {
set(other.getBytes(), 0, other.getLength());
@@ -349,6 +363,8 @@ public class Text extends BinaryComparable
/**
* Skips over one Text in the input.
+ * @param in input in.
+ * @throws IOException raised on errors performing I/O.
*/
public static void skip(DataInput in) throws IOException {
int length = WritableUtils.readVInt(in);
@@ -359,6 +375,10 @@ public class Text extends BinaryComparable
* Read a Text object whose length is already known.
* This allows creating Text from a stream which uses a different serialization
* format.
+ *
+ * @param in input in.
+ * @param len input len.
+ * @throws IOException raised on errors performing I/O.
*/
public void readWithKnownLength(DataInput in, int len) throws IOException {
ensureCapacity(len);
@@ -426,9 +446,13 @@ public class Text extends BinaryComparable
/// STATIC UTILITIES FROM HERE DOWN
/**
- * Converts the provided byte array to a String using the
+ * @return the provided byte array converted to a String using the
* UTF-8 encoding. If the input is malformed,
* replace by a default value.
+ *
+ * @param utf8 input utf8.
+ * @throws CharacterCodingException when a character
+ * encoding or decoding error occurs.
*/
public static String decode(byte[] utf8) throws CharacterCodingException {
return decode(ByteBuffer.wrap(utf8), true);
@@ -440,11 +464,18 @@ public class Text extends BinaryComparable
}
/**
- * Converts the provided byte array to a String using the
+ * @return the provided byte array converted to a String using the
* UTF-8 encoding. If <code>replace</code> is true, then
* malformed input is replaced with the
* substitution character, which is U+FFFD. Otherwise the
* method throws a MalformedInputException.
+ *
+ * @param utf8 input utf8.
+ * @param start input start.
+ * @param length input length.
+ * @param replace input replace.
+ * @throws CharacterCodingException when a character
+ * encoding or decoding error occurs.
*/
public static String decode(byte[] utf8, int start, int length, boolean replace)
throws CharacterCodingException {
@@ -472,8 +503,12 @@ public class Text extends BinaryComparable
* Converts the provided String to bytes using the
* UTF-8 encoding. If the input is malformed,
* invalid chars are replaced by a default value.
+ *
+ * @param string input string.
* @return ByteBuffer: bytes stores at ByteBuffer.array()
* and length is ByteBuffer.limit()
+ * @throws CharacterCodingException when a character
+ * encoding or decoding error occurs.
*/
public static ByteBuffer encode(String string)
@@ -487,8 +522,13 @@ public class Text extends BinaryComparable
* malformed input is replaced with the
* substitution character, which is U+FFFD. Otherwise the
* method throws a MalformedInputException.
+ *
+ * @param string input string.
+ * @param replace input replace.
* @return ByteBuffer: bytes stores at ByteBuffer.array()
* and length is ByteBuffer.limit()
+ * @throws CharacterCodingException when a character
+ * encoding or decoding error occurs.
*/
public static ByteBuffer encode(String string, boolean replace)
throws CharacterCodingException {
@@ -508,13 +548,20 @@ public class Text extends BinaryComparable
static final public int DEFAULT_MAX_LEN = 1024 * 1024;
- /** Read a UTF8 encoded string from in
+ /**
+ * @return the UTF8 encoded string read from in.
+ * @param in input in.
+ * @throws IOException raised on errors performing I/O.
*/
public static String readString(DataInput in) throws IOException {
return readString(in, Integer.MAX_VALUE);
}
- /** Read a UTF8 encoded string with a maximum size
+ /**
+ * @return the UTF8 encoded string read, up to the maximum size.
+ * @param in input datainput.
+ * @param maxLength input maxLength.
+ * @throws IOException raised on errors performing I/O.
*/
public static String readString(DataInput in, int maxLength)
throws IOException {
@@ -526,6 +573,11 @@ public class Text extends BinaryComparable
/**
* Write a UTF8 encoded string to out.
+ *
+ * @param out input out.
+ * @param s input s.
+ * @throws IOException raised on errors performing I/O.
+ * @return the length of the encoded string, in bytes.
*/
public static int writeString(DataOutput out, String s) throws IOException {
ByteBuffer bytes = encode(s);
@@ -536,7 +588,12 @@ public class Text extends BinaryComparable
}
/**
- * Write a UTF8 encoded string with a maximum size to out.
+ * @return the length of the encoded string, in bytes.
+ *
+ * @param out input out.
+ * @param s input s.
+ * @param maxLength input maxLength.
+ * @throws IOException raised on errors performing I/O.
*/
public static int writeString(DataOutput out, String s, int maxLength)
throws IOException {
@@ -670,9 +727,11 @@ public class Text extends BinaryComparable
3, 3, 3, 3, 3, 3, 3, 4, 4, 4, 4, 5, 5, 5, 5 };
/**
- * Returns the next code point at the current position in
+ * @return the next code point at the current position in
* the buffer. The buffer's position will be incremented.
* Any mark set on this buffer will be changed by this method!
+ *
+ * @param bytes input bytes.
*/
public static int bytesToCodePoint(ByteBuffer bytes) {
bytes.mark();
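
The encode/decode/bytesToCodePoint helpers documented above round-trip as follows; a minimal sketch (the sample string is arbitrary):

    import java.nio.ByteBuffer;
    import org.apache.hadoop.io.Text;

    public class TextCodecSketch {
      public static void main(String[] args) throws Exception {
        ByteBuffer bb = Text.encode("héllo");   // bytes at bb.array(), length bb.limit()
        String round = Text.decode(bb.array(), 0, bb.limit(), true); // replace malformed input
        int cp = Text.bytesToCodePoint(ByteBuffer.wrap(bb.array(), 0, bb.limit()));
        System.out.println(round + " / first code point U+" + Integer.toHexString(cp));
      }
    }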
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/UTF8.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/UTF8.java
index f5d33a13005..fdee830e6fe 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/UTF8.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/UTF8.java
@@ -63,27 +63,36 @@ public class UTF8 implements WritableComparable<UTF8> {
//set("");
}
- /** Construct from a given string. */
+ /**
+ * Construct from a given string.
+ * @param string input string.
+ */
public UTF8(String string) {
set(string);
}
- /** Construct from a given string. */
+ /**
+ * Construct from a given UTF8.
+ * @param utf8 input utf8.
+ */
public UTF8(UTF8 utf8) {
set(utf8);
}
- /** The raw bytes. */
+ /** @return The raw bytes. */
public byte[] getBytes() {
return bytes;
}
- /** The number of bytes in the encoded string. */
+ /** @return The number of bytes in the encoded string. */
public int getLength() {
return length;
}
- /** Set to contain the contents of a string. */
+ /**
+ * Set to contain the contents of a string.
+ * @param string input string.
+ */
public void set(String string) {
if (string.length() > 0xffff/3) { // maybe too long
LOG.warn("truncating long string: " + string.length()
@@ -108,7 +117,10 @@ public class UTF8 implements WritableComparable {
}
}
- /** Set to contain the contents of a string. */
+ /**
+ * Set to contain the contents of a string.
+ * @param other input other.
+ */
public void set(UTF8 other) {
length = other.length;
if (bytes == null || length > bytes.length) // grow buffer
@@ -124,7 +136,11 @@ public class UTF8 implements WritableComparable {
in.readFully(bytes, 0, length);
}
- /** Skips over one UTF8 in the input. */
+ /**
+ * Skips over one UTF8 in the input.
+ * @param in datainput.
+ * @throws IOException raised on errors performing I/O.
+ */
public static void skip(DataInput in) throws IOException {
int length = in.readUnsignedShort();
WritableUtils.skipFully(in, length);
@@ -214,8 +230,10 @@ public class UTF8 implements WritableComparable {
/// These are probably not used much anymore, and might be removed...
- /** Convert a string to a UTF-8 encoded byte array.
+ /**
+ * @return the string as a UTF-8 encoded byte array.
* @see String#getBytes(String)
+ * @param string input string.
*/
public static byte[] getBytes(String string) {
byte[] result = new byte[utf8Length(string)];
@@ -231,8 +249,9 @@ public class UTF8 implements WritableComparable {
}
/**
- * Convert a UTF-8 encoded byte array back into a string.
+ * @return the UTF-8 encoded byte array converted back into a string.
*
+ * @param bytes input bytes.
* @throws IOException if the byte array is invalid UTF8
*/
public static String fromBytes(byte[] bytes) throws IOException {
@@ -243,9 +262,12 @@ public class UTF8 implements WritableComparable {
return buf.toString();
}
- /** Read a UTF-8 encoded string.
+ /**
+ * @return the UTF-8 encoded string read from in.
*
* @see DataInput#readUTF()
+ * @param in DataInput.
+ * @throws IOException raised on errors performing I/O.
*/
public static String readString(DataInput in) throws IOException {
int bytes = in.readUnsignedShort();
@@ -318,9 +340,13 @@ public class UTF8 implements WritableComparable {
return (char) ((codePoint & 0x3ff) + Character.MIN_LOW_SURROGATE);
}
- /** Write a UTF-8 encoded string.
+ /**
+ * @return the length of the UTF-8 encoded string, in bytes.
*
* @see DataOutput#writeUTF(String)
+ * @param out input out.
+ * @param s input s.
+ * @throws IOException raised on errors performing I/O.
*/
public static int writeString(DataOutput out, String s) throws IOException {
if (s.length() > 0xffff/3) { // maybe too long
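
UTF8 is deprecated in favour of Text, but the readString/writeString pair above still round-trips through the in-memory buffers; a minimal sketch:

    import org.apache.hadoop.io.DataInputBuffer;
    import org.apache.hadoop.io.DataOutputBuffer;
    import org.apache.hadoop.io.UTF8;

    public class Utf8RoundTrip {
      public static void main(String[] args) throws Exception {
        DataOutputBuffer out = new DataOutputBuffer();
        UTF8.writeString(out, "hello");          // length-prefixed UTF-8
        DataInputBuffer in = new DataInputBuffer();
        in.reset(out.getData(), out.getLength());
        System.out.println(UTF8.readString(in)); // prints "hello"
      }
    }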
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VIntWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VIntWritable.java
index f537524c4b4..7d3f680858e 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VIntWritable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VIntWritable.java
@@ -37,10 +37,13 @@ public class VIntWritable implements WritableComparable<VIntWritable> {
public VIntWritable(int value) { set(value); }
- /** Set the value of this VIntWritable. */
+ /**
+ * Set the value of this VIntWritable.
+ * @param value input value.
+ */
public void set(int value) { this.value = value; }
- /** Return the value of this VIntWritable. */
+ /** @return the value of this VIntWritable. */
public int get() { return value; }
@Override
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VLongWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VLongWritable.java
index a9fac30605b..a72a7fc2fd3 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VLongWritable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VLongWritable.java
@@ -37,10 +37,13 @@ public class VLongWritable implements WritableComparable<VLongWritable> {
public VLongWritable(long value) { set(value); }
- /** Set the value of this LongWritable. */
+ /**
+ * Set the value of this VLongWritable.
+ * @param value input value.
+ */
public void set(long value) { this.value = value; }
- /** Return the value of this LongWritable. */
+ /** @return the value of this VLongWritable. */
public long get() { return value; }
@Override
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VersionedWritable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VersionedWritable.java
index c2db55520c9..421b8daeeae 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VersionedWritable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/VersionedWritable.java
@@ -36,7 +36,7 @@ import org.apache.hadoop.classification.InterfaceStability;
@InterfaceStability.Stable
public abstract class VersionedWritable implements Writable {
- /** Return the version number of the current implementation. */
+ /** @return the version number of the current implementation. */
public abstract byte getVersion();
// javadoc from Writable
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Writable.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Writable.java
index b94de6c3c72..56b46d554fe 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Writable.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/Writable.java
@@ -71,7 +71,7 @@ public interface Writable {
* Serialize the fields of this object to <code>out</code>.
*
* @param out <code>DataOutput</code> to serialize this object into.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
void write(DataOutput out) throws IOException;
@@ -82,7 +82,7 @@ public interface Writable {
* existing object where possible.
*
* @param in <code>DataInput</code> to deserialize this object from.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
void readFields(DataInput in) throws IOException;
}
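
The write/readFields contract documented above is symmetric: fields go out and come back in the same order. A minimal sketch of a custom implementation (PairWritable is a hypothetical name, not part of Hadoop):

    import java.io.DataInput;
    import java.io.DataOutput;
    import java.io.IOException;
    import org.apache.hadoop.io.Writable;

    public class PairWritable implements Writable {
      private int left;
      private long right;

      @Override
      public void write(DataOutput out) throws IOException {
        out.writeInt(left);        // serialize fields in a fixed order...
        out.writeLong(right);
      }

      @Override
      public void readFields(DataInput in) throws IOException {
        left = in.readInt();       // ...and read them back in the same order
        right = in.readLong();
      }
    }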
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java
index 1754b8d06f6..05d4e3c5c53 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableComparator.java
@@ -46,12 +46,22 @@ public class WritableComparator implements RawComparator, Configurable {
private Configuration conf;
- /** For backwards compatibility. **/
+ /**
+ * For backwards compatibility.
+ *
+ * @param c WritableComparable Type.
+ * @return WritableComparator.
+ */
public static WritableComparator get(Class<? extends WritableComparable> c) {
return get(c, null);
}
- /** Get a comparator for a {@link WritableComparable} implementation. */
+ /**
+ * Get a comparator for a {@link WritableComparable} implementation.
+ * @param c class.
+ * @param conf configuration.
+ * @return WritableComparator.
+ */
public static WritableComparator get(
Class<? extends WritableComparable> c, Configuration conf) {
WritableComparator comparator = comparators.get(c);
@@ -95,9 +105,13 @@ public class WritableComparator implements RawComparator, Configurable {
}
}
- /** Register an optimized comparator for a {@link WritableComparable}
+ /**
+ * Register an optimized comparator for a {@link WritableComparable}
* implementation. Comparators registered with this method must be
- * thread-safe. */
+ * thread-safe.
+ * @param c class.
+ * @param comparator WritableComparator.
+ */
public static void define(Class c, WritableComparator comparator) {
comparators.put(c, comparator);
}
@@ -111,7 +125,10 @@ public class WritableComparator implements RawComparator, Configurable {
this(null);
}
- /** Construct for a {@link WritableComparable} implementation. */
+ /**
+ * Construct for a {@link WritableComparable} implementation.
+ * @param keyClass WritableComparable Class.
+ */
protected WritableComparator(Class<? extends WritableComparable> keyClass) {
this(keyClass, null, false);
}
@@ -136,10 +153,16 @@ public class WritableComparator implements RawComparator, Configurable {
}
}
- /** Returns the WritableComparable implementation class. */
+ /**
+ * Returns the WritableComparable implementation class.
+ * @return WritableComparable.
+ */
public Class<? extends WritableComparable> getKeyClass() { return keyClass; }
- /** Construct a new {@link WritableComparable} instance. */
+ /**
+ * Construct a new {@link WritableComparable} instance.
+ * @return WritableComparable.
+ */
public WritableComparable newKey() {
return ReflectionUtils.newInstance(keyClass, conf);
}
@@ -168,27 +191,54 @@ public class WritableComparator implements RawComparator, Configurable {
return compare(key1, key2); // compare them
}
- /** Compare two WritableComparables.
+ /**
+ * Compare two WritableComparables.
*
- * The default implementation uses the natural ordering, calling {@link
- * Comparable#compareTo(Object)}. */
+ * The default implementation uses the natural ordering, calling {@link
+ * Comparable#compareTo(Object)}.
+ * @param a the first object to be compared.
+ * @param b the second object to be compared.
+ * @return compare result.
+ */
@SuppressWarnings("unchecked")
public int compare(WritableComparable a, WritableComparable b) {
return a.compareTo(b);
}
+ /**
+ * Compare two Object.
+ *
+ * @param a the first object to be compared.
+ * @param b the second object to be compared.
+ * @return compare result.
+ */
@Override
public int compare(Object a, Object b) {
return compare((WritableComparable)a, (WritableComparable)b);
}
- /** Lexicographic order of binary data. */
+ /**
+ * Lexicographic order of binary data.
+ * @param b1 b1.
+ * @param s1 s1.
+ * @param l1 l1.
+ * @param b2 b2.
+ * @param s2 s2.
+ * @param l2 l2.
+ * @return compare bytes.
+ */
public static int compareBytes(byte[] b1, int s1, int l1,
byte[] b2, int s2, int l2) {
return FastByteComparisons.compareTo(b1, s1, l1, b2, s2, l2);
}
- /** Compute hash for binary data. */
+ /**
+ * Compute hash for binary data.
+ * @param bytes bytes.
+ * @param offset offset.
+ * @param length length.
+ * @return hash for binary data.
+ */
public static int hashBytes(byte[] bytes, int offset, int length) {
int hash = 1;
for (int i = offset; i < offset + length; i++)
@@ -196,18 +246,33 @@ public class WritableComparator implements RawComparator, Configurable {
return hash;
}
- /** Compute hash for binary data. */
+ /**
+ * Compute hash for binary data.
+ * @param bytes bytes.
+ * @param length length.
+ * @return hash for binary data.
+ */
public static int hashBytes(byte[] bytes, int length) {
return hashBytes(bytes, 0, length);
}
- /** Parse an unsigned short from a byte array. */
+ /**
+ * Parse an unsigned short from a byte array.
+ * @param bytes bytes.
+ * @param start start.
+ * @return unsigned short from a byte array
+ */
public static int readUnsignedShort(byte[] bytes, int start) {
return (((bytes[start] & 0xff) << 8) +
((bytes[start+1] & 0xff)));
}
- /** Parse an integer from a byte array. */
+ /**
+ * Parse an integer from a byte array.
+ * @param bytes bytes.
+ * @param start start.
+ * @return integer from a byte array
+ */
public static int readInt(byte[] bytes, int start) {
return (((bytes[start ] & 0xff) << 24) +
((bytes[start+1] & 0xff) << 16) +
@@ -216,18 +281,33 @@ public class WritableComparator implements RawComparator, Configurable {
}
- /** Parse a float from a byte array. */
+ /**
+ * Parse a float from a byte array.
+ * @param bytes bytes.
+ * @param start start.
+ * @return float from a byte array
+ */
public static float readFloat(byte[] bytes, int start) {
return Float.intBitsToFloat(readInt(bytes, start));
}
- /** Parse a long from a byte array. */
+ /**
+ * Parse a long from a byte array.
+ * @param bytes bytes.
+ * @param start start.
+ * @return long from a byte array
+ */
public static long readLong(byte[] bytes, int start) {
return ((long)(readInt(bytes, start)) << 32) +
(readInt(bytes, start+4) & 0xFFFFFFFFL);
}
- /** Parse a double from a byte array. */
+ /**
+ * Parse a double from a byte array.
+ * @param bytes bytes.
+ * @param start start.
+ * @return double from a byte array.
+ */
public static double readDouble(byte[] bytes, int start) {
return Double.longBitsToDouble(readLong(bytes, start));
}
@@ -236,7 +316,7 @@ public class WritableComparator implements RawComparator, Configurable {
* Reads a zero-compressed encoded long from a byte array and returns it.
* @param bytes byte array with decode long
* @param start starting index
- * @throws java.io.IOException
+ * @throws IOException raised on errors performing I/O.
* @return deserialized long
*/
public static long readVLong(byte[] bytes, int start) throws IOException {
@@ -261,7 +341,7 @@ public class WritableComparator implements RawComparator, Configurable {
* Reads a zero-compressed encoded integer from a byte array and returns it.
* @param bytes byte array with the encoded integer
* @param start start index
- * @throws java.io.IOException
+ * @throws IOException raised on errors performing I/O.
* @return deserialized integer
*/
public static int readVInt(byte[] bytes, int start) throws IOException {
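
The byte-level helpers above (readInt, compareBytes, and friends) exist so comparators can order serialized keys without deserializing them. A purely illustrative sketch for IntWritable (Hadoop already registers an optimized comparator for it):

    import org.apache.hadoop.io.IntWritable;
    import org.apache.hadoop.io.WritableComparator;

    public class IntRawComparator extends WritableComparator {
      public IntRawComparator() {
        super(IntWritable.class);
      }

      @Override
      public int compare(byte[] b1, int s1, int l1, byte[] b2, int s2, int l2) {
        // Compare the serialized ints directly, without object creation.
        return Integer.compare(readInt(b1, s1), readInt(b2, s2));
      }
    }

Registered once via WritableComparator.define(IntWritable.class, new IntRawComparator()), it is what WritableComparator.get(IntWritable.class) hands back; comparators registered this way must be thread-safe.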
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableFactories.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableFactories.java
index a8fdbfe98df..9dd231e4887 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableFactories.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableFactories.java
@@ -35,17 +35,31 @@ public class WritableFactories {
private WritableFactories() {} // singleton
- /** Define a factory for a class. */
+ /**
+ * Define a factory for a class.
+ * @param c input c.
+ * @param factory input factory.
+ */
public static void setFactory(Class c, WritableFactory factory) {
CLASS_TO_FACTORY.put(c, factory);
}
- /** Define a factory for a class. */
+ /**
+ * Define a factory for a class.
+ * @param c input c.
+ * @return a factory for a class.
+ */
public static WritableFactory getFactory(Class c) {
return CLASS_TO_FACTORY.get(c);
}
- /** Create a new instance of a class with a defined factory. */
+ /**
+ * Create a new instance of a class with a defined factory.
+ *
+ * @param c input c.
+ * @param conf input configuration.
+ * @return a new instance of a class with a defined factory.
+ */
public static Writable newInstance(Class<? extends Writable> c, Configuration conf) {
WritableFactory factory = WritableFactories.getFactory(c);
if (factory != null) {
@@ -59,7 +73,11 @@ public class WritableFactories {
}
}
- /** Create a new instance of a class with a defined factory. */
+ /**
+ * Create a new instance of a class with a defined factory.
+ * @param c input c.
+ * @return a new instance of a class with a defined factory.
+ */
public static Writable newInstance(Class<? extends Writable> c) {
return newInstance(c, null);
}
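
A sketch of the factory registration above, reusing the hypothetical PairWritable from earlier; once a factory is set, newInstance() bypasses reflective construction:

    import org.apache.hadoop.io.Writable;
    import org.apache.hadoop.io.WritableFactories;

    public class FactorySketch {
      public static void main(String[] args) {
        // Register a factory for the class...
        WritableFactories.setFactory(PairWritable.class, () -> new PairWritable());
        // ...and let WritableFactories create instances from it.
        Writable w = WritableFactories.newInstance(PairWritable.class);
        System.out.println(w.getClass().getSimpleName());
      }
    }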
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableFactory.java
index bb8af974f05..d9e9b543c7d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableFactory.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableFactory.java
@@ -27,7 +27,7 @@ import org.apache.hadoop.classification.InterfaceStability;
@InterfaceAudience.Public
@InterfaceStability.Stable
public interface WritableFactory {
- /** Return a new instance. */
+ /** @return a new instance. */
Writable newInstance();
}
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableName.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableName.java
index 43d396edad7..e5e74875225 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableName.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableName.java
@@ -45,19 +45,33 @@ public class WritableName {
private WritableName() {} // no public ctor
- /** Set the name that a class should be known as to something other than the
- * class name. */
+ /**
+ * Set the name that a class should be known as to something other than the
+ * class name.
+ *
+ * @param writableClass input writableClass.
+ * @param name input name.
+ */
public static synchronized void setName(Class<?> writableClass, String name) {
CLASS_TO_NAME.put(writableClass, name);
NAME_TO_CLASS.put(name, writableClass);
}
- /** Add an alternate name for a class. */
+ /**
+ * Add an alternate name for a class.
+ * @param writableClass input writableClass.
+ * @param name input name.
+ */
public static synchronized void addName(Class<?> writableClass, String name) {
NAME_TO_CLASS.put(name, writableClass);
}
- /** Return the name for a class. Default is {@link Class#getName()}. */
+ /**
+ * Return the name for a class.
+ * Default is {@link Class#getName()}.
+ * @param writableClass input writableClass.
+ * @return name for a class.
+ */
public static synchronized String getName(Class<?> writableClass) {
String name = CLASS_TO_NAME.get(writableClass);
if (name != null)
@@ -65,7 +79,15 @@ public class WritableName {
return writableClass.getName();
}
- /** Return the class for a name. Default is {@link Class#forName(String)}.*/
+ /**
+ * Return the class for a name.
+ * Default is {@link Class#forName(String)}.
+ *
+ * @param name input name.
+ * @param conf input configuration.
+ * @return class for a name.
+ * @throws IOException raised on errors performing I/O.
+ */
public static synchronized Class<?> getClass(String name, Configuration conf
) throws IOException {
Class<?> writableClass = NAME_TO_CLASS.get(name);
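
A sketch of the alias registry above, again using the hypothetical PairWritable; setName() replaces the wire name while addName() only adds a lookup alias:

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.io.WritableName;

    public class NameSketch {
      public static void main(String[] args) throws Exception {
        WritableName.setName(PairWritable.class, "pair");        // new canonical name
        WritableName.addName(PairWritable.class, "legacy.Pair"); // extra alias
        System.out.println(WritableName.getName(PairWritable.class)); // "pair"
        Class<?> c = WritableName.getClass("legacy.Pair", new Configuration());
        System.out.println(c.getSimpleName());
      }
    }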
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableUtils.java
index 2062fb6fe37..187398de0ec 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableUtils.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/WritableUtils.java
@@ -208,7 +208,10 @@ public final class WritableUtils {
/**
* Make a copy of a writable object using serialization to a buffer.
+ *
+ * @param <T> the type of the copied object.
* @param orig The object to copy
+ * @param conf input Configuration.
* @return The copied object
*/
public static <T extends Writable> T clone(T orig, Configuration conf) {
@@ -223,10 +226,10 @@ public final class WritableUtils {
}
/**
- * Make a copy of the writable object using serialization to a buffer
+ * Make a copy of the writable object using serialization to a buffer.
* @param dst the object to copy from
* @param src the object to copy into, which is destroyed
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
* @deprecated use ReflectionUtils.cloneInto instead.
*/
@Deprecated
@@ -248,7 +251,7 @@ public final class WritableUtils {
*
* @param stream Binary output stream
* @param i Integer to be serialized
- * @throws java.io.IOException
+ * @throws IOException raised on errors performing I/O.
*/
public static void writeVInt(DataOutput stream, int i) throws IOException {
writeVLong(stream, i);
@@ -268,7 +271,7 @@ public final class WritableUtils {
*
* @param stream Binary output stream
* @param i Long to be serialized
- * @throws java.io.IOException
+ * @throws IOException raised on errors performing I/O.
*/
public static void writeVLong(DataOutput stream, long i) throws IOException {
if (i >= -112 && i <= 127) {
@@ -303,7 +306,7 @@ public final class WritableUtils {
/**
* Reads a zero-compressed encoded long from input stream and returns it.
* @param stream Binary input stream
- * @throws java.io.IOException
+ * @throws IOException raised on errors performing I/O.
* @return deserialized long from stream.
*/
public static long readVLong(DataInput stream) throws IOException {
@@ -324,7 +327,7 @@ public final class WritableUtils {
/**
* Reads a zero-compressed encoded integer from input stream and returns it.
* @param stream Binary input stream
- * @throws java.io.IOException
+ * @throws IOException raised on errors performing I/O.
* @return deserialized integer from stream.
*/
public static int readVInt(DataInput stream) throws IOException {
@@ -342,8 +345,10 @@ public final class WritableUtils {
* inclusive.
*
* @param stream Binary input stream
- * @throws java.io.IOException
- * @return deserialized integer from stream
+ * @param lower input lower.
+ * @param upper input upper.
+ * @throws IOException raised on errors performing I/O.
+ * @return deserialized integer from stream.
*/
public static int readVIntInRange(DataInput stream, int lower, int upper)
throws IOException {
@@ -387,7 +392,8 @@ public final class WritableUtils {
}
/**
- * Get the encoded length if an integer is stored in a variable-length format
+ * Get the encoded length if an integer is stored in a variable-length format.
+ * @param i input i.
* @return the encoded length
*/
public static int getVIntSize(long i) {
@@ -410,7 +416,7 @@ public final class WritableUtils {
* @param in DataInput to read from
* @param enumType Class type of Enum
* @return Enum represented by String read from DataInput
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public static <T extends Enum<T>> T readEnum(DataInput in, Class<T> enumType)
throws IOException{
@@ -420,7 +426,7 @@ public final class WritableUtils {
* writes String value of enum to DataOutput.
* @param out Dataoutput stream
* @param enumVal enum value
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public static void writeEnum(DataOutput out, Enum<?> enumVal)
throws IOException{
@@ -446,7 +452,11 @@ public final class WritableUtils {
}
}
- /** Convert writables to a byte array */
+ /**
+ * Convert writables to a byte array.
+ * @param writables input writables.
+ * @return the serialized byte array.
+ */
public static byte[] toByteArray(Writable... writables) {
final DataOutputBuffer out = new DataOutputBuffer();
try {
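
The zero-compressed encoding documented above stores small values in fewer bytes; getVIntSize() predicts exactly how many. A minimal round-trip sketch:

    import org.apache.hadoop.io.DataInputBuffer;
    import org.apache.hadoop.io.DataOutputBuffer;
    import org.apache.hadoop.io.WritableUtils;

    public class VLongSketch {
      public static void main(String[] args) throws Exception {
        DataOutputBuffer out = new DataOutputBuffer();
        WritableUtils.writeVLong(out, 300L);
        // getVIntSize(300) matches the bytes actually written.
        System.out.println(out.getLength() == WritableUtils.getVIntSize(300L));
        DataInputBuffer in = new DataInputBuffer();
        in.reset(out.getData(), out.getLength());
        System.out.println(WritableUtils.readVLong(in)); // 300
      }
    }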
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BZip2Codec.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BZip2Codec.java
index 7fd5633daa6..7640f7ed7a6 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BZip2Codec.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BZip2Codec.java
@@ -99,7 +99,7 @@ public class BZip2Codec implements Configurable, SplittableCompressionCodec {
* @param out the location for the final output stream
* @return a stream the user can write uncompressed data to, to have it
* compressed
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
@Override
public CompressionOutputStream createOutputStream(OutputStream out)
@@ -116,7 +116,7 @@ public class BZip2Codec implements Configurable, SplittableCompressionCodec {
* @param compressor compressor to use
* @return a stream the user can write uncompressed data to, to have it
* compressed
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
@Override
public CompressionOutputStream createOutputStream(OutputStream out,
@@ -154,7 +154,7 @@ public class BZip2Codec implements Configurable, SplittableCompressionCodec {
*
* @param in the stream to read compressed bytes from
* @return a stream to read uncompressed bytes from
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
@Override
public CompressionInputStream createInputStream(InputStream in)
@@ -171,7 +171,7 @@ public class BZip2Codec implements Configurable, SplittableCompressionCodec {
* @param in the stream to read compressed bytes from
* @param decompressor decompressor to use
* @return a stream to read uncompressed bytes from
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
@Override
public CompressionInputStream createInputStream(InputStream in,
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BlockDecompressorStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BlockDecompressorStream.java
index de457d19240..ff10332ea8d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BlockDecompressorStream.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/BlockDecompressorStream.java
@@ -43,7 +43,7 @@ public class BlockDecompressorStream extends DecompressorStream {
* @param in input stream
* @param decompressor decompressor to use
* @param bufferSize size of buffer
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public BlockDecompressorStream(InputStream in, Decompressor decompressor,
int bufferSize) throws IOException {
@@ -55,7 +55,7 @@ public class BlockDecompressorStream extends DecompressorStream {
*
* @param in input stream
* @param decompressor decompressor to use
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public BlockDecompressorStream(InputStream in, Decompressor decompressor) throws IOException {
super(in, decompressor);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CodecPool.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CodecPool.java
index 2ac2ca65173..69e8c99a1f4 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CodecPool.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CodecPool.java
@@ -235,7 +235,10 @@ public class CodecPool {
/**
* Return the number of leased {@link Compressor}s for this
- * {@link CompressionCodec}
+ * {@link CompressionCodec}.
+ *
+ * @param codec codec.
+ * @return the number of leased compressors.
*/
public static int getLeasedCompressorsCount(CompressionCodec codec) {
return (codec == null) ? 0 : getLeaseCount(compressorCounts,
@@ -244,7 +247,10 @@ public class CodecPool {
/**
* Return the number of leased {@link Decompressor}s for this
- * {@link CompressionCodec}
+ * {@link CompressionCodec}.
+ *
+ * @param codec codec.
+ * @return the number of leased decompressors.
*/
public static int getLeasedDecompressorsCount(CompressionCodec codec) {
return (codec == null) ? 0 : getLeaseCount(decompressorCounts,
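
The lease counters above track Compressors handed out by CodecPool; a sketch of the borrow/return cycle (getCodecByName("gzip") is one way to obtain a codec, assuming the default codec list):

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.io.compress.CodecPool;
    import org.apache.hadoop.io.compress.CompressionCodec;
    import org.apache.hadoop.io.compress.CompressionCodecFactory;
    import org.apache.hadoop.io.compress.Compressor;

    public class PoolSketch {
      public static void main(String[] args) {
        Configuration conf = new Configuration();
        CompressionCodec codec =
            new CompressionCodecFactory(conf).getCodecByName("gzip");
        Compressor c = CodecPool.getCompressor(codec);
        try {
          // pass the leased compressor to codec.createOutputStream(out, c)...
        } finally {
          CodecPool.returnCompressor(c);
        }
        System.out.println(CodecPool.getLeasedCompressorsCount(codec)); // 0 after return
      }
    }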
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodec.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodec.java
index f37aadfcb57..d064e1b9147 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodec.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodec.java
@@ -39,7 +39,7 @@ public interface CompressionCodec {
*
* @param out the location for the final output stream
* @return a stream the user can write uncompressed data to have it compressed
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
CompressionOutputStream createOutputStream(OutputStream out)
throws IOException;
@@ -51,7 +51,7 @@ public interface CompressionCodec {
* @param out the location for the final output stream
* @param compressor compressor to use
* @return a stream the user can write uncompressed data to have it compressed
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
CompressionOutputStream createOutputStream(OutputStream out,
Compressor compressor)
@@ -77,7 +77,7 @@ public interface CompressionCodec {
*
* @param in the stream to read compressed bytes from
* @return a stream to read uncompressed bytes from
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
CompressionInputStream createInputStream(InputStream in) throws IOException;
@@ -88,7 +88,7 @@ public interface CompressionCodec {
* @param in the stream to read compressed bytes from
* @param decompressor decompressor to use
* @return a stream to read uncompressed bytes from
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
CompressionInputStream createInputStream(InputStream in,
Decompressor decompressor)
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java
index a195ed4e77f..8e920a2e64c 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionCodecFactory.java
@@ -171,6 +171,8 @@ public class CompressionCodecFactory {
/**
* Find the codecs specified in the config value io.compression.codecs
* and register them. Defaults to gzip and deflate.
+ *
+ * @param conf configuration.
*/
public CompressionCodecFactory(Configuration conf) {
codecs = new TreeMap<String, CompressionCodec>();
@@ -293,7 +295,8 @@ public class CompressionCodecFactory {
/**
* A little test program.
- * @param args
+ * @param args arguments.
+ * @throws Exception exception.
*/
public static void main(String[] args) throws Exception {
Configuration conf = new Configuration();
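
A sketch of the usual lookup flow for the factory documented above: resolve the codec from the file suffix, then wrap the raw stream (the path is hypothetical; getCodec() returns null when no registered suffix matches):

    import java.io.InputStream;
    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.apache.hadoop.io.compress.CompressionCodec;
    import org.apache.hadoop.io.compress.CompressionCodecFactory;

    public class FactoryLookupSketch {
      public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        CompressionCodecFactory factory = new CompressionCodecFactory(conf);
        Path p = new Path("data.txt.gz");
        CompressionCodec codec = factory.getCodec(p); // null if suffix is unknown
        FileSystem fs = FileSystem.getLocal(conf);
        try (InputStream in = codec.createInputStream(fs.open(p))) {
          System.out.println(in.read()); // first uncompressed byte
        }
      }
    }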
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionInputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionInputStream.java
index 55bb132e9c8..5bfec01ec94 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionInputStream.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionInputStream.java
@@ -53,7 +53,7 @@ public abstract class CompressionInputStream extends InputStream
* the decompressed bytes from the given stream.
*
* @param in The input stream to be compressed.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
protected CompressionInputStream(InputStream in) throws IOException {
if (!(in instanceof Seekable) || !(in instanceof PositionedReadable)) {
@@ -93,6 +93,8 @@ public abstract class CompressionInputStream extends InputStream
/**
* Reset the decompressor to its initial state and discard any buffered data,
* as the underlying stream may have been repositioned.
+ *
+ * @throws IOException raised on errors performing I/O.
*/
public abstract void resetState() throws IOException;
@@ -118,7 +120,7 @@ public abstract class CompressionInputStream extends InputStream
/**
* This method is currently not supported.
*
- * @throws UnsupportedOperationException
+ * @throws UnsupportedOperationException always; this method is not supported.
*/
@Override
@@ -129,7 +131,7 @@ public abstract class CompressionInputStream extends InputStream
/**
* This method is currently not supported.
*
- * @throws UnsupportedOperationException
+ * @throws UnsupportedOperationException always; this method is not supported.
*/
@Override
public boolean seekToNewSource(long targetPos) throws UnsupportedOperationException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionOutputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionOutputStream.java
index 2a11ace8170..2e412dcd58f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionOutputStream.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/CompressionOutputStream.java
@@ -48,7 +48,7 @@ public abstract class CompressionOutputStream extends OutputStream
/**
* Create a compression output stream that writes
* the compressed bytes to the given stream.
- * @param out
+ * @param out out.
*/
protected CompressionOutputStream(OutputStream out) {
this.out = out;
@@ -89,12 +89,14 @@ public abstract class CompressionOutputStream extends OutputStream
/**
* Finishes writing compressed data to the output stream
* without closing the underlying stream.
+ * @throws IOException raised on errors performing I/O.
*/
public abstract void finish() throws IOException;
/**
* Reset the compression to the initial state.
* Does not reset the underlying stream.
+ * @throws IOException raised on errors performing I/O.
*/
public abstract void resetState() throws IOException;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Compressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Compressor.java
index 537837faa0a..7e2a6e679f4 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Compressor.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Compressor.java
@@ -65,11 +65,13 @@ public interface Compressor {
/**
* Return number of uncompressed bytes input so far.
+ * @return bytes read.
*/
public long getBytesRead();
/**
* Return number of compressed bytes output so far.
+ * @return bytes written.
*/
public long getBytesWritten();
@@ -97,6 +99,7 @@ public interface Compressor {
* @param off Start offset of the data
* @param len Size of the buffer
* @return The actual number of bytes of compressed data.
+ * @throws IOException raised on errors performing I/O.
*/
public int compress(byte[] b, int off, int len) throws IOException;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Decompressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Decompressor.java
index e9558fab873..30d4e29892e 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Decompressor.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Decompressor.java
@@ -96,7 +96,7 @@ public interface Decompressor {
* @param off Start offset of the data
* @param len Size of the buffer
* @return The actual number of bytes of uncompressed data.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public int decompress(byte[] b, int off, int len) throws IOException;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DecompressorStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DecompressorStream.java
index 570d15c7f16..745105ce873 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DecompressorStream.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/DecompressorStream.java
@@ -80,7 +80,7 @@ public class DecompressorStream extends CompressionInputStream {
* Allow derived classes to directly set the underlying stream.
*
* @param in Underlying input stream.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
protected DecompressorStream(InputStream in) throws IOException {
super(in);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Lz4Codec.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Lz4Codec.java
index 8bfb7fe95c4..a5afb706c99 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Lz4Codec.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/Lz4Codec.java
@@ -61,7 +61,7 @@ public class Lz4Codec implements Configurable, CompressionCodec {
*
* @param out the location for the final output stream
* @return a stream the user can write uncompressed data to have it compressed
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
@Override
public CompressionOutputStream createOutputStream(OutputStream out)
@@ -77,7 +77,7 @@ public class Lz4Codec implements Configurable, CompressionCodec {
* @param out the location for the final output stream
* @param compressor compressor to use
* @return a stream the user can write uncompressed data to have it compressed
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
@Override
public CompressionOutputStream createOutputStream(OutputStream out,
@@ -125,7 +125,7 @@ public class Lz4Codec implements Configurable, CompressionCodec {
*
* @param in the stream to read compressed bytes from
* @return a stream to read uncompressed bytes from
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
@Override
public CompressionInputStream createInputStream(InputStream in)
@@ -141,7 +141,7 @@ public class Lz4Codec implements Configurable, CompressionCodec {
* @param in the stream to read compressed bytes from
* @param decompressor decompressor to use
* @return a stream to read uncompressed bytes from
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
@Override
public CompressionInputStream createInputStream(InputStream in,
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/SnappyCodec.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/SnappyCodec.java
index 77cf36a339b..d64c6e512f8 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/SnappyCodec.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/SnappyCodec.java
@@ -61,7 +61,7 @@ public class SnappyCodec implements Configurable, CompressionCodec, DirectDecomp
*
* @param out the location for the final output stream
* @return a stream the user can write uncompressed data to have it compressed
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
@Override
public CompressionOutputStream createOutputStream(OutputStream out)
@@ -77,7 +77,7 @@ public class SnappyCodec implements Configurable, CompressionCodec, DirectDecomp
* @param out the location for the final output stream
* @param compressor compressor to use
* @return a stream the user can write uncompressed data to have it compressed
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
@Override
public CompressionOutputStream createOutputStream(OutputStream out,
@@ -122,7 +122,7 @@ public class SnappyCodec implements Configurable, CompressionCodec, DirectDecomp
*
* @param in the stream to read compressed bytes from
* @return a stream to read uncompressed bytes from
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
@Override
public CompressionInputStream createInputStream(InputStream in)
@@ -138,7 +138,7 @@ public class SnappyCodec implements Configurable, CompressionCodec, DirectDecomp
* @param in the stream to read compressed bytes from
* @param decompressor decompressor to use
* @return a stream to read uncompressed bytes from
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
@Override
public CompressionInputStream createInputStream(InputStream in,
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/SplittableCompressionCodec.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/SplittableCompressionCodec.java
index a756f47260c..f2e28774a46 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/SplittableCompressionCodec.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/SplittableCompressionCodec.java
@@ -61,6 +61,7 @@ public interface SplittableCompressionCodec extends CompressionCodec {
* Create a stream as dictated by the readMode. This method is used when
* the codec wants the ability to work with the underlying stream positions.
*
+ * @param decompressor decompressor.
* @param seekableIn The seekable input stream (seeks in compressed data)
* @param start The start offset into the compressed stream. May be changed
* by the underlying codec.
@@ -69,6 +70,7 @@ public interface SplittableCompressionCodec extends CompressionCodec {
* @param readMode Controls whether stream position is reported continuously
* from the compressed stream or only at block boundaries.
* @return a stream to read uncompressed bytes from
+ * @throws IOException raised on errors performing I/O.
*/
SplitCompressionInputStream createInputStream(InputStream seekableIn,
Decompressor decompressor, long start, long end, READ_MODE readMode)
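The newly documented decompressor parameter and IOException belong to the split-reading entry point. A hedged sketch of how the in-tree BZip2Codec implementation of this interface is typically driven over one file split (the offsets are illustrative; in a real record reader they come from the InputSplit):

```java
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.io.compress.BZip2Codec;
import org.apache.hadoop.io.compress.Decompressor;
import org.apache.hadoop.io.compress.SplitCompressionInputStream;
import org.apache.hadoop.io.compress.SplittableCompressionCodec;

public class SplitReadSketch {
  static SplitCompressionInputStream openSplit(FSDataInputStream seekableIn,
      long splitStart, long splitEnd, Configuration conf) throws IOException {
    BZip2Codec codec = new BZip2Codec();
    codec.setConf(conf);
    Decompressor decompressor = codec.createDecompressor();
    // BYBLOCK reports positions only at block boundaries, which lets a
    // record reader decide where its split really ends.
    return codec.createInputStream(seekableIn, decompressor,
        splitStart, splitEnd, SplittableCompressionCodec.READ_MODE.BYBLOCK);
  }
}
```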
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/ZStandardCodec.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/ZStandardCodec.java
index a7afebc0c49..139e81eb73c 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/ZStandardCodec.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/ZStandardCodec.java
@@ -116,7 +116,7 @@ public class ZStandardCodec implements
*
* @param out the location for the final output stream
* @return a stream the user can write uncompressed data to have compressed
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
@Override
public CompressionOutputStream createOutputStream(OutputStream out)
@@ -132,7 +132,7 @@ public class ZStandardCodec implements
* @param out the location for the final output stream
* @param compressor compressor to use
* @return a stream the user can write uncompressed data to have compressed
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
@Override
public CompressionOutputStream createOutputStream(OutputStream out,
@@ -173,7 +173,7 @@ public class ZStandardCodec implements
*
* @param in the stream to read compressed bytes from
* @return a stream to read uncompressed bytes from
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
@Override
public CompressionInputStream createInputStream(InputStream in)
@@ -189,7 +189,7 @@ public class ZStandardCodec implements
* @param in the stream to read compressed bytes from
* @param decompressor decompressor to use
* @return a stream to read uncompressed bytes from
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
@Override
public CompressionInputStream createInputStream(InputStream in,
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Compressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Compressor.java
index 5713c56df6a..9d1d8533248 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Compressor.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Compressor.java
@@ -67,6 +67,7 @@ public class Bzip2Compressor implements Compressor {
/**
* Creates a new compressor, taking settings from the configuration.
+ * @param conf configuration.
*/
public Bzip2Compressor(Configuration conf) {
this(Bzip2Factory.getBlockSize(conf),
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Decompressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Decompressor.java
index 72ba97630e2..acd806b9b30 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Decompressor.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/Bzip2Decompressor.java
@@ -50,6 +50,8 @@ public class Bzip2Decompressor implements Decompressor {
/**
* Creates a new decompressor.
+ * @param conserveMemory conserveMemory.
+ * @param directBufferSize directBufferSize.
*/
public Bzip2Decompressor(boolean conserveMemory, int directBufferSize) {
this.conserveMemory = conserveMemory;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2InputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2InputStream.java
index 8426d25c295..187fe481588 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2InputStream.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2InputStream.java
@@ -152,6 +152,7 @@ public class CBZip2InputStream extends InputStream implements BZip2Constants {
* This method reports the processed bytes so far. Please note that this
* statistic is only updated on block boundaries and only when the stream is
* initiated in BYBLOCK mode.
+ * @return the processed byte count.
*/
public long getProcessedByteCount() {
return reportedBytesReadFromCompressedStream;
@@ -209,7 +210,7 @@ public class CBZip2InputStream extends InputStream implements BZip2Constants {
* @param marker The bit pattern to be found in the stream
* @param markerBitLength No of bits in the marker
* @return true if the marker was found otherwise false
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
* @throws IllegalArgumentException if markerBitLength is greater than 63
*/
public boolean skipToNextMarker(long marker, int markerBitLength)
@@ -282,7 +283,8 @@ public class CBZip2InputStream extends InputStream implements BZip2Constants {
* the magic. Thus callers have to skip the first two bytes. Otherwise this
* constructor will throw an exception.
*
- *
+ * @param in the input stream.
+ * @param readMode the read mode, either CONTINUOUS or BYBLOCK.
* @throws IOException
* if the stream content is malformed or an I/O error occurs.
* @throws NullPointerException
@@ -326,7 +328,7 @@ public class CBZip2InputStream extends InputStream implements BZip2Constants {
*
* @return long Number of bytes between current stream position and the
* next BZip2 block start marker.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*
*/
public static long numberOfBytesTillNextMarker(final InputStream in) throws IOException{
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2OutputStream.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2OutputStream.java
index 850fec77c51..39c3638b0f4 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2OutputStream.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/bzip2/CBZip2OutputStream.java
@@ -64,13 +64,10 @@ import org.apache.hadoop.io.IOUtils;
*
*
*
- *
+ * Memory usage by blocksize
*
*
*
- * Memory usage by blocksize
- *
- *
* Blocksize / Compression memory usage / Decompression memory usage
@@ -213,6 +210,10 @@ public class CBZip2OutputStream extends OutputStream implements BZip2Constants {
/**
* This method is accessible by subclasses for historical purposes. If you
* don't know what it does then you don't need it.
+ * @param len len.
+ * @param freq freq.
+ * @param alphaSize alphaSize.
+ * @param maxLen maxLen.
*/
protected static void hbMakeCodeLengths(char[] len, int[] freq,
int alphaSize, int maxLen) {
@@ -849,6 +850,7 @@ public class CBZip2OutputStream extends OutputStream implements BZip2Constants {
/**
* Returns the blocksize parameter specified at construction time.
+ * @return blocksize.
*/
public final int getBlockSize() {
return this.blockSize100k;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.java
index 2b62ef78b28..719d216abae 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/lz4/Lz4Decompressor.java
@@ -199,7 +199,7 @@ public class Lz4Decompressor implements Decompressor {
* @param off Start offset of the data
* @param len Size of the buffer
* @return The actual number of bytes of uncompressed data.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
@Override
public synchronized int decompress(byte[] b, int off, int len)
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.java
index d3775e286e8..58987c4dda3 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/snappy/SnappyDecompressor.java
@@ -187,7 +187,7 @@ public class SnappyDecompressor implements Decompressor {
* @param off Start offset of the data
* @param len Size of the buffer
* @return The actual number of bytes of compressed data.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
@Override
public int decompress(byte[] b, int off, int len)
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java
index da8a90bb317..89e05fc6d07 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibCompressor.java
@@ -240,6 +240,7 @@ public class ZlibCompressor implements Compressor {
/**
* Creates a new compressor, taking settings from the configuration.
+ * @param conf configuration.
*/
public ZlibCompressor(Configuration conf) {
this(ZlibFactory.getCompressionLevel(conf),
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.java
index f642d771303..c2615548d23 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibDecompressor.java
@@ -101,6 +101,8 @@ public class ZlibDecompressor implements Decompressor {
/**
* Creates a new decompressor.
+ * @param header header.
+ * @param directBufferSize directBufferSize.
*/
public ZlibDecompressor(CompressionHeader header, int directBufferSize) {
this.header = header;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibFactory.java
index 883f1717eea..c2de494457c 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibFactory.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zlib/ZlibFactory.java
@@ -66,7 +66,7 @@ public class ZlibFactory {
/**
* Set the flag whether to use native library. Used for testing non-native
* libraries
- *
+ * @param isLoaded isLoaded.
*/
@VisibleForTesting
public static void setNativeZlibLoaded(final boolean isLoaded) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zstd/ZStandardCompressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zstd/ZStandardCompressor.java
index bc51f3d98a5..a77b59640cd 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zstd/ZStandardCompressor.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zstd/ZStandardCompressor.java
@@ -84,6 +84,8 @@ public class ZStandardCompressor implements Compressor {
/**
* Creates a new compressor with the default compression level.
* Compressed data will be generated in ZStandard format.
+ * @param level level.
+ * @param bufferSize bufferSize.
*/
public ZStandardCompressor(int level, int bufferSize) {
this(level, bufferSize, bufferSize);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zstd/ZStandardDecompressor.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zstd/ZStandardDecompressor.java
index adf2fe629f8..792547a62fa 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zstd/ZStandardDecompressor.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/compress/zstd/ZStandardDecompressor.java
@@ -73,6 +73,7 @@ public class ZStandardDecompressor implements Decompressor {
/**
* Creates a new decompressor.
+ * @param bufferSize bufferSize.
*/
public ZStandardDecompressor(int bufferSize) {
this.directBufferSize = bufferSize;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/CodecUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/CodecUtil.java
index 2632f4b82f0..f89a0d9812d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/CodecUtil.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/CodecUtil.java
@@ -83,6 +83,7 @@ public final class CodecUtil {
/**
* Create encoder corresponding to given codec.
* @param options Erasure codec options
+ * @param conf configuration.
* @return erasure encoder
*/
public static ErasureEncoder createEncoder(Configuration conf,
@@ -100,6 +101,7 @@ public final class CodecUtil {
/**
* Create decoder corresponding to given codec.
* @param options Erasure codec options
+ * @param conf configuration.
* @return erasure decoder
*/
public static ErasureDecoder createDecoder(Configuration conf,
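For context on the conf parameter documented in both factory methods, a minimal sketch of building an encoder/decoder pair through CodecUtil; the RS 6+3 schema is an illustrative choice, not something the patch prescribes:

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.erasurecode.CodecUtil;
import org.apache.hadoop.io.erasurecode.ECSchema;
import org.apache.hadoop.io.erasurecode.ErasureCodecOptions;
import org.apache.hadoop.io.erasurecode.coder.ErasureDecoder;
import org.apache.hadoop.io.erasurecode.coder.ErasureEncoder;

public class CoderSetup {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Reed-Solomon with 6 data units and 3 parity units (illustrative).
    ECSchema schema = new ECSchema("rs", 6, 3);
    ErasureCodecOptions options = new ErasureCodecOptions(schema);

    ErasureEncoder encoder = CodecUtil.createEncoder(conf, options);
    ErasureDecoder decoder = CodecUtil.createDecoder(conf, options);
    System.out.println(encoder + " / " + decoder);
  }
}
```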
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/ErasureCodeNative.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/ErasureCodeNative.java
index ec317eee4dc..83a31512820 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/ErasureCodeNative.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/ErasureCodeNative.java
@@ -61,6 +61,7 @@ public final class ErasureCodeNative {
/**
* Are native libraries loaded?
+ * @return true if the native code has been loaded, false otherwise.
*/
public static boolean isNativeCodeLoaded() {
return LOADING_FAILURE_REASON == null;
@@ -82,6 +83,7 @@ public final class ErasureCodeNative {
/**
* Get the native library name that's available or supported.
+ * @return library name.
*/
public static native String getLibraryName();
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureCoder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureCoder.java
index b5ae1f1e399..ab1775538bd 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureCoder.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureCoder.java
@@ -68,6 +68,7 @@ public interface ErasureCoder extends Configurable {
*
* @param blockGroup the erasure coding block group containing all necessary
* information for codec calculation
+ * @return ErasureCodingStep.
*/
ErasureCodingStep calculateCoding(ECBlockGroup blockGroup);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureCodingStep.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureCodingStep.java
index fb89d99a054..333647c982b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureCodingStep.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureCodingStep.java
@@ -46,8 +46,9 @@ public interface ErasureCodingStep {
/**
* Perform encoding or decoding given the input chunks, and generated results
* will be written to the output chunks.
- * @param inputChunks
- * @param outputChunks
+ * @param inputChunks inputChunks.
+ * @param outputChunks outputChunks.
+ * @throws IOException raised on errors performing I/O.
*/
void performCoding(ECChunk[] inputChunks, ECChunk[] outputChunks)
throws IOException;
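performCoding is the hand-off point between a planned coding step and its caller, and the newly documented IOException propagates up from the underlying raw coder. A hedged sketch of the intended calling sequence (chunk preparation elided):

```java
import java.io.IOException;
import org.apache.hadoop.io.erasurecode.ECBlockGroup;
import org.apache.hadoop.io.erasurecode.ECChunk;
import org.apache.hadoop.io.erasurecode.coder.ErasureCodingStep;
import org.apache.hadoop.io.erasurecode.coder.ErasureEncoder;

public class EncodeGroupSketch {
  static void encodeGroup(ErasureEncoder encoder, ECBlockGroup blockGroup,
      ECChunk[] inputChunks, ECChunk[] outputChunks) throws IOException {
    // Plan the coding work for this block group...
    ErasureCodingStep step = encoder.calculateCoding(blockGroup);
    // ...then run it; this is where the documented IOException can surface.
    step.performCoding(inputChunks, outputChunks);
    step.finish();
  }
}
```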
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureDecoder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureDecoder.java
index 004fd38df11..30020b9959f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureDecoder.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureDecoder.java
@@ -65,7 +65,7 @@ public abstract class ErasureDecoder extends Configured
/**
* We have all the data blocks and parity blocks as input blocks for
* recovering by default. It's codec specific.
- * @param blockGroup
+ * @param blockGroup blockGroup.
* @return input blocks
*/
protected ECBlock[] getInputBlocks(ECBlockGroup blockGroup) {
@@ -83,7 +83,7 @@ public abstract class ErasureDecoder extends Configured
/**
* Which blocks were erased?
- * @param blockGroup
+ * @param blockGroup blockGroup.
* @return output blocks to recover
*/
protected ECBlock[] getOutputBlocks(ECBlockGroup blockGroup) {
@@ -118,7 +118,7 @@ public abstract class ErasureDecoder extends Configured
/**
* Perform decoding against a block blockGroup.
- * @param blockGroup
+ * @param blockGroup blockGroup.
* @return decoding step for caller to do the real work
*/
protected abstract ErasureCodingStep prepareDecodingStep(
@@ -126,7 +126,7 @@ public abstract class ErasureDecoder extends Configured
/**
* Get the number of erased blocks in the block group.
- * @param blockGroup
+ * @param blockGroup blockGroup.
* @return number of erased blocks
*/
protected int getNumErasedBlocks(ECBlockGroup blockGroup) {
@@ -153,7 +153,7 @@ public abstract class ErasureDecoder extends Configured
/**
* Get indexes of erased blocks from inputBlocks
- * @param inputBlocks
+ * @param inputBlocks inputBlocks.
* @return indexes of erased blocks from inputBlocks
*/
protected int[] getErasedIndexes(ECBlock[] inputBlocks) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureDecodingStep.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureDecodingStep.java
index 24f55470e17..20a396d3136 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureDecodingStep.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureDecodingStep.java
@@ -37,10 +37,10 @@ public class ErasureDecodingStep implements ErasureCodingStep {
/**
* The constructor with all the necessary info.
- * @param inputBlocks
+ * @param inputBlocks inputBlocks.
* @param erasedIndexes the indexes of erased blocks in inputBlocks array
- * @param outputBlocks
- * @param rawDecoder
+ * @param outputBlocks outputBlocks.
+ * @param rawDecoder rawDecoder.
*/
public ErasureDecodingStep(ECBlock[] inputBlocks, int[] erasedIndexes,
ECBlock[] outputBlocks,
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureEncoder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureEncoder.java
index 81666e9b76b..cca272f69a2 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureEncoder.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureEncoder.java
@@ -83,7 +83,7 @@ public abstract class ErasureEncoder extends Configured
/**
* Perform encoding against a block group.
- * @param blockGroup
+ * @param blockGroup blockGroup.
* @return encoding step for caller to do the real work
*/
protected abstract ErasureCodingStep prepareEncodingStep(
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureEncodingStep.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureEncodingStep.java
index 5fc5c7a0992..9e696d2c584 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureEncodingStep.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/ErasureEncodingStep.java
@@ -36,9 +36,9 @@ public class ErasureEncodingStep implements ErasureCodingStep {
/**
* The constructor with all the necessary info.
- * @param inputBlocks
- * @param outputBlocks
- * @param rawEncoder
+ * @param inputBlocks inputBlocks.
+ * @param outputBlocks outputBlocks.
+ * @param rawEncoder rawEncoder.
*/
public ErasureEncodingStep(ECBlock[] inputBlocks, ECBlock[] outputBlocks,
RawErasureEncoder rawEncoder) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/HHErasureCodingStep.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/HHErasureCodingStep.java
index a0f5b727106..46f0a76da17 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/HHErasureCodingStep.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/HHErasureCodingStep.java
@@ -38,8 +38,8 @@ public abstract class HHErasureCodingStep
/**
* Constructor given input blocks and output blocks.
*
- * @param inputBlocks
- * @param outputBlocks
+ * @param inputBlocks inputBlocks.
+ * @param outputBlocks outputBlocks.
*/
public HHErasureCodingStep(ECBlock[] inputBlocks,
ECBlock[] outputBlocks) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/HHXORErasureDecodingStep.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/HHXORErasureDecodingStep.java
index 16a3c0fa61c..4d594f476df 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/HHXORErasureDecodingStep.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/HHXORErasureDecodingStep.java
@@ -43,9 +43,9 @@ public class HHXORErasureDecodingStep extends HHErasureCodingStep {
/**
* The constructor with all the necessary info.
- * @param inputBlocks
+ * @param inputBlocks inputBlocks.
* @param erasedIndexes the indexes of erased blocks in inputBlocks array
- * @param outputBlocks
+ * @param outputBlocks outputBlocks.
* @param rawDecoder underlying RS decoder for hitchhiker decoding
* @param rawEncoder underlying XOR encoder for hitchhiker decoding
*/
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/HHXORErasureEncodingStep.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/HHXORErasureEncodingStep.java
index 6a564427011..f571e932b6a 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/HHXORErasureEncodingStep.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/HHXORErasureEncodingStep.java
@@ -40,8 +40,8 @@ public class HHXORErasureEncodingStep extends HHErasureCodingStep {
/**
* The constructor with all the necessary info.
*
- * @param inputBlocks
- * @param outputBlocks
+ * @param inputBlocks inputBlocks.
+ * @param outputBlocks outputBlocks.
* @param rsRawEncoder underlying RS encoder for hitchhiker encoding
* @param xorRawEncoder underlying XOR encoder for hitchhiker encoding
*/
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/XORErasureDecoder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/XORErasureDecoder.java
index 1a0e5c030e0..9aae5e43c86 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/XORErasureDecoder.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/XORErasureDecoder.java
@@ -53,7 +53,7 @@ public class XORErasureDecoder extends ErasureDecoder {
/**
* Which blocks were erased? For XOR it's simple: we only allow and return one
* erased block, either data or parity.
- * @param blockGroup
+ * @param blockGroup blockGroup.
* @return output blocks to recover
*/
@Override
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/util/HHUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/util/HHUtil.java
index 91d02415bfd..7f771c9677d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/util/HHUtil.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/coder/util/HHUtil.java
@@ -202,6 +202,8 @@ public final class HHUtil {
/**
* Find the valid input from all the inputs.
+ *
+ * @param <T> the generic type T.
* @param inputs input buffers to look for valid input
* @return the first valid input
*/
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/grouper/BlockGrouper.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/grouper/BlockGrouper.java
index 3f1b0c22941..0407d161208 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/grouper/BlockGrouper.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/grouper/BlockGrouper.java
@@ -33,7 +33,7 @@ public class BlockGrouper {
/**
* Set EC schema.
- * @param schema
+ * @param schema schema.
*/
public void setSchema(ECSchema schema) {
this.schema = schema;
@@ -41,7 +41,7 @@ public class BlockGrouper {
/**
* Get EC schema.
- * @return
+ * @return ECSchema.
*/
protected ECSchema getSchema() {
return schema;
@@ -67,7 +67,7 @@ public class BlockGrouper {
* Calculating and organizing BlockGroup, to be called by ECManager
* @param dataBlocks Data blocks to compute parity blocks against
* @param parityBlocks To be computed parity blocks
- * @return
+ * @return ECBlockGroup.
*/
public ECBlockGroup makeBlockGroup(ECBlock[] dataBlocks,
ECBlock[] parityBlocks) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/DecodingValidator.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/DecodingValidator.java
index 396aac08cc5..a9bc2977398 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/DecodingValidator.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/DecodingValidator.java
@@ -68,7 +68,7 @@ public class DecodingValidator {
* @param erasedIndexes indexes of erased units used for decoding
* @param outputs decoded output buffers, which are ready to be read after
* the call
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public void validate(ByteBuffer[] inputs, int[] erasedIndexes,
ByteBuffer[] outputs) throws IOException {
@@ -133,7 +133,7 @@ public class DecodingValidator {
* @param inputs input buffers used for decoding
* @param erasedIndexes indexes of erased units used for decoding
* @param outputs decoded output buffers
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public void validate(ECChunk[] inputs, int[] erasedIndexes, ECChunk[] outputs)
throws IOException {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureDecoder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureDecoder.java
index 2ebe94b0385..329bf7c3aaf 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureDecoder.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureDecoder.java
@@ -80,6 +80,7 @@ public abstract class RawErasureDecoder {
* @param erasedIndexes indexes of erased units in the inputs array
* @param outputs output buffers to put decoded data into according to
* erasedIndexes, ready for read after the call
+ * @throws IOException raised on errors performing I/O.
*/
public synchronized void decode(ByteBuffer[] inputs, int[] erasedIndexes,
ByteBuffer[] outputs) throws IOException {
@@ -117,6 +118,7 @@ public abstract class RawErasureDecoder {
/**
* Perform the real decoding using Direct ByteBuffer.
* @param decodingState the decoding state
+ * @throws IOException raised on errors performing I/O.
*/
protected abstract void doDecode(ByteBufferDecodingState decodingState)
throws IOException;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureEncoder.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureEncoder.java
index 6d2ecd20525..d5ccb12c9d6 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureEncoder.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/RawErasureEncoder.java
@@ -100,7 +100,8 @@ public abstract class RawErasureEncoder {
/**
* Perform the real encoding work using direct ByteBuffer.
- * @param encodingState the encoding state
+ * @param encodingState the encoding state.
+ * @throws IOException raised on errors performing I/O.
*/
protected abstract void doEncode(ByteBufferEncodingState encodingState)
throws IOException;
@@ -111,6 +112,7 @@ public abstract class RawErasureEncoder {
* @param inputs input buffers to read data from
* @param outputs output buffers to put the encoded data into, ready to read
* after the call
+ * @throws IOException raised on errors performing I/O.
*/
public void encode(byte[][] inputs, byte[][] outputs) throws IOException {
ByteArrayEncodingState baeState = new ByteArrayEncodingState(
@@ -128,6 +130,7 @@ public abstract class RawErasureEncoder {
* Perform the real encoding work using bytes array, supporting offsets
* and lengths.
* @param encodingState the encoding state
+ * @throws IOException raised on errors performing I/O.
*/
protected abstract void doEncode(ByteArrayEncodingState encodingState)
throws IOException;
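The encode(byte[][], byte[][]) path documented above can be exercised directly through the raw-coder factory. A minimal sketch, assuming the Reed-Solomon raw coder is registered under its usual "rs" name:

```java
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.erasurecode.CodecUtil;
import org.apache.hadoop.io.erasurecode.ErasureCoderOptions;
import org.apache.hadoop.io.erasurecode.rawcoder.RawErasureEncoder;

public class RsEncodeSketch {
  static byte[][] rsEncode(byte[][] data) throws IOException {
    Configuration conf = new Configuration();
    // 6 data units + 3 parity units (illustrative RS layout).
    ErasureCoderOptions coderOptions = new ErasureCoderOptions(6, 3);
    RawErasureEncoder encoder =
        CodecUtil.createRawEncoder(conf, "rs", coderOptions);

    byte[][] parity = new byte[3][data[0].length];
    encoder.encode(data, parity);  // throws IOException on coder failure
    return parity;
  }
}
```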
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/DumpUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/DumpUtil.java
index 6de07161743..90e57201c54 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/DumpUtil.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/DumpUtil.java
@@ -36,6 +36,10 @@ public final class DumpUtil {
/**
* Convert bytes into format like 0x02 02 00 80.
* If limit is negative or too large, then all bytes will be converted.
+ *
+ * @param bytes bytes.
+ * @param limit limit.
+ * @return the hex format string.
*/
public static String bytesToHex(byte[] bytes, int limit) {
if (limit <= 0 || limit > bytes.length) {
@@ -70,8 +74,8 @@ public final class DumpUtil {
/**
* Print data in hex format in an array of chunks.
- * @param header
- * @param chunks
+ * @param header header.
+ * @param chunks chunks.
*/
public static void dumpChunks(String header, ECChunk[] chunks) {
System.out.println();
@@ -84,7 +88,7 @@ public final class DumpUtil {
/**
* Print data in hex format in a chunk.
- * @param chunk
+ * @param chunk chunk.
*/
public static void dumpChunk(ECChunk chunk) {
String str;
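As a usage note for the bytesToHex documentation above, a small sketch; the input bytes are illustrative, and a non-positive limit falls through to converting the whole array, per the guard shown in the hunk:

```java
import org.apache.hadoop.io.erasurecode.rawcoder.util.DumpUtil;

public class HexDumpSketch {
  public static void main(String[] args) {
    byte[] bytes = {0x02, 0x02, 0x00, (byte) 0x80};
    // A non-positive limit converts all bytes (see the guard above).
    System.out.println(DumpUtil.bytesToHex(bytes, -1));
  }
}
```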
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/GF256.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/GF256.java
index 35534f307a7..b48a23f8b70 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/GF256.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/GF256.java
@@ -195,6 +195,10 @@ public final class GF256 {
* Invert a matrix assuming it's invertible.
*
* Ported from Intel ISA-L library.
+ *
+ * @param inMatrix inMatrix.
+ * @param outMatrix outMatrix.
+ * @param n n.
*/
public static void gfInvertMatrix(byte[] inMatrix, byte[] outMatrix, int n) {
byte temp;
@@ -262,7 +266,11 @@ public final class GF256 {
*
* Calculates const table gftbl in GF(2^8) from single input A
* gftbl(A) = {A{00}, A{01}, A{02}, ... , A{0f} }, {A{00}, A{10}, A{20},
- * ... , A{f0} } -- from ISA-L implementation
+ * ... , A{f0} } -- from ISA-L implementation.
+ *
+ * @param c c.
+ * @param tbl tbl.
+ * @param offset offset.
*/
public static void gfVectMulInit(byte c, byte[] tbl, int offset) {
byte c2 = (byte) ((c << 1) ^ ((c & 0x80) != 0 ? 0x1d : 0));
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/GaloisField.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/GaloisField.java
index f80fceca94c..6d22ff0f62e 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/GaloisField.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/GaloisField.java
@@ -93,10 +93,11 @@ public class GaloisField {
}
/**
- * Get the object performs Galois field arithmetics
+ * Get the object that performs Galois field arithmetic.
*
* @param fieldSize size of the field
* @param primitivePolynomial a primitive polynomial corresponds to the size
+ * @return GaloisField.
*/
public static GaloisField getInstance(int fieldSize,
int primitivePolynomial) {
@@ -114,7 +115,8 @@ public class GaloisField {
}
/**
- * Get the object performs Galois field arithmetic with default setting
+ * Get the object that performs Galois field arithmetic with the default setting.
+ * @return GaloisField.
*/
public static GaloisField getInstance() {
return getInstance(DEFAULT_FIELD_SIZE, DEFAULT_PRIMITIVE_POLYNOMIAL);
@@ -236,7 +238,13 @@ public class GaloisField {
}
/**
- * A "bulk" version to the solving of Vandermonde System
+ * A "bulk" version to the solving of Vandermonde System.
+ *
+ * @param x input x.
+ * @param y input y.
+ * @param outputOffsets input outputOffsets.
+ * @param len input len.
+ * @param dataLen input dataLen.
*/
public void solveVandermondeSystem(int[] x, byte[][] y, int[] outputOffsets,
int len, int dataLen) {
@@ -269,6 +277,10 @@ public class GaloisField {
/**
* A "bulk" version of the solveVandermondeSystem, using ByteBuffer.
+ *
+ * @param x input x.
+ * @param y input y.
+ * @param len input len.
*/
public void solveVandermondeSystem(int[] x, ByteBuffer[] y, int len) {
ByteBuffer p;
@@ -413,10 +425,10 @@ public class GaloisField {
* Tends to be 2X faster than the "int" substitute in a loop.
*
* @param p input polynomial
- * @param offsets
- * @param len
+ * @param offsets input offsets.
+ * @param len input len.
* @param q store the return result
- * @param offset
+ * @param offset input offset.
* @param x input field
*/
public void substitute(byte[][] p, int[] offsets,
@@ -440,6 +452,7 @@ public class GaloisField {
* @param p input polynomial
* @param q store the return result
* @param x input field
+ * @param len input len.
*/
public void substitute(ByteBuffer[] p, int len, ByteBuffer q, int x) {
int y = 1, iIdx, oIdx;
@@ -459,6 +472,9 @@ public class GaloisField {
/**
* The "bulk" version of the remainder.
* Warning: This function will modify the "dividend" inputs.
+ *
+ * @param divisor divisor.
+ * @param dividend dividend.
*/
public void remainder(byte[][] dividend, int[] divisor) {
for (int i = dividend.length - divisor.length; i >= 0; i--) {
@@ -476,6 +492,11 @@ public class GaloisField {
/**
* The "bulk" version of the remainder.
* Warning: This function will modify the "dividend" inputs.
+ *
+ * @param dividend dividend.
+ * @param offsets offsets.
+ * @param len len.
+ * @param divisor divisor.
*/
public void remainder(byte[][] dividend, int[] offsets,
int len, int[] divisor) {
@@ -497,6 +518,9 @@ public class GaloisField {
/**
* The "bulk" version of the remainder, using ByteBuffer.
* Warning: This function will modify the "dividend" inputs.
+ *
+ * @param dividend dividend.
+ * @param divisor divisor.
*/
public void remainder(ByteBuffer[] dividend, int[] divisor) {
int idx1, idx2;
@@ -519,6 +543,8 @@ public class GaloisField {
/**
* Perform Gaussian elimination on the given matrix. This matrix has to be a
* fat matrix (number of rows > number of columns).
+ *
+ * @param matrix matrix.
*/
public void gaussianElimination(int[][] matrix) {
assert(matrix != null && matrix.length > 0 && matrix[0].length > 0
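To make the two getInstance variants above concrete, a minimal sketch; the multiply/add helpers used here are assumed to be the field-arithmetic methods this class also exposes:

```java
import org.apache.hadoop.io.erasurecode.rawcoder.util.GaloisField;

public class GfSketch {
  public static void main(String[] args) {
    // Default field size and primitive polynomial, per getInstance() above.
    GaloisField gf = GaloisField.getInstance();
    int product = gf.multiply(5, 7);  // multiplication in GF(2^m)
    int sum = gf.add(5, 7);           // addition in GF(2^m) is XOR
    System.out.println(product + " " + sum);
  }
}
```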
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/RSUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/RSUtil.java
index 43823d0f8c3..b1fdc82a116 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/RSUtil.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/erasurecode/rawcoder/util/RSUtil.java
@@ -59,6 +59,10 @@ public final class RSUtil {
/**
* Ported from Intel ISA-L library.
+ *
+ * @param k k.
+ * @param a a.
+ * @param m m.
*/
public static void genCauchyMatrix(byte[] a, int m, int k) {
// Identity matrix in high position
@@ -82,6 +86,13 @@ public final class RSUtil {
*
* The algorithm is ported from Intel ISA-L library for compatible. It
* leverages Java auto-vectorization support for performance.
+ *
+ * @param gfTables gfTables.
+ * @param dataLen dataLen.
+ * @param inputs inputs.
+ * @param inputOffsets inputOffsets.
+ * @param outputs outputs.
+ * @param outputOffsets outputOffsets.
*/
public static void encodeData(byte[] gfTables, int dataLen, byte[][] inputs,
int[] inputOffsets, byte[][] outputs,
@@ -133,6 +144,10 @@ public final class RSUtil {
/**
* See above. Try to use the byte[] version when possible.
+ *
+ * @param gfTables gfTables.
+ * @param inputs inputs.
+ * @param outputs outputs.
*/
public static void encodeData(byte[] gfTables, ByteBuffer[] inputs,
ByteBuffer[] outputs) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/ByteArray.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/ByteArray.java
index c6c8b3fe3e1..964fb04c1b9 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/ByteArray.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/ByteArray.java
@@ -35,7 +35,7 @@ public final class ByteArray implements RawComparable {
/**
* Constructing a ByteArray from a {@link BytesWritable}.
*
- * @param other
+ * @param other other.
*/
public ByteArray(BytesWritable other) {
this(other.getBytes(), 0, other.getLength());
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFile.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFile.java
index 09cd2825e3c..aeacc16a78f 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFile.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/TFile.java
@@ -276,7 +276,7 @@ public class TFile {
*
* @param conf
* The configuration object.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public Writer(FSDataOutputStream fsdos, int minBlockSize,
String compressName, String comparator, Configuration conf)
@@ -350,7 +350,7 @@ public class TFile {
* Buffer for key.
* @param value
* Buffer for value.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public void append(byte[] key, byte[] value) throws IOException {
append(key, 0, key.length, value, 0, value.length);
@@ -521,7 +521,7 @@ public class TFile {
* exactly as many bytes as specified here before calling close on
* the returned output stream.
* @return The key appending output stream.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*
*/
public DataOutputStream prepareAppendKey(int length) throws IOException {
@@ -548,8 +548,8 @@ public class TFile {
* the returned output stream. Advertising the value size up-front
* guarantees that the value is encoded in one chunk, and avoids
* intermediate chunk buffering.
- * @throws IOException
- *
+ * @throws IOException raised on errors performing I/O.
+ * @return the value appending output stream.
*/
public DataOutputStream prepareAppendValue(int length) throws IOException {
if (state != State.END_KEY) {
@@ -588,7 +588,7 @@ public class TFile {
* {@link TFile#getSupportedCompressionAlgorithms()}.
* @return A DataOutputStream that can be used to write Meta Block data.
* Closing the stream would signal the ending of the block.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
* @throws MetaBlockAlreadyExists
* the Meta Block with the same name already exists.
*/
@@ -616,7 +616,7 @@ public class TFile {
* Name of the meta block.
* @return A DataOutputStream that can be used to write Meta Block data.
* Closing the stream would signal the ending of the block.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
* @throws MetaBlockAlreadyExists
* the Meta Block with the same name already exists.
*/
@@ -796,8 +796,8 @@ public class TFile {
* The length of TFile. This is required because we have no easy
* way of knowing the actual size of the input file through the
* File input stream.
- * @param conf
- * @throws IOException
+ * @param conf configuration.
+ * @throws IOException raised on errors performing I/O.
*/
public Reader(FSDataInputStream fsdis, long fileLength, Configuration conf)
throws IOException {
@@ -896,7 +896,7 @@ public class TFile {
* Get the first key in the TFile.
*
* @return The first key in the TFile.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public RawComparable getFirstKey() throws IOException {
checkTFileDataIndex();
@@ -907,7 +907,7 @@ public class TFile {
* Get the last key in the TFile.
*
* @return The last key in the TFile.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public RawComparable getLastKey() throws IOException {
checkTFileDataIndex();
@@ -1043,7 +1043,7 @@ public class TFile {
* the user supplied offset.
* @return the RecordNum to the corresponding entry. If no such entry
* exists, it returns the total entry count.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public long getRecordNumNear(long offset) throws IOException {
return getRecordNumByLocation(getLocationNear(offset));
@@ -1058,7 +1058,7 @@ public class TFile {
* @return the key that fits the requirement; or null if no such key exists
* (which could happen if the offset is close to the end of the
* TFile).
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public RawComparable getKeyNear(long offset) throws IOException {
int blockIndex = readerBCF.getBlockIndexNear(offset);
@@ -1072,7 +1072,7 @@ public class TFile {
*
* @return The scanner object. A valid Scanner is always returned even if
* the TFile is empty.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public Scanner createScanner() throws IOException {
return new Scanner(this, begin, end);
@@ -1089,7 +1089,7 @@ public class TFile {
* specified byte-region but always round up to the compression
* block boundaries. It is possible that the returned scanner
* contains zero key-value pairs even if length is positive.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public Scanner createScannerByByteRange(long offset, long length) throws IOException {
return new Scanner(this, offset, offset + length);
@@ -1106,7 +1106,7 @@ public class TFile {
* key-value entry of the TFile.
* @return The actual coverage of the returned scanner will cover all keys
* greater than or equal to the beginKey and less than the endKey.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*
* @deprecated Use {@link #createScannerByKey(byte[], byte[])} instead.
*/
@@ -1127,7 +1127,7 @@ public class TFile {
* key-value entry of the TFile.
* @return The actual coverage of the returned scanner will cover all keys
* greater than or equal to the beginKey and less than the endKey.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public Scanner createScannerByKey(byte[] beginKey, byte[] endKey)
throws IOException {
@@ -1147,7 +1147,7 @@ public class TFile {
* key-value entry of the TFile.
* @return The actual coverage of the returned scanner will cover all keys
* greater than or equal to the beginKey and less than the endKey.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*
* @deprecated Use {@link #createScannerByKey(RawComparable, RawComparable)}
* instead.
@@ -1169,7 +1169,7 @@ public class TFile {
* key-value entry of the TFile.
* @return The actual coverage of the returned scanner will cover all keys
* greater than or equal to the beginKey and less than the endKey.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public Scanner createScannerByKey(RawComparable beginKey, RawComparable endKey)
throws IOException {
@@ -1189,7 +1189,7 @@ public class TFile {
* The RecordNum for the last record (exclusive). To scan the whole
* file, either specify endRecNum==-1 or endRecNum==getEntryCount().
* @return The TFile scanner that covers the specified range of records.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public Scanner createScannerByRecordNum(long beginRecNum, long endRecNum)
throws IOException {
@@ -1313,7 +1313,7 @@ public class TFile {
* @param endKey
* End key of the scan. If null, scan up to the last <K, V>
* entry of the TFile.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
protected Scanner(Reader reader, RawComparable beginKey,
RawComparable endKey) throws IOException {
@@ -1338,7 +1338,7 @@ public class TFile {
* @param key
* The input key
* @return true if we find an equal key.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public boolean seekTo(byte[] key) throws IOException {
return seekTo(key, 0, key.length);
@@ -1356,7 +1356,7 @@ public class TFile {
* @param keyLen
* key buffer length.
* @return true if we find an equal key; false otherwise.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public boolean seekTo(byte[] key, int keyOffset, int keyLen)
throws IOException {
@@ -1432,7 +1432,7 @@ public class TFile {
* Rewind to the first entry in the scanner. The entry returned by the
* previous entry() call will be invalid.
*
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public void rewind() throws IOException {
seekTo(beginLocation);
@@ -1442,7 +1442,7 @@ public class TFile {
* Seek to the end of the scanner. The entry returned by the previous
* entry() call will be invalid.
*
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public void seekToEnd() throws IOException {
parkCursorAtEnd();
@@ -1455,7 +1455,7 @@ public class TFile {
*
* @param key
* The input key
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public void lowerBound(byte[] key) throws IOException {
lowerBound(key, 0, key.length);
@@ -1472,7 +1472,7 @@ public class TFile {
* offset in the key buffer.
* @param keyLen
* key buffer length.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public void lowerBound(byte[] key, int keyOffset, int keyLen)
throws IOException {
@@ -1486,7 +1486,7 @@ public class TFile {
*
* @param key
* The input key
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public void upperBound(byte[] key) throws IOException {
upperBound(key, 0, key.length);
@@ -1503,7 +1503,7 @@ public class TFile {
* offset in the key buffer.
* @param keyLen
* key buffer length.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public void upperBound(byte[] key, int keyOffset, int keyLen)
throws IOException {
@@ -1516,7 +1516,7 @@ public class TFile {
*
* @return true if the cursor successfully moves. False when cursor is
* already at the end location and cannot be advanced.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public boolean advance() throws IOException {
if (atEnd()) {
@@ -1614,7 +1614,7 @@ public class TFile {
* Get an entry to access the key and value.
*
* @return The Entry object to access the key and value.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public Entry entry() throws IOException {
checkKey();
@@ -1624,7 +1624,7 @@ public class TFile {
/**
* Get the RecordNum corresponding to the entry pointed by the cursor.
* @return The RecordNum corresponding to the entry pointed by the cursor.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public long getRecordNum() throws IOException {
return reader.getRecordNumByLocation(currentLocation);
@@ -1670,7 +1670,7 @@ public class TFile {
* BytesWritable to hold key.
* @param value
* BytesWritable to hold value
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public void get(BytesWritable key, BytesWritable value)
throws IOException {
@@ -1684,7 +1684,8 @@ public class TFile {
*
* @param key
* BytesWritable to hold the key.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
+ * @return the length of the key.
*/
public int getKey(BytesWritable key) throws IOException {
key.setSize(getKeyLength());
@@ -1698,8 +1699,9 @@ public class TFile {
* directly uses the buffer inside BytesWritable for storing the value.
* The call does not require the value length to be known.
*
- * @param value
- * @throws IOException
+ * @param value value.
+ * @throws IOException raised on errors performing I/O.
+ * @return the length of the value.
*/
public long getValue(BytesWritable value) throws IOException {
DataInputStream dis = getValueStream();
@@ -1725,7 +1727,7 @@ public class TFile {
* @param out
* The output stream
* @return the length of the key.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public int writeKey(OutputStream out) throws IOException {
out.write(keyBuffer, 0, klen);
@@ -1740,7 +1742,7 @@ public class TFile {
* @param out
* The output stream
* @return the length of the value
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public long writeValue(OutputStream out) throws IOException {
DataInputStream dis = getValueStream();
@@ -1768,7 +1770,7 @@ public class TFile {
* not be shorter than the key length.
* @return The length of the key.
*
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public int getKey(byte[] buf) throws IOException {
return getKey(buf, 0);
@@ -1784,7 +1786,7 @@ public class TFile {
* the key into. Requiring the key-length + offset no greater
* than the buffer length.
* @return The length of the key.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public int getKey(byte[] buf, int offset) throws IOException {
if ((offset | (buf.length - offset - klen)) < 0) {
@@ -1828,10 +1830,11 @@ public class TFile {
* without moving the cursor will result in exception:
* {@link #getValue(byte[])}, {@link #getValue(byte[], int)},
* {@link #getValueStream}.
- *
+ *
+ * @param buf buf.
* @return the length of the value. Does not require
* isValueLengthKnown() to be true.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*
*/
public int getValue(byte[] buf) throws IOException {
@@ -1846,10 +1849,12 @@ public class TFile {
* functions more than once without moving the cursor will result in
* exception: {@link #getValue(byte[])}, {@link #getValue(byte[], int)},
* {@link #getValueStream}.
- *
+ *
+ * @param buf buf.
+ * @param offset offset.
* @return the length of the value. Does not require
* isValueLengthKnown() to be true.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public int getValue(byte[] buf, int offset) throws IOException {
DataInputStream dis = getValueStream();
@@ -1892,7 +1897,7 @@ public class TFile {
* {@link #getValue(byte[], int)}, {@link #getValueStream}.
*
* @return The input stream for reading the value.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public DataInputStream getValueStream() throws IOException {
if (valueChecked == true) {
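The Writer, Reader, and Scanner javadoc fixes above all belong to a single write-then-scan lifecycle. A hedged end-to-end sketch; the path, block size, compression ("none"), and comparator ("memcmp") choices are illustrative:

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.BytesWritable;
import org.apache.hadoop.io.file.tfile.TFile;

public class TFileSketch {
  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    FileSystem fs = FileSystem.getLocal(conf);
    Path path = new Path("/tmp/example.tfile");

    // Write: no compression, memcmp key ordering (illustrative choices).
    try (FSDataOutputStream fsdos = fs.create(path);
         TFile.Writer writer =
             new TFile.Writer(fsdos, 64 * 1024, "none", "memcmp", conf)) {
      writer.append("key".getBytes(), "value".getBytes());
    }

    // Read back: the Reader needs the file length up front.
    long len = fs.getFileStatus(path).getLen();
    try (FSDataInputStream fsdis = fs.open(path);
         TFile.Reader reader = new TFile.Reader(fsdis, len, conf);
         TFile.Reader.Scanner scanner = reader.createScanner()) {
      BytesWritable key = new BytesWritable();
      BytesWritable value = new BytesWritable();
      while (!scanner.atEnd()) {
        scanner.entry().get(key, value);  // copy current key/value out
        scanner.advance();
      }
    }
  }
}
```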
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/Utils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/Utils.java
index 17a27f16b9a..714dc5a12ac 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/Utils.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/file/tfile/Utils.java
@@ -49,7 +49,7 @@ public final class Utils {
* output stream
* @param n
* The integer to be encoded
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
* @see Utils#writeVLong(DataOutput, long)
*/
public static void writeVInt(DataOutput out, int n) throws IOException {
@@ -95,7 +95,7 @@ public final class Utils {
* output stream
* @param n
* the integer number
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
@SuppressWarnings("fallthrough")
public static void writeVLong(DataOutput out, long n) throws IOException {
@@ -170,7 +170,7 @@ public final class Utils {
* @param in
* input stream
* @return the decoded integer
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*
* @see Utils#readVLong(DataInput)
*/
@@ -199,7 +199,7 @@ public final class Utils {
* @param in
* input stream
* @return the decoded long integer.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public static long readVLong(DataInput in) throws IOException {
@@ -249,9 +249,9 @@ public final class Utils {
/**
* Write a String as a VInt n, followed by n Bytes as in Text format.
*
- * @param out
- * @param s
- * @throws IOException
+ * @param out out.
+ * @param s s.
+ * @throws IOException raised on errors performing I/O.
*/
public static void writeString(DataOutput out, String s) throws IOException {
if (s != null) {
@@ -271,7 +271,7 @@ public final class Utils {
* @param in
* The input stream.
* @return The string
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public static String readString(DataInput in) throws IOException {
int length = readVInt(in);
@@ -299,7 +299,7 @@ public final class Utils {
*
* @param in
* input stream
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public Version(DataInput in) throws IOException {
major = in.readShort();
@@ -326,7 +326,7 @@ public final class Utils {
*
* @param out
* The DataOutput object.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public void write(DataOutput out) throws IOException {
out.writeShort(major);
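A round-trip sketch of the encoding helpers documented above; plain java.io streams, no HDFS needed:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.DataInputStream;
import java.io.DataOutputStream;
import java.io.IOException;
import org.apache.hadoop.io.file.tfile.Utils;

public class VLongRoundTrip {
  public static void main(String[] args) throws IOException {
    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    DataOutputStream out = new DataOutputStream(bos);
    Utils.writeVLong(out, 1234567L); // variable-length: small values use few bytes
    Utils.writeString(out, "hello"); // VInt length followed by Text-format bytes
    out.flush();

    DataInputStream in =
        new DataInputStream(new ByteArrayInputStream(bos.toByteArray()));
    long n = Utils.readVLong(in);    // 1234567
    String s = Utils.readString(in); // "hello"
    System.out.println(n + " " + s);
  }
}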
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java
index ebe7f213cee..5cf820c50ca 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/nativeio/NativeIO.java
@@ -355,7 +355,7 @@ public class NativeIO {
}
/**
- * Return true if the JNI-based native IO extensions are available.
+ * @return true if the JNI-based native IO extensions are available.
*/
public static boolean isAvailable() {
return NativeCodeLoader.isNativeCodeLoaded() && nativeLoaded;
@@ -367,7 +367,14 @@ public class NativeIO {
}
}
- /** Wrapper around open(2) */
+ /**
+ * Wrapper around open(2).
+ * @param path the file path.
+ * @param flags the open flags.
+ * @param mode the file mode.
+ * @return the file descriptor of the opened file.
+ * @throws IOException raised on errors performing I/O.
+ */
public static native FileDescriptor open(String path, int flags, int mode) throws IOException;
/** Wrapper around fstat(2) */
private static native Stat fstat(FileDescriptor fd) throws IOException;
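A guarded-usage sketch for the open(2) wrapper; the flag constants are assumed to come from NativeIO.POSIX, and the availability check guards against missing JNI extensions:

import java.io.FileDescriptor;
import java.io.IOException;
import org.apache.hadoop.io.nativeio.NativeIO;

public class NativeOpen {
  public static FileDescriptor openForWrite(String path) throws IOException {
    if (!NativeIO.isAvailable()) {    // the JNI extensions may not be loaded
      throw new IOException("native IO extensions not loaded");
    }
    return NativeIO.POSIX.open(path,
        NativeIO.POSIX.O_WRONLY | NativeIO.POSIX.O_CREAT, 0644);
  }
}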
@@ -428,6 +435,10 @@ public class NativeIO {
* for this syscall for more information. On systems where this
* call is not available, does nothing.
*
+ * @param fd the file descriptor.
+ * @param offset the offset within the file.
+ * @param nbytes the number of bytes to sync.
+ * @param flags the sync_file_range flags.
* @throws NativeIOException if there is an error with the syscall
*/
public static void syncFileRangeIfPossible(
@@ -712,7 +723,14 @@ public class NativeIO {
private static native void createDirectoryWithMode0(String path, int mode)
throws NativeIOException;
- /** Wrapper around CreateFile() on Windows */
+ /**
+ * Wrapper around CreateFile() on Windows.
+ * @param path the file path.
+ * @param desiredAccess the desired access.
+ * @param shareMode the share mode.
+ * @param creationDisposition the creation disposition.
+ * @return the file descriptor of the created file.
+ * @throws IOException raised on errors performing I/O.
+ */
public static native FileDescriptor createFile(String path,
long desiredAccess, long shareMode, long creationDisposition)
throws IOException;
@@ -749,7 +767,13 @@ public class NativeIO {
long desiredAccess, long shareMode, long creationDisposition, int mode)
throws NativeIOException;
- /** Wrapper around SetFilePointer() on Windows */
+ /**
+ * Wrapper around SetFilePointer() on Windows.
+ * @param fd the file descriptor.
+ * @param distanceToMove the distance to move the pointer.
+ * @param moveMethod the starting point for the move.
+ * @return the new file pointer position.
+ * @throws IOException raised on errors performing I/O.
+ */
public static native long setFilePointer(FileDescriptor fd,
long distanceToMove, long moveMethod) throws IOException;
@@ -840,7 +864,7 @@ public class NativeIO {
}
/**
- * Return true if the JNI-based native IO extensions are available.
+ * @return true if the JNI-based native IO extensions are available.
*/
public static boolean isAvailable() {
return NativeCodeLoader.isNativeCodeLoaded() && nativeLoaded;
@@ -898,6 +922,7 @@ public class NativeIO {
*
* @param name the full principal name containing the domain
* @return name with domain removed
+ * @throws IOException raised on errors performing I/O.
*/
private static String stripDomain(String name) {
int i = name.indexOf('\\');
@@ -933,6 +958,11 @@ public class NativeIO {
* file opened at a given offset, i.e. other process can delete
* the file the FileDescriptor is reading. Only Windows implementation
* uses the native interface.
+ *
+ * @param f the file to open.
+ * @param seekOffset the offset to seek to after opening.
+ * @return the file descriptor.
+ * @throws IOException raised on errors performing I/O.
*/
public static FileDescriptor getShareDeleteFileDescriptor(
File f, long seekOffset) throws IOException {
@@ -961,7 +991,7 @@ public class NativeIO {
}
/**
- * Create the specified File for write access, ensuring that it does not exist.
+ * Create the specified File for write access, ensuring that it does not exist.
+ * @return an output stream for writing to the newly created file.
* @param f the file that we want to create
* @param permissions we want to have on the file (if security is enabled)
*
@@ -1045,7 +1075,7 @@ public class NativeIO {
*
* @param src source file
* @param dst hardlink location
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
@Deprecated
public static void link(File src, File dst) throws IOException {
@@ -1103,7 +1133,7 @@ public class NativeIO {
*
* @param src The source path
* @param dst The destination path
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public static void copyFileUnbuffered(File src, File dst) throws IOException {
if (nativeLoaded && Shell.WINDOWS) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/AsyncCallHandler.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/AsyncCallHandler.java
index 06dd0d45b3c..3ebbcd912dc 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/AsyncCallHandler.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/AsyncCallHandler.java
@@ -49,7 +49,11 @@ public class AsyncCallHandler {
private static final ThreadLocal>
ASYNC_RETURN = new ThreadLocal<>();
- /** @return the async return value from {@link AsyncCallHandler}. */
+ /**
+ * @param <R> the type of the return value.
+ * @param <T> the type of the exception.
+ * @return the async return value from {@link AsyncCallHandler}.
+ */
@InterfaceStability.Unstable
@SuppressWarnings("unchecked")
public static <R, T extends Throwable> AsyncGet<R, T> getAsyncReturn() {
@@ -62,7 +66,10 @@ public class AsyncCallHandler {
}
}
- /** For the lower rpc layers to set the async return value. */
+ /**
+ * For the lower rpc layers to set the async return value.
+ * @param asyncReturn the async return value.
+ */
@InterfaceStability.Unstable
public static void setLowerLayerAsyncReturn(
AsyncGet<?, Exception> asyncReturn) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java
index 842811edb39..0b66347f1f9 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryPolicies.java
@@ -75,6 +75,10 @@ public class RetryPolicies {
*
* Keep trying forever with a fixed time between attempts.
*
+ * @param sleepTime the time to sleep between attempts.
+ * @param timeUnit the unit of sleepTime.
+ * @return the retry policy.
*/
public static final RetryPolicy retryForeverWithFixedSleep(long sleepTime,
TimeUnit timeUnit) {
@@ -87,6 +91,11 @@ public class RetryPolicies {
* Keep trying a limited number of times, waiting a fixed time between attempts,
* and then fail by re-throwing the exception.
*
+ * @param maxRetries the maximum number of retries.
+ * @param sleepTime the time to sleep between attempts.
+ * @param timeUnit the unit of sleepTime.
+ * @return the retry policy.
*/
public static final RetryPolicy retryUpToMaximumCountWithFixedSleep(int maxRetries, long sleepTime, TimeUnit timeUnit) {
return new RetryUpToMaximumCountWithFixedSleep(maxRetries, sleepTime, timeUnit);
@@ -97,6 +106,11 @@ public class RetryPolicies {
* Keep trying for a maximum time, waiting a fixed time between attempts,
* and then fail by re-throwing the exception.
*
+ * @param maxTime the maximum time to keep retrying.
+ * @param sleepTime the time to sleep between attempts.
+ * @param timeUnit the unit of maxTime and sleepTime.
+ * @return the retry policy.
*/
public static final RetryPolicy retryUpToMaximumTimeWithFixedSleep(long maxTime, long sleepTime, TimeUnit timeUnit) {
return new RetryUpToMaximumTimeWithFixedSleep(maxTime, sleepTime, timeUnit);
@@ -108,6 +122,11 @@ public class RetryPolicies {
* and then fail by re-throwing the exception.
 * The time between attempts is sleepTime multiplied by the number of tries so far.
*
+ * @param maxRetries the maximum number of retries.
+ * @param sleepTime the base sleep time between attempts.
+ * @param timeUnit the unit of sleepTime.
+ * @return the retry policy.
*/
public static final RetryPolicy retryUpToMaximumCountWithProportionalSleep(int maxRetries, long sleepTime, TimeUnit timeUnit) {
return new RetryUpToMaximumCountWithProportionalSleep(maxRetries, sleepTime, timeUnit);
@@ -120,6 +139,12 @@ public class RetryPolicies {
 * The time between attempts is sleepTime multiplied by a random
 * number in the range of [0, 2^(number of retries))
*
+ * @param maxRetries the maximum number of retries.
+ * @param sleepTime the base sleep time between attempts.
+ * @param timeUnit the unit of sleepTime.
+ * @return the retry policy.
*/
public static final RetryPolicy exponentialBackoffRetry(
int maxRetries, long sleepTime, TimeUnit timeUnit) {
@@ -130,6 +155,10 @@ public class RetryPolicies {
*
* Set a default policy with some explicit handlers for specific exceptions.
*
+ * @param defaultPolicy the default policy.
+ * @param exceptionToPolicyMap map of exception class to retry policy.
+ * @return the retry policy.
*/
public static final RetryPolicy retryByException(RetryPolicy defaultPolicy,
Map, RetryPolicy> exceptionToPolicyMap) {
@@ -141,6 +170,10 @@ public class RetryPolicies {
* A retry policy for RemoteException
* Set a default policy with some explicit handlers for specific exceptions.
*
+ * @param defaultPolicy the default policy.
+ * @param exceptionToPolicyMap map of exception class to retry policy.
+ * @return the retry policy.
*/
public static final RetryPolicy retryByRemoteException(
RetryPolicy defaultPolicy,
@@ -150,6 +183,9 @@ public class RetryPolicies {
/**
* A retry policy for exceptions other than RemoteException.
+ * @param defaultPolicy the default policy.
+ * @param exceptionToPolicyMap map of exception class to retry policy.
+ * @return the retry policy.
*/
public static final RetryPolicy retryOtherThanRemoteException(
RetryPolicy defaultPolicy,
@@ -437,6 +473,7 @@ public class RetryPolicies {
* where t_i and n_i are the i-th pair of sleep time and number of retries.
* Note that the white spaces in the string are ignored.
*
+ * @param s the comma-separated string to parse.
* @return the parsed object, or null if the parsing fails.
*/
public static MultipleLinearRandomRetry parseCommaSeparatedString(String s) {
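A usage sketch of the factory methods documented above; the spec string follows the "s1,n1,s2,n2,..." format described for parseCommaSeparatedString:

import java.util.concurrent.TimeUnit;
import org.apache.hadoop.io.retry.RetryPolicies;
import org.apache.hadoop.io.retry.RetryPolicy;

public class RetryPolicyExamples {
  public static void main(String[] args) {
    RetryPolicy fixed = RetryPolicies.retryUpToMaximumCountWithFixedSleep(
        3, 500, TimeUnit.MILLISECONDS);   // up to 3 tries, 500 ms apart
    RetryPolicy backoff = RetryPolicies.exponentialBackoffRetry(
        5, 200, TimeUnit.MILLISECONDS);   // randomized exponential backoff
    // sleep 10s for up to 6 retries, then 60s for up to 10 more
    RetryPolicies.MultipleLinearRandomRetry parsed =
        RetryPolicies.MultipleLinearRandomRetry
            .parseCommaSeparatedString("10000,6,60000,10");
    System.out.println(fixed + " " + backoff + " " + parsed);
  }
}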
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryProxy.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryProxy.java
index 7fcd5fd4b00..eaff5bbd528 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryProxy.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryProxy.java
@@ -34,6 +34,7 @@ public class RetryProxy {
* @param iface the interface that the retry will implement
* @param implementation the instance whose methods should be retried
* @param retryPolicy the policy for retrying method call failures
+ * @param <T> the type of the proxied interface.
* @return the retry proxy
*/
public static <T> Object create(Class<T> iface, T implementation,
@@ -51,6 +52,7 @@ public class RetryProxy {
* @param iface the interface that the retry will implement
* @param proxyProvider provides implementation instances whose methods should be retried
* @param retryPolicy the policy for retrying or failing over method call failures
+ * @param <T> the type of the proxied interface.
* @return the retry proxy
*/
public static <T> Object create(Class<T> iface,
@@ -69,6 +71,7 @@ public class RetryProxy {
* {@link RetryPolicies#TRY_ONCE_THEN_FAIL} is used.
*
* @param iface the interface that the retry will implement
+ * @param <T> the type of the proxied interface.
* @param implementation the instance whose methods should be retried
* @param methodNameToPolicyMap a map of method names to retry policies
* @return the retry proxy
@@ -90,6 +93,8 @@ public class RetryProxy {
* @param iface the interface that the retry will implement
* @param proxyProvider provides implementation instances whose methods should be retried
* @param methodNameToPolicyMap map of method names to retry policies
+ * @param defaultPolicy the default policy.
+ * @param <T> the type of the proxied interface.
* @return the retry proxy
*/
public static <T> Object create(Class<T> iface,
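A sketch of wrapping an implementation behind a retry proxy; the Echo interface and its lambda implementation are hypothetical stand-ins for a real RPC protocol:

import java.util.concurrent.TimeUnit;
import org.apache.hadoop.io.retry.RetryPolicies;
import org.apache.hadoop.io.retry.RetryProxy;

public class RetryProxyExample {
  // Hypothetical protocol interface, stands in for a real one.
  interface Echo { String echo(String msg); }

  public static void main(String[] args) {
    Echo impl = msg -> msg;                     // a flaky implementation would go here
    Echo retried = (Echo) RetryProxy.create(Echo.class, impl,
        RetryPolicies.retryUpToMaximumCountWithFixedSleep(
            3, 500, TimeUnit.MILLISECONDS));
    System.out.println(retried.echo("hello"));  // re-attempted per policy on failure
  }
}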
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryUtils.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryUtils.java
index c035a42d4a7..d2fb070ee2c 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryUtils.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/retry/RetryUtils.java
@@ -48,7 +48,7 @@ public class RetryUtils {
* - non-IOException.
*
*
- * @param conf
+ * @param conf configuration.
* @param retryPolicyEnabledKey conf property key for enabling retry
* @param defaultRetryPolicyEnabled default retryPolicyEnabledKey conf value
* @param retryPolicySpecKey conf property key for retry policy spec
@@ -168,7 +168,7 @@ public class RetryUtils {
* Retry policy spec:
* N pairs of sleep-time and number-of-retries "s1,n1,s2,n2,..."
*
- * @param conf
+ * @param conf configuration.
* @param retryPolicyEnabledKey conf property key for enabling retry
* @param defaultRetryPolicyEnabled default retryPolicyEnabledKey conf value
* @param retryPolicySpecKey conf property key for retry policy spec
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/Deserializer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/Deserializer.java
index 3c8dfccafa8..4bdd60d90c3 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/Deserializer.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/Deserializer.java
@@ -35,13 +35,15 @@ import org.apache.hadoop.classification.InterfaceStability;
* other producers may read from the input between calls to
* {@link #deserialize(Object)}.
*
- * @param <T>
+ * @param <T> the type of the objects deserialized.
*/
@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
@InterfaceStability.Evolving
public interface Deserializer<T> {
/**
* Prepare the deserializer for reading.
+ * @param in input stream.
+ * @throws IOException raised on errors performing I/O.
*/
void open(InputStream in) throws IOException;
@@ -53,12 +55,15 @@ public interface Deserializer {
 * stream. Otherwise, if the object t is null a new
* deserialized object will be created.
*
+ * @param t the object to deserialize into, or null to create a new one.
* @return the deserialized object
+ * @throws IOException raised on errors performing I/O.
*/
T deserialize(T t) throws IOException;
/**
* Close the underlying input stream and clear up any resources.
+ * @throws IOException raised on errors performing I/O.
*/
void close() throws IOException;
}
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/DeserializerComparator.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/DeserializerComparator.java
index 05205c5523c..29c04f66d43 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/DeserializerComparator.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/DeserializerComparator.java
@@ -37,7 +37,7 @@ import org.apache.hadoop.io.RawComparator;
* implementation of {@link RawComparator} that operates directly
* on byte representations.
*
- * @param <T>
+ * @param <T> the type of the objects compared.
*/
@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
@InterfaceStability.Evolving
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/JavaSerializationComparator.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/JavaSerializationComparator.java
index f9bf692f1fc..d53f7ab75c5 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/JavaSerializationComparator.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/JavaSerializationComparator.java
@@ -31,7 +31,7 @@ import org.apache.hadoop.io.RawComparator;
* {@link Deserializer} to deserialize objects that are then compared via
* their {@link Comparable} interfaces.
*
- * @param <T>
+ * @param <T> the type of the serializable objects compared.
* @see JavaSerialization
*/
@InterfaceAudience.Public
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/Serialization.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/Serialization.java
index 6f2097f7bf9..0793dc1ca01 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/Serialization.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/Serialization.java
@@ -25,7 +25,7 @@ import org.apache.hadoop.classification.InterfaceStability;
*
* Encapsulates a {@link Serializer}/{@link Deserializer} pair.
*
- * @param <T>
+ * @param <T> the type of objects handled by this serialization.
*/
@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
@InterfaceStability.Evolving
public interface Serialization<T> {
/**
* Allows clients to test whether this {@link Serialization}
* supports the given class.
+ *
+ * @param c the class to be tested.
+ * @return true if this serialization can accept the given class, false otherwise.
*/
boolean accept(Class<?> c);
/**
* @return a {@link Serializer} for the given class.
+ * @param c the class whose serializer is requested.
*/
Serializer<T> getSerializer(Class<T> c);
/**
* @return a {@link Deserializer} for the given class.
+ * @param c the class whose deserializer is requested.
*/
Deserializer<T> getDeserializer(Class<T> c);
}
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/SerializationFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/SerializationFactory.java
index ce0c3fe398e..b531ae85233 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/SerializationFactory.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/SerializationFactory.java
@@ -52,6 +52,8 @@ public class SerializationFactory extends Configured {
 * property from conf, which is a comma-delimited list of
* classnames.
*
+ * @param conf the configuration, from which the serializations list is read.
*/
public SerializationFactory(Configuration conf) {
super(conf);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/Serializer.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/Serializer.java
index 5ada541370e..c44b3678fc3 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/Serializer.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/Serializer.java
@@ -35,23 +35,28 @@ import org.apache.hadoop.classification.InterfaceStability;
* other producers may write to the output between calls to
* {@link #serialize(Object)}.
*
- * @param <T>
+ * @param <T> the type of the objects serialized.
*/
@InterfaceAudience.LimitedPrivate({"HDFS", "MapReduce"})
@InterfaceStability.Evolving
public interface Serializer<T> {
/**
* Prepare the serializer for writing.
+ * @param out output stream.
+ * @throws IOException raised on errors performing I/O.
*/
void open(OutputStream out) throws IOException;
/**
 * Serialize t to the underlying output stream.
+ * @param t the object to serialize.
+ * @throws IOException raised on errors performing I/O.
*/
void serialize(T t) throws IOException;
/**
* Close the underlying output stream and clear up any resources.
+ * @throws IOException raised on errors performing I/O.
*/
void close() throws IOException;
}
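A round-trip sketch of the open/serialize/close and open/deserialize/close contracts above, using SerializationFactory and the default Writable serialization:

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.io.serializer.Deserializer;
import org.apache.hadoop.io.serializer.SerializationFactory;
import org.apache.hadoop.io.serializer.Serializer;

public class SerializationRoundTrip {
  public static void main(String[] args) throws IOException {
    SerializationFactory factory = new SerializationFactory(new Configuration());

    ByteArrayOutputStream bos = new ByteArrayOutputStream();
    Serializer<Text> ser = factory.getSerializer(Text.class);
    ser.open(bos);
    ser.serialize(new Text("hello"));
    ser.close();

    Deserializer<Text> deser = factory.getDeserializer(Text.class);
    deser.open(new ByteArrayInputStream(bos.toByteArray()));
    Text copy = deser.deserialize(null); // null => allocate a new instance
    deser.close();
    System.out.println(copy);
  }
}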
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/avro/AvroSerialization.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/avro/AvroSerialization.java
index f340cb3a98a..2327fd2d55a 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/avro/AvroSerialization.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/io/serializer/avro/AvroSerialization.java
@@ -61,18 +61,24 @@ public abstract class AvroSerialization extends Configured
/**
* Return an Avro Schema instance for the given class.
+ * @param t the object whose schema is requested.
+ * @return the Avro schema.
*/
@InterfaceAudience.Private
public abstract Schema getSchema(T t);
/**
* Create and return Avro DatumWriter for the given class.
+ * @param clazz the class to create a writer for.
+ * @return the Avro DatumWriter.
*/
@InterfaceAudience.Private
public abstract DatumWriter<T> getWriter(Class<T> clazz);
/**
* Create and return Avro DatumReader for the given class.
+ * @param clazz the class to create a reader for.
+ * @return the Avro DatumReader.
*/
@InterfaceAudience.Private
public abstract DatumReader<T> getReader(Class<T> clazz);
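AvroSerialization subclasses take effect once listed in the io.serializations property; a configuration sketch (the class list is abbreviated to two entries for illustration):

import org.apache.hadoop.conf.Configuration;

public class RegisterAvroSerialization {
  public static Configuration withAvro() {
    Configuration conf = new Configuration();
    // Keep the Writable default and add Avro reflect-based serialization.
    conf.setStrings("io.serializations",
        "org.apache.hadoop.io.serializer.WritableSerialization",
        "org.apache.hadoop.io.serializer.avro.AvroReflectSerialization");
    return conf;
  }
}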
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/AlignmentContext.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/AlignmentContext.java
index fbf825bcb91..3d309235fe8 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/AlignmentContext.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/AlignmentContext.java
@@ -71,7 +71,7 @@ public interface AlignmentContext {
* misaligned with the client state.
* See implementation for more details.
* @return state id required for the server to execute the call.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
long receiveRequestState(RpcRequestHeaderProto header, long threshold)
throws IOException;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/CallQueueManager.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/CallQueueManager.java
index 6cc2540c174..fa6f34adaf3 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/CallQueueManager.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/CallQueueManager.java
@@ -405,6 +405,12 @@ public class CallQueueManager
/**
* Replaces active queue with the newly requested one and transfers
* all calls to the newQ before returning.
+ *
+ * @param schedulerClass the scheduler class to use.
+ * @param queueClassToUse the call queue class to use.
+ * @param maxSize the maximum size of the queue.
+ * @param ns the configuration property prefix (namespace).
+ * @param conf the configuration.
*/
public synchronized void swapQueue(
Class<? extends RpcScheduler> schedulerClass,
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
index 49432aff117..2fe8aca85ed 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Client.java
@@ -112,7 +112,12 @@ public class Client implements AutoCloseable {
return (AsyncGet<T, IOException>) ASYNC_RPC_RESPONSE.get();
}
- /** Set call id and retry count for the next call. */
+ /**
+ * Set call id and retry count for the next call.
+ * @param cid the call id.
+ * @param rc the retry count.
+ * @param externalHandler an external handler to notify.
+ */
public static void setCallIdAndRetryCount(int cid, int rc,
Object externalHandler) {
Preconditions.checkArgument(cid != RpcConstants.INVALID_CALL_ID);
@@ -1349,8 +1354,14 @@ public class Client implements AutoCloseable {
}
}
- /** Construct an IPC client whose values are of the given {@link Writable}
- * class. */
+ /**
+ * Construct an IPC client whose values are of the given {@link Writable}
+ * class.
+ *
+ * @param valueClass the class of call response values.
+ * @param conf the configuration.
+ * @param factory the socket factory.
+ */
public Client(Class<? extends Writable> valueClass, Configuration conf,
SocketFactory factory) {
this.valueClass = valueClass;
@@ -1372,9 +1383,9 @@ public class Client implements AutoCloseable {
}
/**
- * Construct an IPC client with the default SocketFactory
- * @param valueClass
- * @param conf
+ * Construct an IPC client with the default SocketFactory.
+ * @param valueClass the class of call response values.
+ * @param conf the configuration.
*/
public Client(Class<? extends Writable> valueClass, Configuration conf) {
this(valueClass, conf, NetUtils.getDefaultSocketFactory(conf));
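A construction sketch for the two constructors above; LongWritable is just an example value class, and real traffic would go through call(...) against a running server:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.ipc.Client;

public class ClientConstruction {
  public static void main(String[] args) {
    Configuration conf = new Configuration();
    // Default-socket-factory variant; valueClass is the response Writable type.
    Client client = new Client(LongWritable.class, conf);
    // ... issue calls via client.call(...) ...
    client.stop(); // release connections and worker threads
  }
}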
@@ -1432,7 +1443,7 @@ public class Client implements AutoCloseable {
 * Make a call, passing rpcRequest, to the IPC server defined by
 * remoteId, returning the rpc response.
*
- * @param rpcKind
+ * @param rpcKind - the kind of RPC.
* @param rpcRequest - contains serialized method and method parameters
* @param remoteId - the target rpc server
* @param fallbackToSimpleAuth - set to true or false during this method to
@@ -1440,6 +1451,7 @@ public class Client implements AutoCloseable {
* @return the rpc response
* Throws exceptions if there are network problems or if the remote code
* threw an exception.
+ * @throws IOException raised on errors performing I/O.
*/
public Writable call(RPC.RpcKind rpcKind, Writable rpcRequest,
ConnectionId remoteId, AtomicBoolean fallbackToSimpleAuth)
@@ -1760,7 +1772,7 @@ public class Client implements AutoCloseable {
return maxRetriesOnSasl;
}
- /** max connection retries on socket time outs */
+ /** @return max connection retries on socket timeouts. */
public int getMaxRetriesOnSocketTimeouts() {
return maxRetriesOnSocketTimeouts;
}
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ClientCache.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ClientCache.java
index b7257c8b2a6..c5d0183dba2 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ClientCache.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ClientCache.java
@@ -93,6 +93,8 @@ public class ClientCache {
/**
* Stop a RPC client connection
* A RPC client is closed only when its reference count becomes zero.
+ *
+ * @param client the client to stop.
*/
public void stopClient(Client client) {
if (Client.LOG.isDebugEnabled()) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ClientId.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ClientId.java
index 152e062392f..bab1de753f2 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ClientId.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ClientId.java
@@ -35,7 +35,7 @@ public class ClientId {
private static final int shiftWidth = 8;
/**
- * Return clientId as byte[]
+ * @return the clientId as byte[].
*/
public static byte[] getClientId() {
UUID uuid = UUID.randomUUID();
@@ -45,7 +45,10 @@ public class ClientId {
return buf.array();
}
- /** Convert a clientId byte[] to string */
+ /**
+ * Convert a clientId byte[] to its string representation.
+ * @param clientId the client id bytes.
+ * @return the string representation.
+ */
public static String toString(byte[] clientId) {
// clientId can be null or an empty array
if (clientId == null || clientId.length == 0) {
@@ -74,7 +77,10 @@ public class ClientId {
return lsb;
}
- /** Convert from clientId string byte[] representation of clientId */
+ /**
+ * Convert from the string representation of a clientId back to its byte[] form.
+ * @param id the string representation.
+ * @return the clientId bytes.
+ */
public static byte[] toBytes(String id) {
if (id == null || "".equals(id)) {
return new byte[0];
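The three helpers above round-trip, as a quick sketch:

import java.util.Arrays;
import org.apache.hadoop.ipc.ClientId;

public class ClientIdRoundTrip {
  public static void main(String[] args) {
    byte[] id = ClientId.getClientId();   // 16 bytes from a random UUID
    String text = ClientId.toString(id);  // UUID-style string form
    byte[] back = ClientId.toBytes(text);
    System.out.println(text + " " + Arrays.equals(id, back)); // true
  }
}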
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/GenericRefreshProtocol.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/GenericRefreshProtocol.java
index bfa055bcb09..10e661a3095 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/GenericRefreshProtocol.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/GenericRefreshProtocol.java
@@ -41,7 +41,11 @@ public interface GenericRefreshProtocol {
/**
* Refresh the resource based on identity passed in.
- * @throws IOException
+ *
+ * @param identifier the resource to refresh.
+ * @param args the refresh arguments.
+ * @return a collection of RefreshResponse.
+ * @throws IOException raised on errors performing I/O.
*/
@Idempotent
Collection refresh(String identifier, String[] args)
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufHelper.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufHelper.java
index 1e110b90113..9ed0640c8dc 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufHelper.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufHelper.java
@@ -85,7 +85,7 @@ public class ProtobufHelper {
/**
* Get the ByteString for frequently used fixed and small set strings.
* @param key string
- * @return
+ * @return the ByteString for frequently used fixed and small set strings.
*/
public static ByteString getFixedByteString(Text key) {
ByteString value = FIXED_BYTESTRING_CACHE.get(key);
@@ -99,7 +99,7 @@ public class ProtobufHelper {
/**
* Get the ByteString for frequently used fixed and small set strings.
* @param key string
- * @return
+ * @return ByteString for frequently used fixed and small set strings.
*/
public static ByteString getFixedByteString(String key) {
ByteString value = FIXED_BYTESTRING_CACHE.get(key);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java
index c4457a653e3..e53f57b1fc9 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine.java
@@ -144,6 +144,10 @@ public class ProtobufRpcEngine implements RpcEngine {
/**
* This constructor takes a connectionId, instead of creating a new one.
+ * @param protocol the protocol class.
+ * @param connId the connection id.
+ * @param conf the configuration.
+ * @param factory the socket factory.
*/
protected Invoker(Class<?> protocol, Client.ConnectionId connId,
Configuration conf, SocketFactory factory) {
@@ -423,6 +427,10 @@ public class ProtobufRpcEngine implements RpcEngine {
* @param portRangeConfig A config parameter that can be used to restrict
* the range of ports used when port is 0 (an ephemeral port)
* @param alignmentContext provides server state info on client responses
+ * @param secretManager the secret manager.
+ * @param queueSizePerHandler the queue size per handler.
+ * @param numReaders the number of reader threads.
+ * @throws IOException raised on errors performing I/O.
*/
public Server(Class<?> protocolClass, Object protocolImpl,
Configuration conf, String bindAddress, int port, int numHandlers,
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine2.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine2.java
index e1ee3742828..3a8c6275820 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine2.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtobufRpcEngine2.java
@@ -151,6 +151,11 @@ public class ProtobufRpcEngine2 implements RpcEngine {
/**
* This constructor takes a connectionId, instead of creating a new one.
+ *
+ * @param protocol the protocol class.
+ * @param connId the connection id.
+ * @param conf the configuration.
+ * @param factory the socket factory.
*/
protected Invoker(Class<?> protocol, Client.ConnectionId connId,
Configuration conf, SocketFactory factory) {
@@ -458,6 +463,7 @@ public class ProtobufRpcEngine2 implements RpcEngine {
* @param portRangeConfig A config parameter that can be used to restrict
* the range of ports used when port is 0 (an ephemeral port)
* @param alignmentContext provides server state info on client responses
+ * @throws IOException raised on errors performing I/O.
*/
public Server(Class<?> protocolClass, Object protocolImpl,
Configuration conf, String bindAddress, int port, int numHandlers,
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolMetaInterface.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolMetaInterface.java
index 29c07ac29cb..f23c05936a3 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolMetaInterface.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolMetaInterface.java
@@ -36,7 +36,7 @@ public interface ProtocolMetaInterface {
* It is assumed that all method names are unique for a protocol.
* @param methodName The name of the method
* @return true if method is supported, otherwise false.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public boolean isMethodSupported(String methodName) throws IOException;
}
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolProxy.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolProxy.java
index cc66958d14e..49029f97b3d 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolProxy.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/ProtocolProxy.java
@@ -85,11 +85,12 @@ public class ProtocolProxy {
}
/**
- * Check if a method is supported by the server or not
+ * Check if a method is supported by the server or not.
*
* @param methodName a method's name in String format
* @param parameterTypes a method's parameter types
* @return true if the method is supported by the server
+ * @throws IOException raised on errors performing I/O.
*/
public synchronized boolean isMethodSupported(String methodName,
Class<?>... parameterTypes)
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java
index 3bbd82d153a..818305b3169 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RPC.java
@@ -150,6 +150,9 @@ public class RPC {
* Get the protocol name.
* If the protocol class has a ProtocolAnnotation, then get the protocol
* name from the annotation; otherwise the class name is the protocol name.
+ *
+ * @param protocol the protocol class.
+ * @return the protocol name.
*/
static public String getProtocolName(Class<?> protocol) {
if (protocol == null) {
@@ -164,6 +167,9 @@ public class RPC {
* If the protocol class has a ProtocolAnnotation,
* then get the protocol version from the annotation;
* otherwise get it from the versionID field of the protocol class.
+ *
+ * @param protocol the protocol class.
+ * @return the protocol version.
*/
static public long getProtocolVersion(Class<?> protocol) {
if (protocol == null) {
@@ -258,14 +264,14 @@ public class RPC {
}
/**
- * Get the client's preferred version
+ * @return the client's preferred version.
*/
public long getClientVersion() {
return clientVersion;
}
/**
- * Get the server's agreed to version.
+ * @return the server's agreed-to version.
*/
public long getServerVersion() {
return serverVersion;
@@ -286,8 +292,9 @@ public class RPC {
}
/**
- * Get a proxy connection to a remote server
- *
+ * Get a proxy connection to a remote server.
+ *
+ * @param <T> the protocol type.
* @param protocol protocol class
* @param clientVersion client version
* @param addr remote address
@@ -306,8 +313,9 @@ public class RPC {
/**
* Get a protocol proxy that contains a proxy connection to a remote server
- * and a set of methods that are supported by the server
- *
+ * and a set of methods that are supported by the server.
+ *
+ * @param <T> the protocol type.
* @param protocol protocol class
* @param clientVersion client version
* @param addr remote address
@@ -324,8 +332,9 @@ public class RPC {
}
/**
- * Get a proxy connection to a remote server
- *
+ * Get a proxy connection to a remote server.
+ *
+ * @param <T> the protocol type.
* @param protocol protocol class
* @param clientVersion client version
* @param addr remote address
@@ -344,7 +353,8 @@ public class RPC {
/**
* Get a protocol proxy that contains a proxy connection to a remote server
* and a set of methods that are supported by the server
- *
+ *
+ * @param <T> the protocol type.
* @param protocol protocol class
* @param clientVersion client version
* @param addr remote address
@@ -362,8 +372,9 @@ public class RPC {
}
/**
- * Get a proxy connection to a remote server
- *
+ * Get a proxy connection to a remote server.
+ *
+ * @param <T> the protocol type.
* @param protocol protocol class
* @param clientVersion client version
* @param addr remote address
@@ -384,16 +395,18 @@ public class RPC {
/**
* Get a protocol proxy that contains a proxy connection to a remote server
- * and a set of methods that are supported by the server
- *
+ * and a set of methods that are supported by the server.
+ *
+ * @param <T> the protocol type.
* @param protocol protocol class
* @param clientVersion client version
* @param addr remote address
* @param conf configuration to use
* @param rpcTimeout timeout for each RPC
+ * @param connectionRetryPolicy the connection retry policy.
* @param timeout time in milliseconds before giving up
* @return the proxy
- * @throws IOException if the far end through a RemoteException
+ * @throws IOException if the far end threw a RemoteException.
*/
public static <T> ProtocolProxy<T> waitForProtocolProxy(Class<T> protocol,
long clientVersion,
@@ -439,9 +452,18 @@ public class RPC {
}
}
- /** Construct a client-side proxy object that implements the named protocol,
+ /**
+ * Construct a client-side proxy object that implements the named protocol,
* talking to a server at the named address.
- * @param <T> */
+ * @param <T> the protocol type.
+ * @param protocol the protocol class.
+ * @param clientVersion the client version.
+ * @param addr the remote address.
+ * @param conf the configuration.
+ * @param factory the socket factory.
+ * @return the proxy.
+ * @throws IOException raised on errors performing I/O.
+ */
public static <T> T getProxy(Class<T> protocol,
long clientVersion,
InetSocketAddress addr, Configuration conf,
@@ -452,8 +474,9 @@ public class RPC {
/**
* Get a protocol proxy that contains a proxy connection to a remote server
- * and a set of methods that are supported by the server
- *
+ * and a set of methods that are supported by the server.
+ *
+ * @param <T> the protocol type.
* @param protocol protocol class
* @param clientVersion client version
* @param addr remote address
@@ -470,9 +493,21 @@ public class RPC {
return getProtocolProxy(protocol, clientVersion, addr, ugi, conf, factory);
}
- /** Construct a client-side proxy object that implements the named protocol,
+ /**
+ * Construct a client-side proxy object that implements the named protocol,
* talking to a server at the named address.
- * @param <T> */
+ *
+ * @param <T> the protocol type.
+ * @param protocol the protocol class.
+ * @param clientVersion the client version.
+ * @param addr the remote address.
+ * @param ticket the user group information (ticket).
+ * @param conf the configuration.
+ * @param factory the socket factory.
+ * @return the protocol proxy.
+ * @throws IOException raised on errors performing I/O.
+ */
public static <T> T getProxy(Class<T> protocol,
long clientVersion,
InetSocketAddress addr,
@@ -486,7 +521,8 @@ public class RPC {
/**
* Get a protocol proxy that contains a proxy connection to a remote server
* and a set of methods that are supported by the server
- *
+ *
+ * @param <T> the protocol type.
* @param protocol protocol class
* @param clientVersion client version
* @param addr remote address
@@ -509,8 +545,8 @@ public class RPC {
/**
* Construct a client-side proxy that implements the named protocol,
* talking to a server at the named address.
- * @param
- *
+ *
+ * @param <T> the protocol type.
* @param protocol protocol
* @param clientVersion client's version
* @param addr server address
@@ -534,8 +570,9 @@ public class RPC {
/**
* Get a protocol proxy that contains a proxy connection to a remote server
- * and a set of methods that are supported by the server
- *
+ * and a set of methods that are supported by the server.
+ *
+ * @param <T> the protocol type.
* @param protocol protocol
* @param clientVersion client's version
* @param addr server address
@@ -561,8 +598,9 @@ public class RPC {
/**
* Get a protocol proxy that contains a proxy connection to a remote server
- * and a set of methods that are supported by the server
+ * and a set of methods that are supported by the server.
*
+ * @param <T> the protocol type.
* @param protocol protocol
* @param clientVersion client's version
* @param addr server address
@@ -609,6 +647,7 @@ public class RPC {
* @param fallbackToSimpleAuth set to true or false during calls to indicate
* if a secure client falls back to simple auth
* @param alignmentContext state alignment context
+ * @param <T> the protocol type.
* @return the proxy
* @throws IOException if any error occurs
*/
@@ -632,15 +671,15 @@ public class RPC {
}
/**
- * Construct a client-side proxy object with the default SocketFactory
- * @param <T>
- *
- * @param protocol
- * @param clientVersion
- * @param addr
- * @param conf
+ * Construct a client-side proxy object with the default SocketFactory.
+ *
+ * @param <T> the protocol type.
+ * @param protocol the protocol class.
+ * @param clientVersion the client version.
+ * @param addr the remote address.
+ * @param conf the configuration.
* @return a proxy instance
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public static <T> T getProxy(Class<T> protocol,
long clientVersion,
@@ -651,7 +690,8 @@ public class RPC {
}
/**
- * Returns the server address for a given proxy.
+ * @param proxy the proxy to query.
+ * @return the server address for the given proxy.
*/
public static InetSocketAddress getServerAddress(Object proxy) {
return getConnectionIdForProxy(proxy).getAddress();
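A client-side sketch of the proxy lifecycle around these helpers; EchoProtocol, its versionID field, and the address are hypothetical:

import java.net.InetSocketAddress;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.ipc.RPC;

public class RpcProxyLifecycle {
  // Hypothetical protocol; Hadoop convention is a static long versionID field.
  interface EchoProtocol { long versionID = 1L; String echo(String msg); }

  public static void main(String[] args) throws Exception {
    Configuration conf = new Configuration();
    EchoProtocol proxy = RPC.getProxy(EchoProtocol.class,
        EchoProtocol.versionID, new InetSocketAddress("localhost", 8020), conf);
    try {
      System.out.println(proxy.echo("hello")); // remote failures surface as RemoteException
    } finally {
      RPC.stopProxy(proxy); // always release the underlying connection
    }
  }
}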
@@ -678,12 +718,13 @@ public class RPC {
* Get a protocol proxy that contains a proxy connection to a remote server
* and a set of methods that are supported by the server
*
- * @param protocol
- * @param clientVersion
- * @param addr
- * @param conf
+ * @param protocol the protocol class.
+ * @param clientVersion the client version.
+ * @param addr the remote address.
+ * @param conf the configuration.
+ * @param <T> the protocol type.
* @return a protocol proxy
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public static <T> ProtocolProxy<T> getProtocolProxy(Class<T> protocol,
long clientVersion,
@@ -767,75 +808,109 @@ public class RPC {
this.conf = conf;
}
- /** Mandatory field */
+ /**
+ * @param protocol the protocol class; mandatory field.
+ * @return this builder.
+ */
public Builder setProtocol(Class<?> protocol) {
this.protocol = protocol;
return this;
}
- /** Mandatory field */
+ /**
+ * @param instance the protocol implementation instance; mandatory field.
+ * @return this builder.
+ */
public Builder setInstance(Object instance) {
this.instance = instance;
return this;
}
- /** Default: 0.0.0.0 */
+ /**
+ * @param bindAddress the bind address; default: 0.0.0.0.
+ * @return this builder.
+ */
public Builder setBindAddress(String bindAddress) {
this.bindAddress = bindAddress;
return this;
}
- /** Default: 0 */
+ /**
+ * @param port the port; default: 0.
+ * @return this builder.
+ */
public Builder setPort(int port) {
this.port = port;
return this;
}
- /** Default: 1 */
+ /**
+ * @param numHandlers the number of handlers; default: 1.
+ * @return this builder.
+ */
public Builder setNumHandlers(int numHandlers) {
this.numHandlers = numHandlers;
return this;
}
- /** Default: -1 */
+ /**
+ * @param numReaders the number of readers; default: -1.
+ * @return this builder.
+ */
public Builder setnumReaders(int numReaders) {
this.numReaders = numReaders;
return this;
}
- /** Default: -1 */
+ /**
+ * @param queueSizePerHandler the queue size per handler; default: -1.
+ * @return this builder.
+ */
public Builder setQueueSizePerHandler(int queueSizePerHandler) {
this.queueSizePerHandler = queueSizePerHandler;
return this;
}
- /** Default: false */
+ /**
+ * @param verbose whether to log verbosely; default: false.
+ * @return this builder.
+ */
public Builder setVerbose(boolean verbose) {
this.verbose = verbose;
return this;
}
- /** Default: null */
+ /**
+ * @param secretManager the secret manager; default: null.
+ * @return this builder.
+ */
public Builder setSecretManager(
SecretManager<? extends TokenIdentifier> secretManager) {
this.secretManager = secretManager;
return this;
}
- /** Default: null */
+ /**
+ * @param portRangeConfig the port range config property; default: null.
+ * @return this builder.
+ */
public Builder setPortRangeConfig(String portRangeConfig) {
this.portRangeConfig = portRangeConfig;
return this;
}
- /** Default: null */
+ /**
+ * @param alignmentContext the alignment context; default: null.
+ * @return this builder.
+ */
public Builder setAlignmentContext(AlignmentContext alignmentContext) {
this.alignmentContext = alignmentContext;
return this;
}
/**
- * Build the RPC Server.
+ * Build the RPC Server.
+ * @return the RPC server.
* @throws IOException on error
* @throws HadoopIllegalArgumentException when mandatory fields are not set
*/
@@ -1077,6 +1152,7 @@ public class RPC {
/**
* Add a protocol to the existing server.
+ * @param rpcKind - the kind of RPC
* @param protocolClass - the protocol class
* @param protocolImpl - the impl of the protocol that will be called
* @return the server (for convenience)
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RefreshCallQueueProtocol.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RefreshCallQueueProtocol.java
index 553f9a00d4c..b1aa0197040 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RefreshCallQueueProtocol.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RefreshCallQueueProtocol.java
@@ -41,7 +41,7 @@ public interface RefreshCallQueueProtocol {
/**
* Refresh the callqueue.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
@Idempotent
void refreshCallQueue() throws IOException;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RefreshRegistry.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RefreshRegistry.java
index 95f1323410f..0cc0b8ba3d8 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RefreshRegistry.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RefreshRegistry.java
@@ -72,6 +72,7 @@ public class RefreshRegistry {
/**
* Remove the registered object for a given identity.
* @param identifier the resource to unregister
+ * @param handler the handler to unregister.
* @return the true if removed
*/
public synchronized boolean unregister(String identifier, RefreshHandler handler) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RemoteException.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RemoteException.java
index f1142d35e72..da08c3d152e 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RemoteException.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RemoteException.java
@@ -124,8 +124,9 @@ public class RemoteException extends IOException {
}
/**
- * Create RemoteException from attributes
- * @param attrs may not be null
+ * Create RemoteException from attributes.
+ * @param attrs may not be null.
+ * @return a RemoteException built from the given attributes.
*/
public static RemoteException valueOf(Attributes attrs) {
return new RemoteException(attrs.getValue("class"),
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RetryCache.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RetryCache.java
index c9e04ab82b6..3d64a84bfb4 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RetryCache.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RetryCache.java
@@ -49,11 +49,11 @@ public class RetryCache {
private static final int MAX_CAPACITY = 16;
/**
- * CacheEntry is tracked using unique client ID and callId of the RPC request
+ * CacheEntry is tracked using unique client ID and callId of the RPC request.
*/
public static class CacheEntry implements LightWeightCache.Entry {
/**
- * Processing state of the requests
+ * Processing state of the requests.
*/
private static byte INPROGRESS = 0;
private static byte SUCCESS = 1;
@@ -233,7 +233,7 @@ public class RetryCache {
}
/**
- * This method returns cache name for metrics.
+ * @return the cache name, for metrics.
*/
public String getCacheName() {
return cacheName;
@@ -302,6 +302,9 @@ public class RetryCache {
/**
* Add a new cache entry into the retry cache. The cache entry consists of
* clientId and callId extracted from editlog.
+ *
+ * @param clientId the client id.
+ * @param callId the call id.
*/
public void addCacheEntry(byte[] clientId, int callId) {
CacheEntry newEntry = new CacheEntry(clientId, callId, System.nanoTime()
@@ -340,7 +343,11 @@ public class RetryCache {
payload, System.nanoTime() + expirationTime);
}
- /** Static method that provides null check for retryCache */
+ /**
+ * Static method that provides a null check for retryCache.
+ * @param cache the retry cache; may be null.
+ * @return the cache entry, or null if the retry cache is skipped.
+ */
public static CacheEntry waitForCompletion(RetryCache cache) {
if (skipRetryCache()) {
return null;
@@ -349,7 +356,12 @@ public class RetryCache {
.waitForCompletion(newEntry(cache.expirationTime)) : null;
}
- /** Static method that provides null check for retryCache */
+ /**
+ * Static method that provides a null check for retryCache.
+ * @param cache the retry cache; may be null.
+ * @param payload the payload to store.
+ * @return the cache entry with payload, or null if the retry cache is skipped.
+ */
public static CacheEntryWithPayload waitForCompletion(RetryCache cache,
Object payload) {
if (skipRetryCache()) {
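The server-side usage pattern for these helpers, as a sketch; op() is a placeholder for the guarded non-idempotent operation:

import org.apache.hadoop.ipc.RetryCache;

public class RetryCacheUsage {
  static void guardedOp(RetryCache retryCache) {
    RetryCache.CacheEntry entry = RetryCache.waitForCompletion(retryCache);
    if (entry != null && entry.isSuccess()) {
      return; // a retried duplicate: the previous attempt already succeeded
    }
    boolean success = false;
    try {
      op();   // placeholder for the non-idempotent operation
      success = true;
    } finally {
      RetryCache.setState(entry, success); // unblocks waiting retries
    }
  }
  static void op() { /* ... */ }
}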
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcClientUtil.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcClientUtil.java
index 0ce78e54a43..4af35ad9270 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcClientUtil.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcClientUtil.java
@@ -103,7 +103,7 @@ public class RpcClientUtil {
* @param version The version at the client.
* @param methodName Name of the method.
* @return true if the method is supported, false otherwise.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
public static boolean isMethodSupported(Object rpcProxy, Class<?> protocol,
RPC.RpcKind rpcKind, long version, String methodName) throws IOException {
@@ -200,6 +200,8 @@ public class RpcClientUtil {
*
* the format we want is:
* ClientNamenodeProtocol#getServerDefaults
+ * @param method the method to describe.
+ * @return the trace string for the method.
*/
public static String methodToTraceString(Method method) {
Class<?> clazz = method.getDeclaringClass();
@@ -221,6 +223,8 @@ public class RpcClientUtil {
*
* the format we want is:
* ClientProtocol#getBlockLocations
+ * @param fullName the fully-qualified method name.
+ * @return the shortened trace name.
*/
public static String toTraceName(String fullName) {
int lastPeriod = fullName.lastIndexOf('.');
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcEngine.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcEngine.java
index 0f5769e7050..afc9d035b09 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcEngine.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcEngine.java
@@ -36,15 +36,44 @@ import org.apache.hadoop.security.token.TokenIdentifier;
@InterfaceStability.Evolving
public interface RpcEngine {
- /** Construct a client-side proxy object.
- * @param <T> */
+ /**
+ * Construct a client-side proxy object.
+ *
+ * @param <T> the protocol type.
+ * @param protocol the protocol class.
+ * @param clientVersion the client version.
+ * @param addr the remote address.
+ * @param ticket the user group information (ticket).
+ * @param conf the configuration.
+ * @param factory the socket factory.
+ * @param rpcTimeout the timeout for each RPC.
+ * @param connectionRetryPolicy the connection retry policy.
+ * @return the protocol proxy.
+ * @throws IOException raised on errors performing I/O.
+ */
<T> ProtocolProxy<T> getProxy(Class<T> protocol,
long clientVersion, InetSocketAddress addr,
UserGroupInformation ticket, Configuration conf,
SocketFactory factory, int rpcTimeout,
RetryPolicy connectionRetryPolicy) throws IOException;
- /** Construct a client-side proxy object. */
+ /**
+ * Construct a client-side proxy object.
+ *
+ * @param <T> the protocol type.
+ * @param protocol the protocol class.
+ * @param clientVersion the client version.
+ * @param addr the remote address.
+ * @param ticket the user group information (ticket).
+ * @param conf the configuration.
+ * @param factory the socket factory.
+ * @param rpcTimeout the timeout for each RPC.
+ * @param connectionRetryPolicy the connection retry policy.
+ * @param fallbackToSimpleAuth whether a secure client falls back to simple auth.
+ * @param alignmentContext the state alignment context.
+ * @return the protocol proxy.
+ * @throws IOException raised on errors performing I/O.
+ */
<T> ProtocolProxy<T> getProxy(Class<T> protocol,
long clientVersion, InetSocketAddress addr,
UserGroupInformation ticket, Configuration conf,
@@ -87,7 +116,7 @@ public interface RpcEngine {
* @param conf, Configuration.
* @param factory, Socket factory.
* @return Proxy object.
- * @throws IOException
+ * @throws IOException raised on errors performing I/O.
*/
ProtocolProxy<ProtocolMetaInfoPB> getProtocolMetaInfoProxy(
ConnectionId connId, Configuration conf, SocketFactory factory)
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcScheduler.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcScheduler.java
index 8c423b8e5e1..bffe5f2d257 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcScheduler.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcScheduler.java
@@ -26,7 +26,8 @@ import org.apache.hadoop.ipc.metrics.RpcMetrics;
*/
public interface RpcScheduler {
/**
- * Returns priority level greater than zero as a hint for scheduling.
+ * @param obj the schedulable call to inspect.
+ * @return a priority level greater than zero, as a hint for scheduling.
*/
int getPriorityLevel(Schedulable obj);
@@ -37,6 +38,12 @@ public interface RpcScheduler {
* implementations. It will not be called by any Hadoop code, and should not
* be implemented by new implementations.
*
+ * @param name the metrics name.
+ * @param priorityLevel the priority level.
+ * @param queueTime the time the call spent in the queue.
+ * @param processingTime the time spent processing the call.
+ * @throws UnsupportedOperationException if the operation is not supported.
* @deprecated Use
* {@link #addResponseTime(String, Schedulable, ProcessingDetails)} instead.
*/
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcServerException.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcServerException.java
index 992997ead25..ce4aac54b6c 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcServerException.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/RpcServerException.java
@@ -47,14 +47,14 @@ public class RpcServerException extends RpcException {
}
/**
- * get the rpc status corresponding to this exception
+ * @return the rpc status corresponding to this exception.
*/
public RpcStatusProto getRpcStatusProto() {
return RpcStatusProto.ERROR;
}
/**
- * get the detailed rpc status corresponding to this exception
+ * @return the detailed rpc status corresponding to this exception.
*/
public RpcErrorCodeProto getRpcErrorCodeProto() {
return RpcErrorCodeProto.ERROR_RPC_SERVER;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
index c5732c68b15..90f730d3883 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/Server.java
@@ -266,10 +266,10 @@ public abstract class Server {
* Register a RPC kind and the class to deserialize the rpc request.
*
* Called by static initializers of rpcKind Engines
- * @param rpcKind
+ * @param rpcKind - the kind of RPC.
* @param rpcRequestWrapperClass - this class is used to deserialze the
* the rpc request.
- * @param rpcInvoker - use to process the calls on SS.
+ * @param rpcInvoker - used to process the calls on the server side.
*/
public static void registerProtocolEngine(RPC.RpcKind rpcKind,
@@ -328,7 +328,7 @@ public abstract class Server {
return protocol;
}
- /** Returns the server instance called under or null. May be called under
+ /** @return the server instance called under or null. May be called under
* {@link #call(Writable, long)} implementations, and under {@link Writable}
* methods of parameters and return values. Permits applications to access
* the server context.*/
@@ -341,7 +341,7 @@ public abstract class Server {
*/
private static final ThreadLocal<Call> CurCall = new ThreadLocal<Call>();
- /** Get the current call */
+ /** @return the current call. */
@VisibleForTesting
public static ThreadLocal<Call> getCurCall() {
return CurCall;
@@ -368,7 +368,8 @@ public abstract class Server {
return call != null ? call.retryCount : RpcConstants.INVALID_RETRY_COUNT;
}
- /** Returns the remote side ip address when invoked inside an RPC
+ /**
+ * @return the remote side ip address when invoked inside an RPC.
* Returns null in case of an error.
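+ *
+ * <p>Illustrative only: a handler might record the caller with
+ * {@code InetAddress clientIp = Server.getRemoteIp();}.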
*/
public static InetAddress getRemoteIp() {
@@ -377,7 +378,7 @@ public abstract class Server {
}
/**
- * Returns the remote side port when invoked inside an RPC
+ * @return the remote side port when invoked inside an RPC.
* Returns 0 in case of an error.
*/
public static int getRemotePort() {
@@ -412,14 +413,14 @@ public abstract class Server {
}
/**
- * Returns the clientId from the current RPC request
+ * @return the clientId from the current RPC request.
*/
public static byte[] getClientId() {
Call call = CurCall.get();
return call != null ? call.clientId : RpcConstants.DUMMY_CLIENT_ID;
}
- /** Returns remote address as a string when invoked inside an RPC.
+ /** @return the remote address as a string when invoked inside an RPC.
* Returns null in case of an error.
*/
public static String getRemoteAddress() {
@@ -441,14 +442,14 @@ public abstract class Server {
return (call != null) ? call.getProtocol() : null;
}
- /** Return true if the invocation was through an RPC.
+ /** @return true if the invocation was through an RPC.
*/
public static boolean isRpcInvocation() {
return CurCall.get() != null;
}
/**
- * Return the priority level assigned by call queue to an RPC
+ * @return the priority level assigned by call queue to an RPC.
* Returns 0 in case no priority is assigned.
*/
public static int getPriorityLevel() {
@@ -516,7 +517,7 @@ public abstract class Server {
/**
* Sets slow RPC flag.
- * @param logSlowRPCFlag
+ * @param logSlowRPCFlag the slow rpc flag to set.
*/
@VisibleForTesting
protected void setLogSlowRPC(boolean logSlowRPCFlag) {
@@ -707,6 +708,9 @@ public abstract class Server {
/**
* Refresh the service authorization ACL for the service handled by this server.
+ *
+ * @param conf the configuration holding the ACL.
+ * @param provider the policy provider for the service.
*/
public void refreshServiceAcl(Configuration conf, PolicyProvider provider) {
serviceAuthorizationManager.refresh(conf, provider);
@@ -715,6 +719,9 @@ public abstract class Server {
/**
* Refresh the service authorization ACL for the service handled by this server
* using the specified Configuration.
+ *
+ * @param conf the already-loaded configuration to use.
+ * @param provider the policy provider for the service.
*/
@Private
public void refreshServiceAclWithLoadedConfiguration(Configuration conf,
@@ -2380,7 +2387,7 @@ public abstract class Server {
* @return -1 in case of error, else num bytes read so far
* @throws IOException - internal error that should not be returned to
* client, typically failure to respond to client
- * @throws InterruptedException
+ * @throws InterruptedException - if the thread is interrupted.
*/
public int readAndProcess() throws IOException, InterruptedException {
while (!shouldClose()) { // stop if a fatal response has been sent.
@@ -3198,6 +3205,18 @@ public abstract class Server {
* Class, RPC.RpcInvoker)}
* This parameter has been retained for compatibility with existing tests
* and usage.
+ *
+ * @param bindAddress the address to bind on.
+ * @param port the port to listen on.
+ * @param rpcRequestClass the class used to deserialize rpc requests.
+ * @param handlerCount the number of handler threads to run.
+ * @param numReaders the number of reader threads to run.
+ * @param queueSizePerHandler the size of the call queue per handler.
+ * @param conf the configuration to use.
+ * @param serverName the name of the server, used for metrics.
+ * @param secretManager the secret manager for tokens.
+ * @param portRangeConfig the name of a config property holding a port range.
+ * @throws IOException raised on errors performing I/O.
*/
@SuppressWarnings("unchecked")
protected Server(String bindAddress, int port,
@@ -3530,7 +3549,10 @@ public abstract class Server {
return conf;
}
- /** Sets the socket buffer size used for responding to RPCs */
+ /**
+ * Sets the socket buffer size used for responding to RPCs.
+ * @param size the send buffer size to use.
+ */
public void setSocketSendBufSize(int size) { this.socketSendBufferSize = size; }
public void setTracer(Tracer t) {
@@ -3580,9 +3602,11 @@ public abstract class Server {
this.rpcDetailedMetrics.shutdown();
}
- /** Wait for the server to be stopped.
+ /**
+ * Wait for the server to be stopped.
* Does not wait for all subthreads to finish.
* See {@link #stop()}.
+ * @throws InterruptedException if the thread is interrupted.
*/
public synchronized void join() throws InterruptedException {
while (running) {
@@ -3619,13 +3643,25 @@ public abstract class Server {
* Called for each call.
* @deprecated Use {@link #call(RPC.RpcKind, String,
* Writable, long)} instead
+ * @param param the rpc request.
+ * @param receiveTime the time the call was received.
+ * @return the rpc response.
+ * @throws Exception if any error occurs while handling the call.
*/
@Deprecated
public Writable call(Writable param, long receiveTime) throws Exception {
return call(RPC.RpcKind.RPC_BUILTIN, null, param, receiveTime);
}
- /** Called for each call. */
+ /**
+ * Called for each call.
+ * @param rpcKind the kind of rpc.
+ * @param protocol the protocol name.
+ * @param param the rpc request.
+ * @param receiveTime the time the call was received.
+ * @return the rpc response.
+ * @throws Exception if any error occurs while handling the call.
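+ *
+ * <p>A minimal sketch of an override, assuming a hypothetical
+ * {@code process} helper:
+ * <pre>{@code
+ * public Writable call(RPC.RpcKind rpcKind, String protocol,
+ *     Writable param, long receiveTime) throws Exception {
+ *   return process(rpcKind, protocol, param); // dispatch and reply
+ * }
+ * }</pre>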
+ */
public abstract Writable call(RPC.RpcKind rpcKind, String protocol,
Writable param, long receiveTime) throws Exception;
@@ -3673,7 +3709,7 @@ public abstract class Server {
}
/**
- * Get the NumOpenConnections/User.
+ * @return the NumOpenConnections/User.
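+ * <p>Illustrative return value only: {@code {"alice":2,"bob":1}}.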
*/
public String getNumOpenConnectionsPerUser() {
ObjectMapper mapper = new ObjectMapper();
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/VersionedProtocol.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/VersionedProtocol.java
index 4d02027a0e6..98daa841874 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/VersionedProtocol.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/VersionedProtocol.java
@@ -46,6 +46,7 @@ public interface VersionedProtocol {
* a list of its supported methods
* @see ProtocolSignature#getProtocolSignature(VersionedProtocol, String,
* long, int) for a default implementation
+ * @throws IOException raised on errors performing I/O.
*/
public ProtocolSignature getProtocolSignature(String protocol,
long clientVersion,
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java
index d790e49f5dc..21181f860d9 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/WritableRpcEngine.java
@@ -282,9 +282,20 @@ public class WritableRpcEngine implements RpcEngine {
return CLIENTS.getClient(conf);
}
- /** Construct a client-side proxy object that implements the named protocol,
+ /**
+ * Construct a client-side proxy object that implements the named protocol,
* talking to a server at the named address.
- * @param <T> */
+ * @param <T> the protocol type.
+ * @param protocol the protocol class.
+ * @param clientVersion the version of the client protocol.
+ * @param addr the address of the server.
+ * @param ticket the user ticket.
+ * @param conf the configuration to use.
+ * @param factory the socket factory.
+ * @param rpcTimeout the rpc timeout in milliseconds.
+ * @param connectionRetryPolicy the connection retry policy.
+ * @return the proxy.
+ * @throws IOException raised on errors performing I/O.
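+ *
+ * <p>Illustrative only, with a hypothetical {@code MyProtocol} interface
+ * and {@code engine}, {@code addr}, {@code conf} assumed in scope:
+ * <pre>{@code
+ * ProtocolProxy<MyProtocol> proxy = engine.getProxy(MyProtocol.class, 1L,
+ *     addr, UserGroupInformation.getCurrentUser(), conf,
+ *     SocketFactory.getDefault(), 0, RetryPolicies.RETRY_FOREVER);
+ * }</pre>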
+ */
@Override
public <T> ProtocolProxy<T> getProxy(Class<T> protocol, long clientVersion,
InetSocketAddress addr, UserGroupInformation ticket,
@@ -295,9 +306,22 @@ public class WritableRpcEngine implements RpcEngine {
rpcTimeout, connectionRetryPolicy, null, null);
}
- /** Construct a client-side proxy object that implements the named protocol,
+ /**
+ * Construct a client-side proxy object that implements the named protocol,
* talking to a server at the named address.
- * @param <T> */
+ * @param <T> the protocol type.
+ * @param protocol the protocol class.
+ * @param clientVersion the version of the client protocol.
+ * @param addr the address of the server.
+ * @param ticket the user ticket.
+ * @param conf the configuration to use.
+ * @param factory the socket factory.
+ * @param rpcTimeout the rpc timeout in milliseconds.
+ * @param connectionRetryPolicy the connection retry policy.
+ * @param fallbackToSimpleAuth set to true or false during this method to
+ * indicate whether a secure client falls back to simple auth.
+ * @param alignmentContext provides server state info on client responses.
+ * @return the proxy.
+ * @throws IOException raised on errors performing I/O.
+ */
@Override
@SuppressWarnings("unchecked")
public <T> ProtocolProxy<T> getProxy(Class<T> protocol, long clientVersion,
@@ -345,7 +369,8 @@ public class WritableRpcEngine implements RpcEngine {
* @param bindAddress the address to bind on to listen for connection
* @param port the port to listen for connections on
*
- * @deprecated Use #Server(Class, Object, Configuration, String, int)
+ * @deprecated Use #Server(Class, Object, Configuration, String, int)
+ * @throws IOException raised on errors performing I/O.
*/
@Deprecated
public Server(Object instance, Configuration conf, String bindAddress,
@@ -360,6 +385,7 @@ public class WritableRpcEngine implements RpcEngine {
* @param conf the configuration to use
* @param bindAddress the address to bind on to listen for connection
* @param port the port to listen for connections on
+ * @throws IOException raised on errors performing I/O.
*/
public Server(Class<?> protocolClass, Object protocolImpl,
Configuration conf, String bindAddress, int port)
@@ -376,9 +402,13 @@ public class WritableRpcEngine implements RpcEngine {
* @param port the port to listen for connections on
* @param numHandlers the number of method handler threads to run
* @param verbose whether each call should be logged
+ * @param numReaders the number of reader threads to run.
+ * @param queueSizePerHandler the size of the call queue per handler.
+ * @param secretManager the secret manager for tokens.
*
* @deprecated use Server#Server(Class, Object,
* Configuration, String, int, int, int, int, boolean, SecretManager)
+ * @throws IOException raised on errors performing I/O.
*/
@Deprecated
public Server(Object protocolImpl, Configuration conf, String bindAddress,
@@ -401,9 +431,14 @@ public class WritableRpcEngine implements RpcEngine {
* @param port the port to listen for connections on
* @param numHandlers the number of method handler threads to run
* @param verbose whether each call should be logged
+ * @param secretManager the secret manager for tokens.
+ * @param queueSizePerHandler the size of the call queue per handler.
+ * @param portRangeConfig the name of a config property holding a port range.
+ * @param numReaders the number of reader threads to run.
*
* @deprecated use Server#Server(Class, Object,
* Configuration, String, int, int, int, int, boolean, SecretManager)
+ * @throws IOException raised on errors performing I/O.
*/
@Deprecated
public Server(Class<?> protocolClass, Object protocolImpl,
@@ -428,6 +463,11 @@ public class WritableRpcEngine implements RpcEngine {
* @param numHandlers the number of method handler threads to run
* @param verbose whether each call should be logged
* @param alignmentContext provides server state info on client responses
+ * @param numReaders the number of reader threads to run.
+ * @param portRangeConfig the name of a config property holding a port range.
+ * @param queueSizePerHandler the size of the call queue per handler.
+ * @param secretManager the secret manager for tokens.
+ * @throws IOException raised on errors performing I/O.
*/
public Server(Class<?> protocolClass, Object protocolImpl,
Configuration conf, String bindAddress, int port,
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/DecayRpcSchedulerDetailedMetrics.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/DecayRpcSchedulerDetailedMetrics.java
index b86381706d6..0bfe5c7d880 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/DecayRpcSchedulerDetailedMetrics.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/ipc/metrics/DecayRpcSchedulerDetailedMetrics.java
@@ -65,6 +65,7 @@ public class DecayRpcSchedulerDetailedMetrics {
/**
* Initialize the metrics for JMX with priority levels.
+ * @param numLevels the number of priority levels.
*/
public void init(int numLevels) {
LOG.info("Initializing RPC stats for {} priority levels", numLevels);
@@ -106,14 +107,16 @@ public class DecayRpcSchedulerDetailedMetrics {
}
/**
- * Returns the rate name inside the metric.
+ * @param priority the priority level.
+ * @return the rate name inside the metric.
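+ *
+ * <p>For example, {@code getQueueName(1)} returns
+ * {@code "DecayRPCSchedulerPriority.1.RpcQueueTime"}.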
*/
public String getQueueName(int priority) {
return "DecayRPCSchedulerPriority."+priority+".RpcQueueTime";
}
/**
- * Returns the rate name inside the metric.
+ * @param priority the priority level.
+ * @return the rate name inside the metric.
*/
public String getProcessingName(int priority) {
return "DecayRPCSchedulerPriority."+priority+".RpcProcessingTime";
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java
index f20933b5c86..85f2d282856 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/jmx/JMXJsonServlet.java
@@ -64,27 +64,31 @@ import java.util.Set;
* functionality is provided through the
* {@link MBeanServer#queryNames(ObjectName, javax.management.QueryExp)}
* method.
+ * <p>
*
* For example <code>http://.../jmx?qry=Hadoop:*</code> will return
* all hadoop metrics exposed through JMX.
+ * <p>
*
* The optional <code>get</code> parameter is used to query a specific
* attribute of a JMX bean. The format of the URL is
* <code>http://.../jmx?get=MXBeanName::AttributeName</code>
+ * <p>
*
* For example
* <code>
* http://../jmx?get=Hadoop:service=NameNode,name=NameNodeInfo::ClusterId
* </code> will return the cluster id of the namenode mxbean.
+ * <p>
*
* If the <code>qry</code> or the <code>get</code> parameter is not formatted
- * correctly then a 400 BAD REQUEST http response code will be returned.
+ * correctly then a 400 BAD REQUEST http response code will be returned.
+ * <p>
*
* If a resource such as an mbean or attribute cannot be found,
* a 404 SC_NOT_FOUND http response code will be returned.
- *
+ * <p>
* The return format is JSON and in the form
- * <p>
* <pre><code>
* {
* "beans" : [
@@ -95,7 +99,6 @@ import java.util.Set;
* ]
* }
* </code></pre>
- * <p>
* The servlet attempts to convert the JMXBeans into JSON. Each
* bean's attributes will be converted to a JSON object member.
*
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/LogLevel.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/LogLevel.java
index c8a88236aeb..e2ad16fce2c 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/LogLevel.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/LogLevel.java
@@ -66,6 +66,8 @@ public class LogLevel {
public static final String PROTOCOL_HTTPS = "https";
/**
* A command line implementation
+ * @param args the command line arguments.
+ * @throws Exception if the command fails.
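+ *
+ * <p>Illustrative invocation, following the daemonlog usage string:
+ * <pre>{@code
+ * hadoop daemonlog -getlevel <host:port> <classname>
+ * }</pre>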
*/
public static void main(String[] args) throws Exception {
CLI cli = new CLI(new Configuration());
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/LogThrottlingHelper.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/LogThrottlingHelper.java
index 622ee5405c8..af5f8521433 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/LogThrottlingHelper.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/log/LogThrottlingHelper.java
@@ -88,21 +88,22 @@ public class LogThrottlingHelper {
public interface LogAction {
/**
- * Return the number of records encapsulated in this action; that is, the
+ * @return the number of records encapsulated in this action; that is, the
* number of times {@code record} was called to produce this action,
* including the current one.
*/
int getCount();
/**
- * Return summary information for the value that was recorded at index
+ * @return summary information for the value that was recorded at index
* {@code idx}. Corresponds to the ordering of values passed to
* {@link #record(double...)}.
+ * @param idx the index of the recorded value.
*/
SummaryStatistics getStats(int idx);
/**
- * If this is true, the caller should write to its log. Otherwise, the
+ * @return true if the caller should write to its log. Otherwise, the
* caller should take no action, and it is an error to call other methods
* on this object.
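+ *
+ * <p>Illustrative usage; {@code helper}, {@code LOG} and
+ * {@code latencyMs} are examples only:
+ * <pre>{@code
+ * LogAction action = helper.record(latencyMs);
+ * if (action.shouldLog()) {
+ *   LOG.info("Processed {} records", action.getCount());
+ * }
+ * }</pre>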
*/
@@ -139,6 +140,7 @@ public class LogThrottlingHelper {
* Create a log helper without any primary recorder.
*
* @see #LogThrottlingHelper(long, String)
+ * @param minLogPeriodMs the minimum period between log statements, in milliseconds.
*/
public LogThrottlingHelper(long minLogPeriodMs) {
this(minLogPeriodMs, null);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsSystem.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsSystem.java
index a277abd6e13..fef8c4b7e4b 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsSystem.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsSystem.java
@@ -50,7 +50,7 @@ public abstract class MetricsSystem implements MetricsSystemMXBean {
* the annotations of the source object.)
* @param desc the description of the source (or null. See above.)
* @return the source object
- * @exception MetricsException
+ * @exception MetricsException if the source cannot be registered.
*/
public abstract <T> T register(String name, String desc, T source);
@@ -65,7 +65,7 @@ public abstract class MetricsSystem implements MetricsSystemMXBean {
* @param <T> the actual type of the source object
* @param source object to register
* @return the source object
- * @exception MetricsException
+ * @exception MetricsException if the source cannot be registered.
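+ *
+ * <p>Illustrative only, with a hypothetical annotated source class:
+ * <pre>{@code
+ * MyMetricsSource src = ms.register(new MyMetricsSource());
+ * }</pre>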
*/
public <T> T register(T source) {
return register(null, null, source);
@@ -85,7 +85,7 @@ public abstract class MetricsSystem implements MetricsSystemMXBean {
* @param name of the sink. Must be unique.
* @param desc the description of the sink
* @return the sink
- * @exception MetricsException
+ * @exception MetricsException if the sink cannot be registered.
*/
public abstract <T extends MetricsSink>
T register(String name, String desc, T sink);
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsSystemMXBean.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsSystemMXBean.java
index e471ab7498c..8656da6f316 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsSystemMXBean.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/MetricsSystemMXBean.java
@@ -29,19 +29,19 @@ import org.apache.hadoop.classification.InterfaceStability;
public interface MetricsSystemMXBean {
/**
* Start the metrics system
- * @throws MetricsException
+ * @throws MetricsException if the metrics system fails to start.
*/
public void start();
/**
* Stop the metrics system
- * @throws MetricsException
+ * @throws MetricsException if the metrics system fails to stop.
*/
public void stop();
/**
* Start metrics MBeans
- * @throws MetricsException
+ * @throws MetricsException if the MBeans fail to start.
*/
public void startMetricsMBeans();
@@ -49,7 +49,7 @@ public interface MetricsSystemMXBean {
* Stop metrics MBeans.
* Note, it doesn't stop the metrics system control MBean,
* i.e this interface.
- * @throws MetricsException
+ * @throws MetricsException if the MBeans fail to stop.
*/
public void stopMetricsMBeans();
@@ -57,7 +57,7 @@ public interface MetricsSystemMXBean {
* @return the current config
* Avoided getConfig, as it'll turn into a "Config" attribute,
* which doesn't support multiple line values in jconsole.
- * @throws MetricsException
+ * @throws MetricsException on any error retrieving the config.
*/
public String currentConfig();
}
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableMetricsFactory.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableMetricsFactory.java
index c7adaa5d991..e4886cb603e 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableMetricsFactory.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableMetricsFactory.java
@@ -146,8 +146,10 @@ public class MutableMetricsFactory {
}
/**
- * Remove the prefix "get", if any, from the method name. Return the
+ * @return Remove the prefix "get", if any, from the method name. Return the
* capacitalized method name."
+ *
+ * @param method the method to inspect.
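+ *
+ * <p>For example, a method named {@code getCacheUsed} yields
+ * {@code CacheUsed}.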
*/
protected String getName(Method method) {
String methodName = method.getName();
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableRollingAverages.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableRollingAverages.java
index aa4d4b9ca0c..016ecdd4d1e 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableRollingAverages.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableRollingAverages.java
@@ -139,7 +139,7 @@ public class MutableRollingAverages extends MutableMetric implements Closeable {
/**
* Constructor for {@link MutableRollingAverages}.
- * @param metricValueName
+ * @param metricValueName the name of the metric value.
*/
public MutableRollingAverages(String metricValueName) {
if (metricValueName == null) {
@@ -285,6 +285,7 @@ public class MutableRollingAverages extends MutableMetric implements Closeable {
* Retrieve a map of metric name {@literal ->} (aggregate).
* Filter out entries that don't have at least minSamples.
*
+ * @param minSamples the minimum number of samples required.
* @return a map of peer DataNode Id to the average latency to that
* node seen over the measurement period.
*/
@@ -314,6 +315,7 @@ public class MutableRollingAverages extends MutableMetric implements Closeable {
/**
* Use for test only.
+ * @param value the record validity period, in milliseconds.
*/
@VisibleForTesting
public synchronized void setRecordValidityMs(long value) {
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableStat.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableStat.java
index e04b4b58ece..f2e072545ad 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableStat.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/lib/MutableStat.java
@@ -179,7 +179,7 @@ public class MutableStat extends MutableMetric {
}
/**
- * Return the SampleStat snapshot timestamp
+ * @return the SampleStat snapshot timestamp.
*/
public long getSnapshotTimeStamp() {
return snapshotTimeStamp;
diff --git a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/package-info.java b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/package-info.java
index 8fd3b33b3a2..196469be9dc 100644
--- a/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/package-info.java
+++ b/hadoop-common-project/hadoop-common/src/main/java/org/apache/hadoop/metrics2/package-info.java
@@ -87,7 +87,7 @@
Implementing metrics sources
-
+ * <caption>Implementing metrics sources</caption>