HADOOP-13419. Fix javadoc warnings by JDK8 in hadoop-common package. Contributed by Kai Sasaki.

(cherry picked from commit a13a607e20)
This commit is contained in:
Masatake Iwasaki 2016-10-16 12:44:19 +09:00
parent 1c473898aa
commit 74782e7e42
8 changed files with 43 additions and 87 deletions

View File

@@ -315,7 +315,7 @@ public class FileContext {
* *
* @throws UnsupportedFileSystemException If the file system for * @throws UnsupportedFileSystemException If the file system for
* <code>absOrFqPath</code> is not supported. * <code>absOrFqPath</code> is not supported.
* @throws IOExcepton If the file system for <code>absOrFqPath</code> could * @throws IOException If the file system for <code>absOrFqPath</code> could
* not be instantiated. * not be instantiated.
*/ */
protected AbstractFileSystem getFSofPath(final Path absOrFqPath) protected AbstractFileSystem getFSofPath(final Path absOrFqPath)
@@ -2725,7 +2725,7 @@ public class FileContext {
/** /**
* Query the effective storage policy ID for the given file or directory. * Query the effective storage policy ID for the given file or directory.
* *
* @param src file or directory path. * @param path file or directory path.
* @return storage policy for given file. * @return storage policy for given file.
* @throws IOException * @throws IOException
*/ */

View File

@@ -52,7 +52,7 @@ public interface FenceMethod {
/** /**
* Attempt to fence the target node. * Attempt to fence the target node.
* @param serviceAddr the address (host:ipcport) of the service to fence * @param target the address (host:ipcport) of the service to fence
* @param args the configured arguments, which were checked at startup by * @param args the configured arguments, which were checked at startup by
* {@link #checkArgs(String)} * {@link #checkArgs(String)}
* @return true if fencing was successful, false if unsuccessful or * @return true if fencing was successful, false if unsuccessful or

View File

@@ -151,12 +151,13 @@ public interface HAServiceProtocol {
/** /**
* Return the current status of the service. The status indicates * Return the current status of the service. The status indicates
* the current <em>state</em> (e.g. ACTIVE/STANDBY) as well as * the current <em>state</em> (e.g. ACTIVE/STANDBY) as well as
* some additional information. {@see HAServiceStatus} * some additional information.
* *
* @throws AccessControlException * @throws AccessControlException
* if access is denied. * if access is denied.
* @throws IOException * @throws IOException
* if other errors happen * if other errors happen
* @see HAServiceStatus
*/ */
@Idempotent @Idempotent
public HAServiceStatus getServiceStatus() throws AccessControlException, public HAServiceStatus getServiceStatus() throws AccessControlException,

View File

@@ -15,6 +15,28 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
/**
* A mechanism for selectively retrying methods that throw exceptions under
* certain circumstances.
* Typical usage is
* UnreliableImplementation unreliableImpl = new UnreliableImplementation();
* UnreliableInterface unreliable = (UnreliableInterface)
* RetryProxy.create(UnreliableInterface.class, unreliableImpl,
* RetryPolicies.retryUpToMaximumCountWithFixedSleep(4, 10,
* TimeUnit.SECONDS));
* unreliable.call();
*
* This will retry any method called on <code>unreliable</code> four times -
* in this case the <code>call()</code> method - sleeping 10 seconds between
* each retry. There are a number of
* {@link org.apache.hadoop.io.retry.RetryPolicies retry policies}
* available, or you can implement a custom one by implementing
* {@link org.apache.hadoop.io.retry.RetryPolicy}.
* It is also possible to specify retry policies on a
* {@link org.apache.hadoop.io.retry.RetryProxy#create(Class, Object, Map)
* per-method basis}.
*/
@InterfaceAudience.LimitedPrivate({"HBase", "HDFS", "MapReduce"}) @InterfaceAudience.LimitedPrivate({"HBase", "HDFS", "MapReduce"})
@InterfaceStability.Evolving @InterfaceStability.Evolving
package org.apache.hadoop.io.retry; package org.apache.hadoop.io.retry;

View File

@@ -1,48 +0,0 @@
<html>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<body>
<p>
A mechanism for selectively retrying methods that throw exceptions under certain circumstances.
</p>
<p>
Typical usage is
</p>
<pre>
UnreliableImplementation unreliableImpl = new UnreliableImplementation();
UnreliableInterface unreliable = (UnreliableInterface)
RetryProxy.create(UnreliableInterface.class, unreliableImpl,
RetryPolicies.retryUpToMaximumCountWithFixedSleep(4, 10, TimeUnit.SECONDS));
unreliable.call();
</pre>
<p>
This will retry any method called on <code>unreliable</code> four times - in this case the <code>call()</code>
method - sleeping 10 seconds between
each retry. There are a number of {@link org.apache.hadoop.io.retry.RetryPolicies retry policies}
available, or you can implement a custom one by implementing {@link org.apache.hadoop.io.retry.RetryPolicy}.
It is also possible to specify retry policies on a
{@link org.apache.hadoop.io.retry.RetryProxy#create(Class, Object, Map) per-method basis}.
</p>
</body>
</html>

View File

@@ -2035,7 +2035,7 @@ public abstract class Server {
} }
/** Reads the connection context following the connection header /** Reads the connection context following the connection header
* @param dis - DataInputStream from which to read the header * @param buffer - DataInputStream from which to read the header
* @throws WrappedRpcServerException - if the header cannot be * @throws WrappedRpcServerException - if the header cannot be
* deserialized, or the user is not authorized * deserialized, or the user is not authorized
*/ */
@@ -2087,7 +2087,7 @@ public abstract class Server {
/** /**
* Process a wrapped RPC Request - unwrap the SASL packet and process * Process a wrapped RPC Request - unwrap the SASL packet and process
* each embedded RPC request * each embedded RPC request
* @param buf - SASL wrapped request of one or more RPCs * @param inBuf - SASL wrapped request of one or more RPCs
* @throws IOException - SASL packet cannot be unwrapped * @throws IOException - SASL packet cannot be unwrapped
* @throws InterruptedException * @throws InterruptedException
*/ */
@@ -2131,7 +2131,7 @@ public abstract class Server {
/** /**
* Process an RPC Request - handle connection setup and decoding of * Process an RPC Request - handle connection setup and decoding of
* request into a Call * request into a Call
* @param buf - contains the RPC request header and the rpc request * @param bb - contains the RPC request header and the rpc request
* @throws IOException - internal error that should not be returned to * @throws IOException - internal error that should not be returned to
* client, typically failure to respond to client * client, typically failure to respond to client
* @throws WrappedRpcServerException - an exception to be sent back to * @throws WrappedRpcServerException - an exception to be sent back to
@@ -2207,7 +2207,7 @@ public abstract class Server {
* Process an RPC Request - the connection headers and context must * Process an RPC Request - the connection headers and context must
* have been already read * have been already read
* @param header - RPC request header * @param header - RPC request header
* @param dis - stream to request payload * @param buffer - stream to request payload
* @throws WrappedRpcServerException - due to fatal rpc layer issues such * @throws WrappedRpcServerException - due to fatal rpc layer issues such
* as invalid header or deserialization error. In this case a RPC fatal * as invalid header or deserialization error. In this case a RPC fatal
* status response will later be sent back to client. * status response will later be sent back to client.
@@ -2283,7 +2283,7 @@ public abstract class Server {
* Establish RPC connection setup by negotiating SASL if required, then * Establish RPC connection setup by negotiating SASL if required, then
* reading and authorizing the connection header * reading and authorizing the connection header
* @param header - RPC header * @param header - RPC header
* @param dis - stream to request payload * @param buffer - stream to request payload
* @throws WrappedRpcServerException - setup failed due to SASL * @throws WrappedRpcServerException - setup failed due to SASL
* negotiation failure, premature or invalid connection context, * negotiation failure, premature or invalid connection context,
* or other state errors * or other state errors
@@ -2351,8 +2351,8 @@ public abstract class Server {
/** /**
* Decode a protobuf from the given input stream * Decode a protobuf from the given input stream
* @param builder - Builder of the protobuf to decode * @param message - Representation of the type of message
* @param dis - DataInputStream to read the protobuf * @param buffer - a buffer to read the protobuf
* @return Message - decoded protobuf * @return Message - decoded protobuf
* @throws WrappedRpcServerException - deserialization failed * @throws WrappedRpcServerException - deserialization failed
*/ */
@@ -2522,8 +2522,8 @@ public abstract class Server {
* from configuration. Otherwise the configuration will be picked up. * from configuration. Otherwise the configuration will be picked up.
* *
* If rpcRequestClass is null then the rpcRequestClass must have been * If rpcRequestClass is null then the rpcRequestClass must have been
* registered via {@link #registerProtocolEngine(RpcPayloadHeader.RpcKind, * registered via {@link #registerProtocolEngine(RPC.RpcKind, Class,
* Class, RPC.RpcInvoker)} * RPC.RpcInvoker)}.
* This parameter has been retained for compatibility with existing tests * This parameter has been retained for compatibility with existing tests
* and usage. * and usage.
*/ */
@@ -2881,8 +2881,8 @@ public abstract class Server {
/** /**
* Called for each call. * Called for each call.
* @deprecated Use {@link #call(RpcPayloadHeader.RpcKind, String, * @deprecated Use {@link Server#call(RPC.RpcKind, String, Writable, long)}
* Writable, long)} instead * instead.
*/ */
@Deprecated @Deprecated
public Writable call(Writable param, long receiveTime) throws Exception { public Writable call(Writable param, long receiveTime) throws Exception {

View File

@@ -15,6 +15,10 @@
* See the License for the specific language governing permissions and * See the License for the specific language governing permissions and
* limitations under the License. * limitations under the License.
*/ */
/**
* Tools to help define network clients and servers.
*/
@InterfaceAudience.LimitedPrivate({"HBase", "HDFS", "MapReduce"}) @InterfaceAudience.LimitedPrivate({"HBase", "HDFS", "MapReduce"})
@InterfaceStability.Evolving @InterfaceStability.Evolving
package org.apache.hadoop.ipc; package org.apache.hadoop.ipc;

View File

@@ -1,23 +0,0 @@
<html>
<!--
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to You under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-->
<body>
Tools to help define network clients and servers.
</body>
</html>