HBASE-7426 Fix PreCheckin script to error out when there are Javadoc errors.

git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1425281 13f79535-47bb-0310-9956-ffa450edef68
This commit is contained in:
eclark 2012-12-22 13:43:41 +00:00
parent 08671a4129
commit dceffb1bda
9 changed files with 17 additions and 17 deletions

View File

@ -20,4 +20,4 @@ MAVEN_OPTS="-Xmx3g"
OK_RELEASEAUDIT_WARNINGS=84
OK_FINDBUGS_WARNINGS=517
OK_JAVADOC_WARNINGS=169
OK_JAVADOC_WARNINGS=0

View File

@ -37,15 +37,15 @@ public abstract class Batch {
*
* <p>
* When used with
* {@link org.apache.hadoop.hbase.client.HTable#coprocessorExec(Class, byte[], byte[], org.apache.hadoop.hbase.client.coprocessor.Batch.Call, org.apache.hadoop.hbase.client.coprocessor.Batch.Callback)}
* {@link org.apache.hadoop.hbase.client.HTable#coprocessorService(Class, byte[], byte[], org.apache.hadoop.hbase.client.coprocessor.Batch.Call)}
 * the implementation's {@link Batch.Call#call(Object)} method will be invoked
* with a proxy to the
* {@link org.apache.hadoop.hbase.ipc.CoprocessorProtocol}
* {@link org.apache.hadoop.hbase.coprocessor.CoprocessorService}
* sub-type instance.
* </p>
* @see org.apache.hadoop.hbase.client.coprocessor
* @see org.apache.hadoop.hbase.client.HTable#coprocessorExec(Class, byte[], byte[], org.apache.hadoop.hbase.client.coprocessor.Batch.Call)
* @see org.apache.hadoop.hbase.client.HTable#coprocessorExec(Class, byte[], byte[], org.apache.hadoop.hbase.client.coprocessor.Batch.Call, org.apache.hadoop.hbase.client.coprocessor.Batch.Callback)
* @see org.apache.hadoop.hbase.client.HTable#coprocessorService(byte[])
* @see org.apache.hadoop.hbase.client.HTable#coprocessorService(Class, byte[], byte[], org.apache.hadoop.hbase.client.coprocessor.Batch.Call)
* @param <T> the instance type to be passed to
* {@link Batch.Call#call(Object)}
* @param <R> the return type from {@link Batch.Call#call(Object)}
@ -60,13 +60,13 @@ public abstract class Batch {
*
* <p>
* When used with
* {@link org.apache.hadoop.hbase.client.HTable#coprocessorExec(Class, byte[], byte[], org.apache.hadoop.hbase.client.coprocessor.Batch.Call, org.apache.hadoop.hbase.client.coprocessor.Batch.Callback)},
* {@link org.apache.hadoop.hbase.client.HTable#coprocessorService(Class, byte[], byte[], org.apache.hadoop.hbase.client.coprocessor.Batch.Call)}
* the implementation's {@link Batch.Callback#update(byte[], byte[], Object)}
* method will be called with the {@link Batch.Call#call(Object)} return value
* from each region in the selected range.
* </p>
* @param <R> the return type from the associated {@link Batch.Call#call(Object)}
* @see org.apache.hadoop.hbase.client.HTable#coprocessorExec(Class, byte[], byte[], org.apache.hadoop.hbase.client.coprocessor.Batch.Call, org.apache.hadoop.hbase.client.coprocessor.Batch.Callback)
* @see org.apache.hadoop.hbase.client.HTable#coprocessorService(Class, byte[], byte[], org.apache.hadoop.hbase.client.coprocessor.Batch.Call)
*/
public static interface Callback<R> {
public void update(byte[] region, byte[] row, R result);

View File

@ -37,7 +37,8 @@ import java.util.TreeSet;
* <p>
* Note : It may emit KVs which do not have the given columns in them, if
* these KVs happen to occur before a KV which does have a match. Given this
* caveat, this filter is only useful for special cases like {@link RowCounter}.
* caveat, this filter is only useful for special cases
* like {@link org.apache.hadoop.hbase.mapreduce.RowCounter}.
 * </p>
*/
@InterfaceAudience.Public

View File

@ -54,7 +54,7 @@ public class ChecksumUtil {
 * compute checksums from
 * @param endOffset ending offset in the indata stream up to
* which checksums needs to be computed
* @param outData the output buffer where checksum values are written
* @param outdata the output buffer where checksum values are written
* @param outOffset the starting offset in the outdata where the
* checksum values are written
* @param checksumType type of checksum

View File

@ -222,7 +222,6 @@ public class FixedFileTrailer {
* {@link #serialize(DataOutputStream)}.
*
* @param inputStream
* @param version
* @throws IOException
*/
void deserialize(DataInputStream inputStream) throws IOException {

View File

@ -1137,7 +1137,7 @@ public final class RequestConverter {
* @param family optional column family
* @param qualifier optional qualifier
* @param actions the permissions to be granted
* @return A {@link AccessControlProtos.GrantRequest)
* @return A {@link AccessControlProtos} GrantRequest
*/
public static AccessControlProtos.GrantRequest buildGrantRequest(
String username, byte[] table, byte[] family, byte[] qualifier,
@ -1173,7 +1173,7 @@ public final class RequestConverter {
* @param family optional column family
* @param qualifier optional qualifier
* @param actions the permissions to be revoked
* @return A {@link AccessControlProtos.RevokeRequest)
* @return A {@link AccessControlProtos} RevokeRequest
*/
public static AccessControlProtos.RevokeRequest buildRevokeRequest(
String username, byte[] table, byte[] family, byte[] qualifier,

View File

@ -57,7 +57,7 @@ public class ReplicationHLogReaderManager {
/**
* Opens the file at the current position
* @param path
* @return
* @return an HLog reader.
* @throws IOException
*/
public HLog.Reader openReader(Path path) throws IOException {

View File

@ -92,13 +92,13 @@ import static org.apache.hadoop.hbase.protobuf.generated.AccessControlProtos.Acc
*
* <p>
* To perform authorization checks, {@code AccessController} relies on the
* {@link org.apache.hadoop.hbase.ipc.SecureRpcEngine} being loaded to provide
* {@link org.apache.hadoop.hbase.ipc.RpcServerEngine} being loaded to provide
* the user identities for remote requests.
* </p>
*
* <p>
* The access control lists used for authorization can be manipulated via the
* exposed {@link AccessControlService.Interface} implementation, and the associated
* exposed {@link AccessControlService} Interface implementation, and the associated
* {@code grant}, {@code revoke}, and {@code user_permission} HBase shell
* commands.
* </p>

View File

@ -44,7 +44,7 @@ import org.apache.hadoop.security.token.Token;
/**
* Provides a service for obtaining authentication tokens via the
* {@link AuthenticationProtos.AuthenticationService} coprocessor service.
* {@link AuthenticationProtos} AuthenticationService coprocessor service.
*/
public class TokenProvider implements AuthenticationProtos.AuthenticationService.Interface,
Coprocessor, CoprocessorService {
@ -73,7 +73,7 @@ public class TokenProvider implements AuthenticationProtos.AuthenticationService
}
/**
* @param ugi
* @param ugi A user group information.
* @return true if delegation token operation is allowed
*/
private boolean isAllowedDelegationTokenOp(UserGroupInformation ugi) throws IOException {