From 4cc31baec3b3fa39347cb8465e0036c92c0b183d Mon Sep 17 00:00:00 2001 From: Michael Stack Date: Mon, 17 Dec 2012 19:08:02 +0000 Subject: [PATCH] HBASE-7361 Fix all javadoc warnings in hbase-server/{,mapreduce} git-svn-id: https://svn.apache.org/repos/asf/hbase/trunk@1423096 13f79535-47bb-0310-9956-ffa450edef68 --- .../apache/hadoop/hbase/HTableDescriptor.java | 3 +- .../hadoop/hbase/MasterAdminProtocol.java | 4 +- .../apache/hadoop/hbase/client/Action.java | 4 +- .../hadoop/hbase/client/HBaseAdmin.java | 2 +- .../apache/hadoop/hbase/client/HTable.java | 7 ++-- .../hadoop/hbase/client/HTableInterface.java | 42 ++++++++++--------- .../hadoop/hbase/client/ServerCallable.java | 2 - .../coprocessor/BaseRowProcessorEndpoint.java | 4 +- .../hbase/coprocessor/RegionObserver.java | 2 +- .../hadoop/hbase/filter/BinaryComparator.java | 2 +- .../hbase/filter/BinaryPrefixComparator.java | 2 +- .../hadoop/hbase/filter/BitComparator.java | 2 +- .../hbase/filter/ByteArrayComparable.java | 2 +- .../hbase/filter/ColumnCountGetFilter.java | 2 +- .../hbase/filter/ColumnPaginationFilter.java | 2 +- .../hbase/filter/ColumnPrefixFilter.java | 2 +- .../hbase/filter/ColumnRangeFilter.java | 2 +- .../hbase/filter/DependentColumnFilter.java | 2 +- .../hadoop/hbase/filter/FamilyFilter.java | 2 +- .../apache/hadoop/hbase/filter/Filter.java | 2 +- .../hadoop/hbase/filter/FilterList.java | 2 +- .../hadoop/hbase/filter/FilterWrapper.java | 2 +- .../hbase/filter/FirstKeyOnlyFilter.java | 2 +- ...FirstKeyValueMatchingQualifiersFilter.java | 2 +- .../hadoop/hbase/filter/FuzzyRowFilter.java | 2 +- .../hbase/filter/InclusiveStopFilter.java | 2 +- .../hadoop/hbase/filter/KeyOnlyFilter.java | 2 +- .../filter/MultipleColumnPrefixFilter.java | 2 +- .../hadoop/hbase/filter/NullComparator.java | 2 +- .../hadoop/hbase/filter/PageFilter.java | 2 +- .../hadoop/hbase/filter/PrefixFilter.java | 2 +- .../hadoop/hbase/filter/QualifierFilter.java | 2 +- .../hadoop/hbase/filter/RandomRowFilter.java | 2 +- 
.../hbase/filter/RegexStringComparator.java | 4 +- .../apache/hadoop/hbase/filter/RowFilter.java | 2 +- .../SingleColumnValueExcludeFilter.java | 2 +- .../hbase/filter/SingleColumnValueFilter.java | 2 +- .../hadoop/hbase/filter/SkipFilter.java | 2 +- .../hbase/filter/SubstringComparator.java | 2 +- .../hadoop/hbase/filter/TimestampsFilter.java | 2 +- .../hadoop/hbase/filter/ValueFilter.java | 2 +- .../hadoop/hbase/filter/WhileMatchFilter.java | 2 +- .../apache/hadoop/hbase/fs/HFileSystem.java | 2 +- .../org/apache/hadoop/hbase/io/FileLink.java | 10 ++--- .../org/apache/hadoop/hbase/io/HFileLink.java | 10 ++--- .../hadoop/hbase/io/hfile/BlockCache.java | 2 +- .../hadoop/hbase/io/hfile/HFileBlock.java | 21 ++++------ .../hadoop/hbase/io/hfile/HFileReaderV1.java | 1 - .../hadoop/hbase/io/hfile/LruBlockCache.java | 2 +- .../apache/hadoop/hbase/ipc/HBaseClient.java | 1 - .../apache/hadoop/hbase/ipc/HBaseServer.java | 2 +- .../mapreduce/MultithreadedTableMapper.java | 6 +-- pom.xml | 1 + 53 files changed, 96 insertions(+), 100 deletions(-) diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java index a5c52790e10..b697e266e9c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/HTableDescriptor.java @@ -901,7 +901,8 @@ public class HTableDescriptor implements WritableComparable { /** * INTERNAL This method is a part of {@link WritableComparable} interface * and is used for serialization of the HTableDescriptor over RPC - * @deprecated Writables are going away. Use pb {@link #toByteArray(byte[])} instead. + * @deprecated Writables are going away. + * Use {@link com.google.protobuf.MessageLite#toByteArray} instead. 
*/ @Deprecated @Override diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/MasterAdminProtocol.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/MasterAdminProtocol.java index 7b503ea1808..57d83efd5a4 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/MasterAdminProtocol.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/MasterAdminProtocol.java @@ -153,8 +153,8 @@ public interface MasterAdminProtocol extends /** * Unassign a region from current hosting regionserver. Region will then be * assigned to a regionserver chosen at random. Region could be reassigned - * back to the same server. Use {@link #moveRegion(RpcController,MoveRegionRequest)} - * if you want to control the region movement. + * back to the same server. Use {@link #moveRegion} if you want to + * control the region movement. * @param controller Unused (set to null). * @param req The request that contains:
* - region: Region to unassign. Will clear any existing RegionPlan diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/Action.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/Action.java index f124bb0ae4f..06475d04c19 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/Action.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/Action.java @@ -23,7 +23,7 @@ import org.apache.hadoop.classification.InterfaceStability; /** * A Get, Put or Delete associated with it's region. Used internally by - * {@link HTable::batch} to associate the action with it's region and maintain + * {@link HTable#batch} to associate the action with it's region and maintain * the index from the original request. */ @InterfaceAudience.Public @@ -34,7 +34,7 @@ public class Action implements Comparable { private int originalIndex; private R result; - /* + /** * This constructor is replaced by {@link #Action(Row, int)} */ @Deprecated diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java index cdb72a15227..6f3ce224e6b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HBaseAdmin.java @@ -460,7 +460,7 @@ public class HBaseAdmin implements Abortable, Closeable { /** * Creates a new table but does not block and wait for it to come online. * Asynchronous operation. To check if the table exists, use - * {@link: #isTableAvailable()} -- it is not safe to create an HTable + * {@link #isTableAvailable} -- it is not safe to create an HTable * instance to this table before it is available. * Note : Avoid passing empty split key. 
* @param desc table descriptor for table diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HTable.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HTable.java index 373a76613c8..0b0f9270da8 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HTable.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HTable.java @@ -487,8 +487,8 @@ public class HTable implements HTableInterface { /** * Get the corresponding regions for an arbitrary range of keys. *

- * @param startRow Starting row in range, inclusive - * @param endRow Ending row in range, exclusive + * @param startKey Starting row in range, inclusive + * @param endKey Ending row in range, exclusive * @return A list of HRegionLocations corresponding to the regions that * contain the specified range * @throws IOException if a remote or network exception occurs @@ -917,8 +917,7 @@ public class HTable implements HTableInterface { * Process a mixed batch of Get, Put and Delete actions. All actions for a * RegionServer are forwarded in one RPC call. Queries are executed in parallel. * - * - * @param actions The collection of actions. + * @param list The collection of actions. * @param results An empty array, same size as list. If an exception is thrown, * you can test here for partial results, and to determine which actions * processed successfully. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HTableInterface.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HTableInterface.java index a988b1d5eb4..ec985d90451 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HTableInterface.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/HTableInterface.java @@ -290,8 +290,7 @@ public interface HTableInterface extends Closeable { * Performs multiple mutations atomically on a single row. Currently * {@link Put} and {@link Delete} are supported. 
* - * @param arm object that specifies the set of mutations to perform - * atomically + * @param rm object that specifies the set of mutations to perform atomically * @throws IOException */ public void mutateRow(final RowMutations rm) throws IOException; @@ -537,7 +536,8 @@ public interface HTableInterface extends Closeable { /** * Creates an instance of the given {@link com.google.protobuf.Service} subclass for each table * region spanning the range from the {@code startKey} row to {@code endKey} row (inclusive), - * and invokes the passed {@link Batch.Call#call(Object)} method with each {@link Service} + * and invokes the passed {@link org.apache.hadoop.hbase.client.coprocessor.Batch.Call#call} + * method with each {@link Service} * instance. * * @param service the protocol buffer {@code Service} implementation to call @@ -545,11 +545,13 @@ public interface HTableInterface extends Closeable { * selection will start with the first table region. * @param endKey select regions up to and including the region containing this row. * If {@code null}, selection will continue through the last table region. - * @param callable this instance's {@link Batch.Call#call(Object)} method will be invoked once - * per table region, using the {@link Service} instance connected to that region. + * @param callable this instance's + * {@link org.apache.hadoop.hbase.client.coprocessor.Batch.Call#call} + * method will be invoked once per table region, using the {@link Service} + * instance connected to that region. 
* @param the {@link Service} subclass to connect to * @param Return type for the {@code callable} parameter's - * {@link org.apache.hadoop.hbase.client.coprocessor.Batch.Call#call(Object)} method + * {@link org.apache.hadoop.hbase.client.coprocessor.Batch.Call#call} method * @return a map of result values keyed by region name */ Map coprocessorService(final Class service, @@ -559,14 +561,14 @@ public interface HTableInterface extends Closeable { /** * Creates an instance of the given {@link com.google.protobuf.Service} subclass for each table * region spanning the range from the {@code startKey} row to {@code endKey} row (inclusive), - * and invokes the passed {@link Batch.Call#call(Object)} method with each {@link Service} - * instance. + * and invokes the passed {@link org.apache.hadoop.hbase.client.coprocessor.Batch.Call#call} + * method with each {@link Service} instance. * *

* The given * {@link org.apache.hadoop.hbase.client.coprocessor.Batch.Callback#update(byte[], byte[], Object)} - * method will be called with the return value from each region's {@link Batch.Call#call(Object)} - * invocation. + * method will be called with the return value from each region's + * {@link org.apache.hadoop.hbase.client.coprocessor.Batch.Call#call} invocation. *

* * @param service the protocol buffer {@code Service} implementation to call @@ -574,12 +576,14 @@ public interface HTableInterface extends Closeable { * selection will start with the first table region. * @param endKey select regions up to and including the region containing this row. * If {@code null}, selection will continue through the last table region. - * @param callable this instance's {@link Batch.Call#call(Object)} method will be invoked once - * per table region, using the {@link Service} instance connected to that region. + * @param callable this instance's + * {@link org.apache.hadoop.hbase.client.coprocessor.Batch.Call#call} method + * will be invoked once per table region, using the {@link Service} instance + * connected to that region. * @param callback * @param the {@link Service} subclass to connect to * @param Return type for the {@code callable} parameter's - * {@link org.apache.hadoop.hbase.client.coprocessor.Batch.Call#call(Object)} method + * {@link org.apache.hadoop.hbase.client.coprocessor.Batch.Call#call} method */ void coprocessorService(final Class service, byte[] startKey, byte[] endKey, final Batch.Call callable, @@ -589,7 +593,7 @@ public interface HTableInterface extends Closeable { * See {@link #setAutoFlush(boolean, boolean)} * * @param autoFlush - * Whether or not to enable 'auto-flush'. + * Whether or not to enable 'auto-flush'. */ public void setAutoFlush(boolean autoFlush); @@ -600,13 +604,13 @@ public interface HTableInterface extends Closeable { * and are immediately executed. Failed operations are not retried. This is * slower but safer. *

- * Turning off {@link #autoFlush} means that multiple {@link Put}s will be + * Turning off {@code autoFlush} means that multiple {@link Put}s will be * accepted before any RPC is actually sent to do the write operations. If the * application dies before pending writes get flushed to HBase, data will be * lost. *

- * When you turn {@link #autoFlush} off, you should also consider the - * {@link #clearBufferOnFail} option. By default, asynchronous {@link Put} + * When you turn {@code #autoFlush} off, you should also consider the + * {@code clearBufferOnFail} option. By default, asynchronous {@link Put} * requests will be retried on failure until successful. However, this can * pollute the writeBuffer and slow down batching performance. Additionally, * you may want to issue a number of Put requests and call @@ -615,9 +619,9 @@ public interface HTableInterface extends Closeable { * has been called, regardless of success. * * @param autoFlush - * Whether or not to enable 'auto-flush'. + * Whether or not to enable 'auto-flush'. * @param clearBufferOnFail - * Whether to keep Put failures in the writeBuffer + * Whether to keep Put failures in the writeBuffer * @see #flushCommits */ public void setAutoFlush(boolean autoFlush, boolean clearBufferOnFail); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/ServerCallable.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/ServerCallable.java index 47f699a0280..28cc7c2fca4 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/client/ServerCallable.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/client/ServerCallable.java @@ -147,7 +147,6 @@ public abstract class ServerCallable implements Callable { * Run this instance with retries, timed waits, * and refinds of missing regions. * - * @param the type of the return value * @return an object of type T * @throws IOException if a remote or network exception occurs * @throws RuntimeException other unspecified error @@ -202,7 +201,6 @@ public abstract class ServerCallable implements Callable { /** * Run this instance against the server once. 
- * @param the type of the return value * @return an object of type T * @throws IOException if a remote or network exception occurs * @throws RuntimeException other unspecified error diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRowProcessorEndpoint.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRowProcessorEndpoint.java index 368f1f7b42b..7f32ba1f37d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRowProcessorEndpoint.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/BaseRowProcessorEndpoint.java @@ -40,7 +40,7 @@ import com.google.protobuf.Service; /** * This class demonstrates how to implement atomic read-modify-writes - * using {@link HRegion#processRowsWithLocks()} and Coprocessor endpoints. + * using {@link HRegion#processRowsWithLocks} and Coprocessor endpoints. */ @InterfaceAudience.Public @InterfaceStability.Evolving @@ -54,7 +54,7 @@ extends RowProcessorService implements CoprocessorService, Coprocessor { * RowProcessorEndpoint. This way the RowProcessor can be class-loaded with * the Coprocessor endpoint together. * - * See {@link TestRowProcessorEndpoint} for example. + * See {@code TestRowProcessorEndpoint} for example. * * The request contains information for constructing processor * (see {@link #constructRowProcessorFromRequest}. 
The processor object defines diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java index 08342410dc8..2f1f3d7c1a3 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/coprocessor/RegionObserver.java @@ -185,7 +185,7 @@ public interface RegionObserver extends Coprocessor { * @param c the environment provided by the region server * @param store the store being compacted * @param scanners the list {@link StoreFileScanner}s to be read from - * @param scantype the {@link ScanType} indicating whether this is a major or minor compaction + * @param scanType the {@link ScanType} indicating whether this is a major or minor compaction * @param earliestPutTs timestamp of the earliest put that was found in any of the involved * store files * @param s the base scanner, if not {@code null}, from previous RegionObserver in the chain diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/BinaryComparator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/BinaryComparator.java index 1df44da1eab..26282db5e54 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/BinaryComparator.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/BinaryComparator.java @@ -62,7 +62,7 @@ public class BinaryComparator extends ByteArrayComparable { * @param pbBytes A pb serialized {@link BinaryComparator} instance * @return An instance of {@link BinaryComparator} made from bytes * @throws DeserializationException - * @see {@link #toByteArray()} + * @see #toByteArray */ public static BinaryComparator parseFrom(final byte [] pbBytes) throws DeserializationException { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/BinaryPrefixComparator.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/BinaryPrefixComparator.java index c102982e867..575ff1e7c46 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/BinaryPrefixComparator.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/BinaryPrefixComparator.java @@ -64,7 +64,7 @@ public class BinaryPrefixComparator extends ByteArrayComparable { * @param pbBytes A pb serialized {@link BinaryPrefixComparator} instance * @return An instance of {@link BinaryPrefixComparator} made from bytes * @throws DeserializationException - * @see {@link #toByteArray()} + * @see #toByteArray */ public static BinaryPrefixComparator parseFrom(final byte [] pbBytes) throws DeserializationException { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/BitComparator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/BitComparator.java index 30bf1feae1c..73afedca461 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/BitComparator.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/BitComparator.java @@ -79,7 +79,7 @@ public class BitComparator extends ByteArrayComparable { * @param pbBytes A pb serialized {@link BitComparator} instance * @return An instance of {@link BitComparator} made from bytes * @throws DeserializationException - * @see {@link #toByteArray()} + * @see #toByteArray */ public static BitComparator parseFrom(final byte [] pbBytes) throws DeserializationException { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/ByteArrayComparable.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/ByteArrayComparable.java index 31bdda22b2f..93b73e26892 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/ByteArrayComparable.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/ByteArrayComparable.java @@ -62,7 +62,7 @@ public abstract class ByteArrayComparable implements Comparable { * @param pbBytes A pb 
serialized {@link ByteArrayComparable} instance * @return An instance of {@link ByteArrayComparable} made from bytes * @throws DeserializationException - * @see {@link #toByteArray()} + * @see #toByteArray */ public static ByteArrayComparable parseFrom(final byte [] pbBytes) throws DeserializationException { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java index acf33e35428..d775177d62c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java @@ -88,7 +88,7 @@ public class ColumnCountGetFilter extends FilterBase { * @param pbBytes A pb serialized {@link ColumnCountGetFilter} instance * @return An instance of {@link ColumnCountGetFilter} made from bytes * @throws DeserializationException - * @see {@link #toByteArray()} + * @see #toByteArray */ public static ColumnCountGetFilter parseFrom(final byte [] pbBytes) throws DeserializationException { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java index 1bc6276b1ad..d58429fb627 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java @@ -108,7 +108,7 @@ public class ColumnPaginationFilter extends FilterBase * @param pbBytes A pb serialized {@link ColumnPaginationFilter} instance * @return An instance of {@link ColumnPaginationFilter} made from bytes * @throws DeserializationException - * @see {@link #toByteArray()} + * @see #toByteArray */ public static ColumnPaginationFilter parseFrom(final byte [] pbBytes) throws DeserializationException { diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java index f45b4c1df2f..226b2b1d3a2 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java @@ -102,7 +102,7 @@ public class ColumnPrefixFilter extends FilterBase { * @param pbBytes A pb serialized {@link ColumnPrefixFilter} instance * @return An instance of {@link ColumnPrefixFilter} made from bytes * @throws DeserializationException - * @see {@link #toByteArray()} + * @see #toByteArray */ public static ColumnPrefixFilter parseFrom(final byte [] pbBytes) throws DeserializationException { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java index b19671e865a..a275d7d28da 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java @@ -180,7 +180,7 @@ public class ColumnRangeFilter extends FilterBase { * @param pbBytes A pb serialized {@link ColumnRangeFilter} instance * @return An instance of {@link ColumnRangeFilter} made from bytes * @throws DeserializationException - * @see {@link #toByteArray()} + * @see #toByteArray */ public static ColumnRangeFilter parseFrom(final byte [] pbBytes) throws DeserializationException { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java index df3301f99d2..65ec48f1eef 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java @@ -234,7 +234,7 @@ 
public class DependentColumnFilter extends CompareFilter { * @param pbBytes A pb serialized {@link DependentColumnFilter} instance * @return An instance of {@link DependentColumnFilter} made from bytes * @throws DeserializationException - * @see {@link #toByteArray()} + * @see #toByteArray */ public static DependentColumnFilter parseFrom(final byte [] pbBytes) throws DeserializationException { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java index 9633ca9fce0..fb7af8db458 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java @@ -92,7 +92,7 @@ public class FamilyFilter extends CompareFilter { * @param pbBytes A pb serialized {@link FamilyFilter} instance * @return An instance of {@link FamilyFilter} made from bytes * @throws DeserializationException - * @see {@link #toByteArray()} + * @see #toByteArray */ public static FamilyFilter parseFrom(final byte [] pbBytes) throws DeserializationException { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/Filter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/Filter.java index bffd329e11f..edb456ebd13 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/Filter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/Filter.java @@ -180,7 +180,7 @@ public abstract class Filter { * @param pbBytes A pb serialized {@link Filter} instance * @return An instance of {@link Filter} made from bytes * @throws DeserializationException - * @see {@link #toByteArray()} + * @see #toByteArray */ public static Filter parseFrom(final byte [] pbBytes) throws DeserializationException { throw new DeserializationException( diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java index 095c6df3dc2..81f9a4ba3ac 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java @@ -293,7 +293,7 @@ public class FilterList extends Filter { * @param pbBytes A pb serialized {@link FilterList} instance * @return An instance of {@link FilterList} made from bytes * @throws DeserializationException - * @see {@link #toByteArray()} + * @see #toByteArray */ public static FilterList parseFrom(final byte [] pbBytes) throws DeserializationException { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/FilterWrapper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/FilterWrapper.java index b7e3072126a..7a9af3506ff 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/FilterWrapper.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/FilterWrapper.java @@ -66,7 +66,7 @@ public class FilterWrapper extends Filter { * @param pbBytes A pb serialized {@link FilterWrapper} instance * @return An instance of {@link FilterWrapper} made from bytes * @throws DeserializationException - * @see {@link #toByteArray()} + * @see #toByteArray */ public static FilterWrapper parseFrom(final byte [] pbBytes) throws DeserializationException { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java index 52300b49b52..1b63560ecb3 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java @@ -86,7 +86,7 @@ public class FirstKeyOnlyFilter extends FilterBase { * @param pbBytes A pb serialized {@link FirstKeyOnlyFilter} instance * @return An instance of {@link FirstKeyOnlyFilter} made from bytes * @throws 
DeserializationException - * @see {@link #toByteArray()} + * @see #toByteArray */ public static FirstKeyOnlyFilter parseFrom(final byte [] pbBytes) throws DeserializationException { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java index b77a73096e1..970e4927c08 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java @@ -93,7 +93,7 @@ public class FirstKeyValueMatchingQualifiersFilter extends FirstKeyOnlyFilter { * @param pbBytes A pb serialized {@link FirstKeyValueMatchingQualifiersFilter} instance * @return An instance of {@link FirstKeyValueMatchingQualifiersFilter} made from bytes * @throws DeserializationException - * @see {@link #toByteArray()} + * @see #toByteArray */ public static FirstKeyValueMatchingQualifiersFilter parseFrom(final byte [] pbBytes) throws DeserializationException { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java index f292eec1af4..00d7b124f22 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java @@ -150,7 +150,7 @@ public class FuzzyRowFilter extends FilterBase { * @param pbBytes A pb serialized {@link FuzzyRowFilter} instance * @return An instance of {@link FuzzyRowFilter} made from bytes * @throws DeserializationException - * @see {@link #toByteArray()} + * @see #toByteArray */ public static FuzzyRowFilter parseFrom(final byte [] pbBytes) throws DeserializationException { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java 
b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java index 4ec249af979..6fb1a6241e6 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java @@ -95,7 +95,7 @@ public class InclusiveStopFilter extends FilterBase { * @param pbBytes A pb serialized {@link InclusiveStopFilter} instance * @return An instance of {@link InclusiveStopFilter} made from bytes * @throws DeserializationException - * @see {@link #toByteArray()} + * @see #toByteArray */ public static InclusiveStopFilter parseFrom(final byte [] pbBytes) throws DeserializationException { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java index b8257899180..3bb1390322a 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java @@ -74,7 +74,7 @@ public class KeyOnlyFilter extends FilterBase { * @param pbBytes A pb serialized {@link KeyOnlyFilter} instance * @return An instance of {@link KeyOnlyFilter} made from bytes * @throws DeserializationException - * @see {@link #toByteArray()} + * @see #toByteArray */ public static KeyOnlyFilter parseFrom(final byte [] pbBytes) throws DeserializationException { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java index 4873b4fd49f..be165aa6f74 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java @@ -121,7 +121,7 @@ public class MultipleColumnPrefixFilter extends FilterBase { * @param pbBytes A pb serialized 
{@link MultipleColumnPrefixFilter} instance * @return An instance of {@link MultipleColumnPrefixFilter} made from bytes * @throws DeserializationException - * @see {@link #toByteArray()} + * @see #toByteArray */ public static MultipleColumnPrefixFilter parseFrom(final byte [] pbBytes) throws DeserializationException { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java index d2025b20314..d944d3e444e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java @@ -61,7 +61,7 @@ public class NullComparator extends ByteArrayComparable { * @param pbBytes A pb serialized {@link NullComparator} instance * @return An instance of {@link NullComparator} made from bytes * @throws DeserializationException - * @see {@link #toByteArray()} + * @see #toByteArray */ public static NullComparator parseFrom(final byte [] pbBytes) throws DeserializationException { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java index b85af2a8d44..2e4628819a4 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java @@ -93,7 +93,7 @@ public class PageFilter extends FilterBase { * @param pbBytes A pb serialized {@link PageFilter} instance * @return An instance of {@link PageFilter} made from bytes * @throws DeserializationException - * @see {@link #toByteArray()} + * @see #toByteArray */ public static PageFilter parseFrom(final byte [] pbBytes) throws DeserializationException { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java index 
a4b3fe13142..9c37b3e685c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java @@ -90,7 +90,7 @@ public class PrefixFilter extends FilterBase { * @param pbBytes A pb serialized {@link PrefixFilter} instance * @return An instance of {@link PrefixFilter} made from bytes * @throws DeserializationException - * @see {@link #toByteArray()} + * @see #toByteArray */ public static PrefixFilter parseFrom(final byte [] pbBytes) throws DeserializationException { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java index 26ba2fbe407..24dcb60a97e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java @@ -92,7 +92,7 @@ public class QualifierFilter extends CompareFilter { * @param pbBytes A pb serialized {@link QualifierFilter} instance * @return An instance of {@link QualifierFilter} made from bytes * @throws DeserializationException - * @see {@link #toByteArray()} + * @see #toByteArray */ public static QualifierFilter parseFrom(final byte [] pbBytes) throws DeserializationException { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/RandomRowFilter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/RandomRowFilter.java index e34b96edfb6..97243695be1 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/RandomRowFilter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/RandomRowFilter.java @@ -122,7 +122,7 @@ public class RandomRowFilter extends FilterBase { * @param pbBytes A pb serialized {@link RandomRowFilter} instance * @return An instance of {@link RandomRowFilter} made from bytes * @throws DeserializationException - * @see {@link #toByteArray()} + * 
@see #toByteArray */ public static RandomRowFilter parseFrom(final byte [] pbBytes) throws DeserializationException { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/RegexStringComparator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/RegexStringComparator.java index 301303f5628..96c35c38812 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/RegexStringComparator.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/RegexStringComparator.java @@ -62,7 +62,7 @@ import java.util.regex.Pattern; * ValueFilter vf = new ValueFilter(CompareOp.EQUAL, * new RegexStringComparator("regex", Pattern.CASE_INSENSITIVE | Pattern.DOTALL)); * - * @see java.util.regex.Pattern; + * @see java.util.regex.Pattern */ @InterfaceAudience.Public @InterfaceStability.Stable @@ -132,7 +132,7 @@ public class RegexStringComparator extends ByteArrayComparable { * @param pbBytes A pb serialized {@link RegexStringComparator} instance * @return An instance of {@link RegexStringComparator} made from bytes * @throws DeserializationException - * @see {@link #toByteArray()} + * @see #toByteArray */ public static RegexStringComparator parseFrom(final byte [] pbBytes) throws DeserializationException { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java index 760cc966de9..0226a138b7c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java @@ -107,7 +107,7 @@ public class RowFilter extends CompareFilter { * @param pbBytes A pb serialized {@link RowFilter} instance * @return An instance of {@link RowFilter} made from bytes * @throws DeserializationException - * @see {@link #toByteArray()} + * @see #toByteArray */ public static RowFilter parseFrom(final byte [] pbBytes) throws DeserializationException { diff --git 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java index 023462ddeb1..c838db5a58d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java @@ -136,7 +136,7 @@ public class SingleColumnValueExcludeFilter extends SingleColumnValueFilter { * @param pbBytes A pb serialized {@link SingleColumnValueExcludeFilter} instance * @return An instance of {@link SingleColumnValueExcludeFilter} made from bytes * @throws DeserializationException - * @see {@link #toByteArray()} + * @see #toByteArray */ public static SingleColumnValueExcludeFilter parseFrom(final byte [] pbBytes) throws DeserializationException { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java index d1cc55e059d..f8f3da9eaa5 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java @@ -332,7 +332,7 @@ public class SingleColumnValueFilter extends FilterBase { * @param pbBytes A pb serialized {@link SingleColumnValueFilter} instance * @return An instance of {@link SingleColumnValueFilter} made from bytes * @throws DeserializationException - * @see {@link #toByteArray()} + * @see #toByteArray */ public static SingleColumnValueFilter parseFrom(final byte [] pbBytes) throws DeserializationException { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java index 7651636892a..1d4388d3da0 100644 --- 
a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java @@ -108,7 +108,7 @@ public class SkipFilter extends FilterBase { * @param pbBytes A pb serialized {@link SkipFilter} instance * @return An instance of {@link SkipFilter} made from bytes * @throws DeserializationException - * @see {@link #toByteArray()} + * @see #toByteArray */ public static SkipFilter parseFrom(final byte [] pbBytes) throws DeserializationException { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/SubstringComparator.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/SubstringComparator.java index eb9fb044d2e..1ed08a2a78a 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/SubstringComparator.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/SubstringComparator.java @@ -82,7 +82,7 @@ public class SubstringComparator extends ByteArrayComparable { * @param pbBytes A pb serialized {@link SubstringComparator} instance * @return An instance of {@link SubstringComparator} made from bytes * @throws DeserializationException - * @see {@link #toByteArray()} + * @see #toByteArray */ public static SubstringComparator parseFrom(final byte [] pbBytes) throws DeserializationException { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java index 62249835fee..20f5d25ec31 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java @@ -123,7 +123,7 @@ public class TimestampsFilter extends FilterBase { * @param pbBytes A pb serialized {@link TimestampsFilter} instance * @return An instance of {@link TimestampsFilter} made from bytes * @throws DeserializationException - * @see {@link #toByteArray()} + * 
@see #toByteArray */ public static TimestampsFilter parseFrom(final byte [] pbBytes) throws DeserializationException { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java index 56c1b3b981c..de100f3ac0a 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java @@ -88,7 +88,7 @@ public class ValueFilter extends CompareFilter { * @param pbBytes A pb serialized {@link ValueFilter} instance * @return An instance of {@link ValueFilter} made from bytes * @throws DeserializationException - * @see {@link #toByteArray()} + * @see #toByteArray */ public static ValueFilter parseFrom(final byte [] pbBytes) throws DeserializationException { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java index 5e435a85be0..6c454e54241 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java @@ -108,7 +108,7 @@ public class WhileMatchFilter extends FilterBase { * @param pbBytes A pb serialized {@link WhileMatchFilter} instance * @return An instance of {@link WhileMatchFilter} made from bytes * @throws DeserializationException - * @see {@link #toByteArray()} + * @see #toByteArray */ public static WhileMatchFilter parseFrom(final byte [] pbBytes) throws DeserializationException { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/fs/HFileSystem.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/fs/HFileSystem.java index 09244d5e24e..20201349c9b 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/fs/HFileSystem.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/fs/HFileSystem.java @@ -66,7 
+66,7 @@ public class HFileSystem extends FilterFileSystem { /** * Create a FileSystem object for HBase regionservers. * @param conf The configuration to be used for the filesystem - * @param useHBaseChecksums if true, then use + * @param useHBaseChecksum if true, then use * checksum verfication in hbase, otherwise * delegate checksum verification to the FileSystem. */ diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FileLink.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FileLink.java index bfa63fb285f..4bd8eaa55e4 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FileLink.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/FileLink.java @@ -56,13 +56,13 @@ import org.apache.hadoop.hbase.util.FSUtils; * original location or in the archive folder. * The FileLink class tries to abstract this concept and given a set of locations * it is able to switch between them making this operation transparent for the user. - * More concrete implementations of the FileLink are the {@link HFileLink} and the {@link HLogLink}. + * {@link HFileLink} is a more concrete implementation of the {@code FileLink}. * *

Back-references: - * To help the {@link CleanerChore} to keep track of the links to a particular file, - * during the FileLink creation, a new file is placed inside a back-reference directory. - * There's one back-reference directory for each file that has links, - * and in the directory there's one file per link. + * To help the {@link org.apache.hadoop.hbase.master.cleaner.CleanerChore} to keep track of + * the links to a particular file, during the {@code FileLink} creation, a new file is placed + * inside a back-reference directory. There's one back-reference directory for each file that + * has links, and in the directory there's one file per link. * *

HFileLink Example *

    diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HFileLink.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HFileLink.java index a94aa66ce4e..19084aff321 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HFileLink.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/HFileLink.java @@ -69,7 +69,7 @@ public class HFileLink extends FileLink { } /** - * @param rootdir Path to the root directory where hbase files are stored + * @param rootDir Path to the root directory where hbase files are stored * @param archiveDir Path to the hbase archive directory * @param path The path of the HFile Link. */ @@ -82,7 +82,7 @@ public class HFileLink extends FileLink { /** * @param originPath Path to the hfile in the table directory - * @param archiveDir Path to the hfile in the archive directory + * @param archivePath Path to the hfile in the archive directory */ public HFileLink(final Path originPath, final Path archivePath) { this.originPath = originPath; @@ -105,7 +105,7 @@ public class HFileLink extends FileLink { } /** - * @param p Path to check. + * @param path Path to check. * @return True if the path is a HFileLink. 
*/ public static boolean isHFileLink(final Path path) { @@ -158,7 +158,7 @@ public class HFileLink extends FileLink { * or a path to the archived file like: /hbase/.archive/table/region/cf/hfile * * @param fs {@link FileSystem} on which to check the HFileLink - * @param rootdir root hbase directory + * @param rootDir root hbase directory * @param archiveDir Path to the hbase archive directory * @param path HFileLink path * @return Referenced path (original path or archived path) @@ -325,7 +325,7 @@ public class HFileLink extends FileLink { /** * Get the full path of the HFile referenced by the back reference * - * @param rootdir root hbase directory + * @param rootDir root hbase directory * @param linkRefPath Link Back Reference path * @return full path of the referenced hfile * @throws IOException on unexpected error. diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCache.java index 90c269f5393..670fb864695 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCache.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/BlockCache.java @@ -51,8 +51,8 @@ public interface BlockCache { * @param caching Whether this request has caching enabled (used for stats) * @param repeat Whether this is a repeat lookup for the same block * (used to avoid double counting cache misses when doing double-check locking) - * {@see HFileReaderV2#readBlock(long, long, boolean, boolean, boolean, BlockType)} * @return Block or null if block is not in 2 cache. 
+ * @see HFileReaderV2#readBlock(long, long, boolean, boolean, boolean, BlockType) */ public Cacheable getBlock(BlockCacheKey cacheKey, boolean caching, boolean repeat); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java index 2b70163f057..cac754befee 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlock.java @@ -596,17 +596,14 @@ public class HFileBlock implements Cacheable { /** * Unified version 2 {@link HFile} block writer. The intended usage pattern * is as follows: - *
      - *
    • Construct an {@link HFileBlock.Writer}, providing a compression - * algorithm - *
    • Call {@link Writer#startWriting(BlockType, boolean)} and get a data stream to - * write to - *
    • Write your data into the stream - *
    • Call {@link Writer#writeHeaderAndData(FSDataOutputStream)} as many times as you need to - * store the serialized block into an external stream, or call - * {@link Writer#getHeaderAndData()} to get it as a byte array. - *
    • Repeat to write more blocks - *
    + *
      + *
    1. Construct an {@link HFileBlock.Writer}, providing a compression algorithm. + *
    2. Call {@link Writer#startWriting} and get a data stream to write to. + *
    3. Write your data into the stream. + *
    4. Call {@link Writer#writeHeaderAndData(FSDataOutputStream)} as many times as you need to + * store the serialized block into an external stream. + *
    5. Repeat to write more blocks. + *
    *

    */ public static class Writer { @@ -696,7 +693,7 @@ public class HFileBlock implements Cacheable { /** * @param compressionAlgorithm compression algorithm to use - * @param dataBlockEncoderAlgo data block encoding algorithm to use + * @param dataBlockEncoder data block encoding algorithm to use * @param checksumType type of checksum * @param bytesPerChecksum bytes per checksum */ diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV1.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV1.java index 125dcdc8845..48395d6486c 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV1.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileReaderV1.java @@ -401,7 +401,6 @@ public class HFileReaderV1 extends AbstractHFileReader { * first key in the block = key, then you'll get thrown exceptions. * @param key to find * @param seekBefore find the key before the exact match. - * @return */ protected abstract int blockSeek(byte[] key, int offset, int length, boolean seekBefore); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java index 5ea08a89368..126c6eb0be4 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/LruBlockCache.java @@ -334,8 +334,8 @@ public class LruBlockCache implements BlockCache, HeapSize { * @param caching true if the caller caches blocks on cache misses * @param repeat Whether this is a repeat lookup for the same block * (used to avoid double counting cache misses when doing double-check locking) - * {@see HFileReaderV2#readBlock(long, long, boolean, boolean, boolean, BlockType)} * @return buffer of specified cache key, or null if not in cache + * @see HFileReaderV2#readBlock(long, long, boolean, boolean, boolean, 
BlockType) */ @Override public Cacheable getBlock(BlockCacheKey cacheKey, boolean caching, boolean repeat) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/HBaseClient.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/HBaseClient.java index 039fc6ae11e..9d6dfd21b8f 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/HBaseClient.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/HBaseClient.java @@ -1173,7 +1173,6 @@ public class HBaseClient { /** * Construct an IPC client with the default SocketFactory - * @param valueClass value class * @param conf configuration */ public HBaseClient(Configuration conf) { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/HBaseServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/HBaseServer.java index 0d8d77f53b2..4bc4e26d95e 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/HBaseServer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/HBaseServer.java @@ -193,7 +193,7 @@ public abstract class HBaseServer implements RpcServer { /** Returns the server instance called under or null. May be called under * {@link #call(Class, RpcRequestBody, long, MonitoredRPCHandler)} implementations, - * and under protobuf methods of paramters and return values. + * and under protobuf methods of parameters and return values. * Permits applications to access the server context. 
* @return HBaseServer */ diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultithreadedTableMapper.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultithreadedTableMapper.java index 8cd62586fa0..755f7cdafd9 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultithreadedTableMapper.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/mapreduce/MultithreadedTableMapper.java @@ -52,10 +52,8 @@ import org.apache.hadoop.util.ReflectionUtils; * Mapper implementations using this MapRunnable must be thread-safe. *

    * The Map-Reduce job has to be configured with the mapper to use via - * {@link #setMapperClass(Configuration, Class)} and - * the number of thread the thread-pool can use with the - * {@link #getNumberOfThreads(Configuration) method. The default - * value is 10 threads. + * {@link #setMapperClass} and the number of threads the thread-pool can use with the + * {@link #getNumberOfThreads} method. The default value is 10 threads. *

    */ diff --git a/pom.xml b/pom.xml index 5d05b560dc6..8c6d1a4172c 100644 --- a/pom.xml +++ b/pom.xml @@ -398,6 +398,7 @@ 2g true target/apidocs + true