From b04c976fe6b818377c757f4ab52d27ba62cf8ceb Mon Sep 17 00:00:00 2001 From: Mike Drob Date: Thu, 26 Apr 2018 20:12:07 -0500 Subject: [PATCH] HBASE-20478 Update checkstyle to v8.2 Cannot go to latest (8.9) yet due to https://github.com/checkstyle/checkstyle/issues/5279 * move hbaseanti import checks to checkstyle * implement a few missing equals checks, and ignore one * fix lots of javadoc errors Signed-off-by: Sean Busbey --- dev-support/hbase-personality.sh | 18 --------- .../hbase/checkstyle-suppressions.xml | 2 + .../src/main/resources/hbase/checkstyle.xml | 8 +++- .../hbase/client/ClusterConnection.java | 4 +- .../hbase/client/replication/TableCFs.java | 2 +- .../hadoop/hbase/protobuf/ProtobufUtil.java | 2 +- .../hadoop/hbase/quotas/QuotaTableUtil.java | 2 +- .../shaded/protobuf/RequestConverter.java | 12 +++--- .../org/apache/hadoop/hbase/net/Address.java | 2 +- .../hadoop/hbase/util/AbstractByteRange.java | 14 +++++++ .../apache/hadoop/hbase/util/JRubyFormat.java | 8 ++-- .../hadoop/hbase/util/OrderedBytes.java | 2 +- .../java/org/apache/hadoop/hbase/Waiter.java | 2 +- .../access/SecureBulkLoadEndpoint.java | 9 ++++- .../hadoop/hbase/http/TestHttpServer.java | 8 ++-- .../hadoop/hbase/IntegrationTestBase.java | 6 +-- ...tegrationTestRegionReplicaReplication.java | 2 +- .../hbase/mapred/TestTableInputFormat.java | 6 +-- .../hbase/mapreduce/TestImportExport.java | 6 +-- .../hbase/mapreduce/TestTableInputFormat.java | 4 +- .../hadoop/hbase/util/LoadTestTool.java | 4 +- .../favored/StartcodeAgnosticServerName.java | 4 ++ .../hbase/io/hfile/HFileBlockIndex.java | 2 +- .../apache/hadoop/hbase/ipc/RpcServer.java | 2 +- .../hbase/regionserver/StoreFlushContext.java | 2 +- .../querymatcher/ColumnTracker.java | 4 +- .../security/access/AccessControlFilter.java | 2 +- .../hadoop/hbase/HBaseTestingUtility.java | 34 ++++++++--------- .../hadoop/hbase/MultithreadedTestUtil.java | 13 +++---- .../hbase/TestMetaTableAccessorNoCluster.java | 2 +-
.../TestPartialResultsFromClientSide.java | 28 +++++++------- .../hbase/backup/TestHFileArchiving.java | 6 +-- .../client/HConnectionTestingUtility.java | 4 +- .../hbase/filter/TestColumnRangeFilter.java | 38 ++++++++++++------- .../hadoop/hbase/io/hfile/KVGenerator.java | 2 +- .../hbase/master/TestRegionPlacement.java | 7 ++-- .../regionserver/TestCompoundBloomFilter.java | 4 +- .../hbase/regionserver/TestHRegion.java | 2 +- .../hbase/snapshot/SnapshotTestingUtils.java | 8 ++-- .../hbase/util/HFileArchiveTestingUtil.java | 2 +- .../hadoop/hbase/util/TestRegionSplitter.java | 2 +- pom.xml | 4 +- 42 files changed, 159 insertions(+), 136 deletions(-) diff --git a/dev-support/hbase-personality.sh b/dev-support/hbase-personality.sh index c3f566866b1..da960193f22 100755 --- a/dev-support/hbase-personality.sh +++ b/dev-support/hbase-personality.sh @@ -661,24 +661,6 @@ function hbaseanti_patchfile ((result=result+1)) fi - warnings=$(${GREP} -c 'import org.apache.hadoop.classification' "${patchfile}") - if [[ ${warnings} -gt 0 ]]; then - add_vote_table -1 hbaseanti "" "The patch appears use Hadoop classification instead of HBase." - ((result=result+1)) - fi - - warnings=$(${GREP} -c 'import org.codehaus.jackson' "${patchfile}") - if [[ ${warnings} -gt 0 ]]; then - add_vote_table -1 hbaseanti "" "The patch appears use Jackson 1 classes/annotations." - ((result=result+1)) - fi - - warnings=$(${GREP} -cE 'org.apache.commons.logging.Log(Factory|;)' "${patchfile}") - if [[ ${warnings} -gt 0 ]]; then - add_vote_table -1 hbaseanti "" "The patch appears to use commons-logging instead of slf4j." 
- ((result=result+1)) - fi - if [[ ${result} -gt 0 ]]; then return 1 fi diff --git a/hbase-checkstyle/src/main/resources/hbase/checkstyle-suppressions.xml b/hbase-checkstyle/src/main/resources/hbase/checkstyle-suppressions.xml index 82876fbaae8..aea7e941a8f 100644 --- a/hbase-checkstyle/src/main/resources/hbase/checkstyle-suppressions.xml +++ b/hbase-checkstyle/src/main/resources/hbase/checkstyle-suppressions.xml @@ -37,4 +37,6 @@ + + diff --git a/hbase-checkstyle/src/main/resources/hbase/checkstyle.xml b/hbase-checkstyle/src/main/resources/hbase/checkstyle.xml index fa43c62ad0c..7ad797cf1f3 100644 --- a/hbase-checkstyle/src/main/resources/hbase/checkstyle.xml +++ b/hbase-checkstyle/src/main/resources/hbase/checkstyle.xml @@ -51,6 +51,7 @@ + diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClusterConnection.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClusterConnection.java index 3e055b0eb34..adf47ca2b4a 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClusterConnection.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClusterConnection.java @@ -232,7 +232,7 @@ public interface ClusterConnection extends Connection { /** * Establishes a connection to the region server at the specified address. - * @param serverName + * @param serverName the region server to connect to * @return proxy for HRegionServer * @throws IOException if a remote or network exception occurs */ @@ -242,7 +242,7 @@ public interface ClusterConnection extends Connection { * Establishes a connection to the region server at the specified address, and returns * a region client protocol. 
* - * @param serverName + * @param serverName the region server to connect to * @return ClientProtocol proxy for RegionServer * @throws IOException if a remote or network exception occurs * diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/TableCFs.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/TableCFs.java index eefcfbb8a1d..aea354beac0 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/TableCFs.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/TableCFs.java @@ -26,7 +26,7 @@ import org.apache.yetus.audience.InterfaceAudience; /** * Used by {@link org.apache.hadoop.hbase.client.Admin#listReplicatedTableCFs()}. - * The cfs is a map of . + * The cfs is a map of <ColumnFamily, ReplicationScope>. */ @InterfaceAudience.Public public class TableCFs { diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java index f5f383fa4d2..965aa34b5b0 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufUtil.java @@ -1561,7 +1561,7 @@ public final class ProtobufUtil { /** - * @see {@link #buildGetServerInfoRequest()} + * @see #buildGetServerInfoRequest() */ private static GetServerInfoRequest GET_SERVER_INFO_REQUEST = GetServerInfoRequest.newBuilder().build(); diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaTableUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaTableUtil.java index 5c284070496..8d9566590db 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaTableUtil.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaTableUtil.java @@ -84,7 +84,7 @@ import org.apache.hadoop.hbase.util.Bytes; * u.<user>q:s<global-quotas> * 
u.<user>q:s.<table><table-quotas> * u.<user>q:s.<ns><namespace-quotas> - * */ @InterfaceAudience.Private @InterfaceStability.Evolving diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/RequestConverter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/RequestConverter.java index 8ce2f1b855e..fc037a83e57 100644 --- a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/RequestConverter.java +++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/RequestConverter.java @@ -1026,7 +1026,7 @@ public final class RequestConverter { } /** - * @see {@link #buildRollWALWriterRequest()} + * @see #buildRollWALWriterRequest() */ private static RollWALWriterRequest ROLL_WAL_WRITER_REQUEST = RollWALWriterRequest.newBuilder() .build(); @@ -1040,7 +1040,7 @@ public final class RequestConverter { } /** - * @see {@link #buildGetServerInfoRequest()} + * @see #buildGetServerInfoRequest() */ private static GetServerInfoRequest GET_SERVER_INFO_REQUEST = GetServerInfoRequest.newBuilder() .build(); @@ -1522,7 +1522,7 @@ public final class RequestConverter { } /** - * @see {@link #buildCatalogScanRequest} + * @see #buildCatalogScanRequest */ private static final RunCatalogScanRequest CATALOG_SCAN_REQUEST = RunCatalogScanRequest.newBuilder().build(); @@ -1544,7 +1544,7 @@ public final class RequestConverter { } /** - * @see {@link #buildIsCatalogJanitorEnabledRequest()} + * @see #buildIsCatalogJanitorEnabledRequest() */ private static final IsCatalogJanitorEnabledRequest IS_CATALOG_JANITOR_ENABLED_REQUEST = IsCatalogJanitorEnabledRequest.newBuilder().build(); @@ -1558,7 +1558,7 @@ public final class RequestConverter { } /** - * @see {@link #buildCleanerChoreRequest} + * @see #buildRunCleanerChoreRequest() */ private static final RunCleanerChoreRequest CLEANER_CHORE_REQUEST = RunCleanerChoreRequest.newBuilder().build(); @@ -1580,7 +1580,7 @@ public final class RequestConverter { } /** - * @see {@link 
#buildIsCleanerChoreEnabledRequest()} + * @see #buildIsCleanerChoreEnabledRequest() */ private static final IsCleanerChoreEnabledRequest IS_CLEANER_CHORE_ENABLED_REQUEST = IsCleanerChoreEnabledRequest.newBuilder().build(); diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/net/Address.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/net/Address.java index ab7fa3bcd49..ea4ba12f666 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/net/Address.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/net/Address.java @@ -26,7 +26,7 @@ import org.apache.hbase.thirdparty.com.google.common.net.HostAndPort; * An immutable type to hold a hostname and port combo, like an Endpoint * or java.net.InetSocketAddress (but without danger of our calling * resolve -- we do NOT want a resolve happening every time we want - * to hold a hostname and port combo). This class is also <>. + * to hold a hostname and port combo). This class is also {@link Comparable} *

In implementation this class is a facade over Guava's {@link HostAndPort}. * We cannot have Guava classes in our API hence this Type. */ diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AbstractByteRange.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AbstractByteRange.java index 60de6cdde4d..47c59a14769 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AbstractByteRange.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/AbstractByteRange.java @@ -315,6 +315,20 @@ public abstract class AbstractByteRange implements ByteRange { hash = UNSET_HASH_VALUE; } + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (!(obj instanceof ByteRange)) { + return false; + } + return compareTo((ByteRange) obj) == 0; + } + /** * Bitwise comparison of each byte in the array. Unsigned comparison, not * paying attention to java's signed bytes. diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JRubyFormat.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JRubyFormat.java index 562f4ae59d7..84e425292bc 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JRubyFormat.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/JRubyFormat.java @@ -30,12 +30,13 @@ import org.apache.hbase.thirdparty.com.google.common.escape.Escapers; /** * Utility class for converting objects to JRuby. * - * It handles null, Boolean, Number, String, byte[], List, Map structures. + * It handles null, Boolean, Number, String, byte[], List<Object>, Map<String, Object> + * structures. * *

* E.g. *

- * Map map = new LinkedHashMap<>();
+ * Map<String, Object> map = new LinkedHashMap<>();
  * map.put("null", null);
  * map.put("boolean", true);
  * map.put("number", 1);
@@ -48,7 +49,8 @@ import org.apache.hbase.thirdparty.com.google.common.escape.Escapers;
  * 

* Calling {@link #print(Object)} method will result: *

- * { null => '', boolean => 'true', number => '1', string => 'str', binary => '010203', list => [ '1', '2', 'true' ] }
+ * { null => '', boolean => 'true', number => '1', string => 'str',
+ *   binary => '010203', list => [ '1', '2', 'true' ] }
  * 
*

*/ diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/OrderedBytes.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/OrderedBytes.java index 299ea1d7542..ff2fd453835 100644 --- a/hbase-common/src/main/java/org/apache/hadoop/hbase/util/OrderedBytes.java +++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/util/OrderedBytes.java @@ -339,7 +339,7 @@ public class OrderedBytes { /** * Perform unsigned comparison between two long values. Conforms to the same interface as - * {@link org.apache.hadoop.hbase.CellComparator#COMPARATOR#compare(Object, Object)}. + * {@link org.apache.hadoop.hbase.CellComparator}. */ private static int unsignedCmp(long x1, long x2) { int cmp; diff --git a/hbase-common/src/test/java/org/apache/hadoop/hbase/Waiter.java b/hbase-common/src/test/java/org/apache/hadoop/hbase/Waiter.java index 70750056b13..15ecefede87 100644 --- a/hbase-common/src/test/java/org/apache/hadoop/hbase/Waiter.java +++ b/hbase-common/src/test/java/org/apache/hadoop/hbase/Waiter.java @@ -83,7 +83,7 @@ public final class Waiter { /** * A predicate 'closure' used by the {@link Waiter#waitFor(Configuration, long, Predicate)} and * {@link Waiter#waitFor(Configuration, long, Predicate)} and - * {@link Waiter#waitFor(Configuration, long, long, boolean, Predicate) methods. + * {@link Waiter#waitFor(Configuration, long, long, boolean, Predicate)} methods. 
*/ @InterfaceAudience.Private public interface Predicate { diff --git a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.java b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.java index a603ef14f2a..5418cd0b8e7 100644 --- a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.java +++ b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/security/access/SecureBulkLoadEndpoint.java @@ -95,6 +95,9 @@ public class SecureBulkLoadEndpoint extends SecureBulkLoadService implements Reg done.run(null); } + /** + * Convert from CPEP protobuf 2.5 to internal protobuf 3.3. + */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.PrepareBulkLoadRequest convert(PrepareBulkLoadRequest request) throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { @@ -121,8 +124,7 @@ public class SecureBulkLoadEndpoint extends SecureBulkLoadService implements Reg } /** - * Convert from CPEP protobuf 2.5 to internal protobuf 3.3. - * @throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException + * Convert from CPEP protobuf 2.5 to internal protobuf 3.3. */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.CleanupBulkLoadRequest convert(CleanupBulkLoadRequest request) @@ -153,6 +155,9 @@ public class SecureBulkLoadEndpoint extends SecureBulkLoadService implements Reg done.run(SecureBulkLoadHFilesResponse.newBuilder().setLoaded(loaded).build()); } + /** + * Convert from CPEP protobuf 2.5 to internal protobuf 3.3. 
+ */ org.apache.hadoop.hbase.shaded.protobuf.generated.ClientProtos.BulkLoadHFileRequest convert(BulkLoadHFileRequest request) throws org.apache.hbase.thirdparty.com.google.protobuf.InvalidProtocolBufferException { diff --git a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java index 10553da1752..0f5b8a1a016 100644 --- a/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java +++ b/hbase-http/src/test/java/org/apache/hadoop/hbase/http/TestHttpServer.java @@ -322,10 +322,10 @@ public class TestHttpServer extends HttpServerFunctionalTest { * will be accessed as the passed user, by sending user.name request * parameter. * - * @param urlstring - * @param userName - * @return - * @throws IOException + * @param urlstring The url to access + * @param userName The user to perform access as + * @return The HTTP response code + * @throws IOException if there is a problem communicating with the server */ static int getHttpStatusCode(String urlstring, String userName) throws IOException { diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBase.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBase.java index ee94078291f..8126d0bebc2 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBase.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestBase.java @@ -38,10 +38,10 @@ import org.apache.hbase.thirdparty.org.apache.commons.cli.CommandLine; /** * Base class for HBase integration tests that want to use the Chaos Monkey. - * Usage: bin/hbase + * Usage: bin/hbase <sub_class_of_IntegrationTestBase> <options> * Options: -h,--help Show usage - * -m,--monkey Which chaos monkey to run - * -monkeyProps The properties file for specifying chaos monkey properties. 
+ * -m,--monkey <arg> Which chaos monkey to run + * -monkeyProps <arg> The properties file for specifying chaos monkey properties. * -ncc Option to not clean up the cluster at the end. */ public abstract class IntegrationTestBase extends AbstractHBaseTool { diff --git a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestRegionReplicaReplication.java b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestRegionReplicaReplication.java index be2616ab744..328e5d56342 100644 --- a/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestRegionReplicaReplication.java +++ b/hbase-it/src/test/java/org/apache/hadoop/hbase/IntegrationTestRegionReplicaReplication.java @@ -50,7 +50,7 @@ import org.apache.hbase.thirdparty.com.google.common.collect.Lists; * with the replication of the edits before read_delay_ms to the given region replica id so that * the read and verify will not fail. * - * The job will run for at least given runtime (default 10min) by running a concurrent + * The job will run for at least given runtime (default 10min) by running a concurrent * writer and reader workload followed by a concurrent updater and reader workload for * num_keys_per_server. *

diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableInputFormat.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableInputFormat.java index 91bf3efd43a..2ef8351a09b 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableInputFormat.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapred/TestTableInputFormat.java @@ -107,8 +107,8 @@ public class TestTableInputFormat { /** * Setup a table with two rows and values. * - * @param tableName - * @return + * @param tableName the name of the table to create + * @return A Table instance for the created table. * @throws IOException */ public static Table createTable(byte[] tableName) throws IOException { @@ -119,7 +119,7 @@ public class TestTableInputFormat { * Setup a table with two rows and values per column family. * * @param tableName - * @return + * @return A Table instance for the created table. * @throws IOException */ public static Table createTable(byte[] tableName, byte[][] families) throws IOException { diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java index c7916de37a8..97933a44944 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestImportExport.java @@ -539,9 +539,9 @@ public class TestImportExport { } /** - * Count the number of keyvalues in the specified table for the given timerange - * @param table - * @return + * Count the number of keyvalues in the specified table with the given filter + * @param table the table to scan + * @return the number of keyvalues found * @throws IOException */ private int getCount(Table table, Filter filter) throws IOException { diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormat.java 
b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormat.java index b7ea5d0ec16..eae860627b6 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormat.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/mapreduce/TestTableInputFormat.java @@ -105,7 +105,7 @@ public class TestTableInputFormat { * Setup a table with two rows and values. * * @param tableName - * @return + * @return A Table instance for the created table. * @throws IOException */ public static Table createTable(byte[] tableName) throws IOException { @@ -116,7 +116,7 @@ public class TestTableInputFormat { * Setup a table with two rows and values per column family. * * @param tableName - * @return + * @return A Table instance for the created table. * @throws IOException */ public static Table createTable(byte[] tableName, byte[][] families) throws IOException { diff --git a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java index 0f7c5c00469..41788588ff1 100644 --- a/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java +++ b/hbase-mapreduce/src/test/java/org/apache/hadoop/hbase/util/LoadTestTool.java @@ -820,10 +820,10 @@ public class LoadTestTool extends AbstractHBaseTool { /** * When NUM_TABLES is specified, the function starts multiple worker threads * which individually start a LoadTestTool instance to load a table. Each - * table name is in format _. For example, "-tn test -num_tables 2" + * table name is in format <tn>_<index>. 
For example, "-tn test -num_tables 2" * , table names will be "test_1", "test_2" * - * @throws IOException + * @throws IOException if one of the load tasks is unable to complete */ private int parallelLoadTables() throws IOException { diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/StartcodeAgnosticServerName.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/StartcodeAgnosticServerName.java index 4fa870fab96..d1f9f450f1d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/StartcodeAgnosticServerName.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/favored/StartcodeAgnosticServerName.java @@ -63,4 +63,8 @@ class StartcodeAgnosticServerName extends ServerName { public int hashCode() { return getHostAndPort().hashCode(); } + + // Do not need @Override #equals() because super.equals() delegates to compareTo(), which ends + // up doing the right thing. We have a test for it, so the checkstyle warning here would be a + // false positive. } diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java index e8818beb5bc..d353bf8ee35 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/io/hfile/HFileBlockIndex.java @@ -1481,7 +1481,7 @@ public class HFileBlockIndex { * The same as {@link #add(byte[], long, int, long)} but does not take the * key/value into account. Used for single-level indexes. 
* - * @see {@link #add(byte[], long, int, long)} + * @see #add(byte[], long, int, long) */ public void add(byte[] firstKey, long blockOffset, int onDiskDataSize) { add(firstKey, blockOffset, onDiskDataSize, -1); diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java index 686d5785cc7..31f0860f582 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/ipc/RpcServer.java @@ -585,7 +585,7 @@ public abstract class RpcServer implements RpcServerInterface, } /** - * Helper for {@link #channelRead(java.nio.channels.ReadableByteChannel, java.nio.ByteBuffer). + * Helper for {@link #channelRead(java.nio.channels.ReadableByteChannel, java.nio.ByteBuffer)}. * Only one of readCh or writeCh should be non-null. * * @param readCh read channel diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFlushContext.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFlushContext.java index ec48879e440..e53fdc0de2a 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFlushContext.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/StoreFlushContext.java @@ -57,7 +57,7 @@ interface StoreFlushContext { * * A very short operation * - * @return + * @return whether compaction is required * @throws IOException */ boolean commit(MonitoredTask status) throws IOException; diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/ColumnTracker.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/ColumnTracker.java index dc210accfce..bd6cb20d829 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/ColumnTracker.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/querymatcher/ColumnTracker.java 
@@ -60,7 +60,7 @@ public interface ColumnTracker extends ShipperListener { * method based on the return type (INCLUDE) of this method. The values that can be returned by * this method are {@link MatchCode#INCLUDE}, {@link MatchCode#SEEK_NEXT_COL} and * {@link MatchCode#SEEK_NEXT_ROW}. - * @param cell + * @param cell a cell with the column to match against * @param type The type of the Cell * @return The match code instance. * @throws IOException in case there is an internal consistency problem caused by a data @@ -77,7 +77,7 @@ public interface ColumnTracker extends ShipperListener { * Implementations which include all the columns could just return {@link MatchCode#INCLUDE} in * the {@link #checkColumn(Cell, byte)} method and perform all the operations in this * checkVersions method. - * @param cell + * @param cell a cell with the column to match against * @param timestamp The timestamp of the cell. * @param type the type of the key value (Put/Delete) * @param ignoreCount indicates if the KV needs to be excluded while counting (used during diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlFilter.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlFilter.java index 7e444cb363b..b4e87d2f84d 100644 --- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlFilter.java +++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlFilter.java @@ -165,7 +165,7 @@ class AccessControlFilter extends FilterBase { * @param pbBytes A pb serialized {@link AccessControlFilter} instance * @return An instance of {@link AccessControlFilter} made from bytes * @throws org.apache.hadoop.hbase.exceptions.DeserializationException - * @see {@link #toByteArray()} + * @see #toByteArray() */ public static AccessControlFilter parseFrom(final byte [] pbBytes) throws DeserializationException { diff --git 
a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java index ba40fbf272b..2bdb0a03afa 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/HBaseTestingUtility.java @@ -590,7 +590,7 @@ public class HBaseTestingUtility extends HBaseZKTestingUtility { * Start a minidfscluster. * @param servers How many DNs to start. * @throws Exception - * @see {@link #shutdownMiniDFSCluster()} + * @see #shutdownMiniDFSCluster() * @return The mini dfs cluster created. */ public MiniDFSCluster startMiniDFSCluster(int servers) throws Exception { @@ -605,7 +605,7 @@ public class HBaseTestingUtility extends HBaseZKTestingUtility { * datanodes will have the same host name. * @param hosts hostnames DNs to run on. * @throws Exception - * @see {@link #shutdownMiniDFSCluster()} + * @see #shutdownMiniDFSCluster() * @return The mini dfs cluster created. */ public MiniDFSCluster startMiniDFSCluster(final String hosts[]) @@ -623,7 +623,7 @@ public class HBaseTestingUtility extends HBaseZKTestingUtility { * @param servers How many DNs to start. * @param hosts hostnames DNs to run on. * @throws Exception - * @see {@link #shutdownMiniDFSCluster()} + * @see #shutdownMiniDFSCluster() * @return The mini dfs cluster created. */ public MiniDFSCluster startMiniDFSCluster(int servers, final String hosts[]) @@ -767,7 +767,7 @@ public class HBaseTestingUtility extends HBaseZKTestingUtility { * Start up a minicluster of hbase, dfs, and zookeeper. * @throws Exception * @return Mini hbase cluster instance created. 
- * @see {@link #shutdownMiniDFSCluster()} + * @see #shutdownMiniDFSCluster() */ public MiniHBaseCluster startMiniCluster() throws Exception { return startMiniCluster(1, 1); @@ -777,7 +777,7 @@ public class HBaseTestingUtility extends HBaseZKTestingUtility { * Start up a minicluster of hbase, dfs, and zookeeper where WAL's walDir is created separately. * @throws Exception * @return Mini hbase cluster instance created. - * @see {@link #shutdownMiniDFSCluster()} + * @see #shutdownMiniDFSCluster() */ public MiniHBaseCluster startMiniCluster(boolean withWALDir) throws Exception { return startMiniCluster(1, 1, 1, null, null, null, false, withWALDir); @@ -789,7 +789,7 @@ public class HBaseTestingUtility extends HBaseZKTestingUtility { * (will overwrite if dir already exists) * @throws Exception * @return Mini hbase cluster instance created. - * @see {@link #shutdownMiniDFSCluster()} + * @see #shutdownMiniDFSCluster() */ public MiniHBaseCluster startMiniCluster(final int numSlaves, boolean create) throws Exception { @@ -806,7 +806,7 @@ public class HBaseTestingUtility extends HBaseZKTestingUtility { * hbase.regionserver.info.port is -1 (i.e. no ui per regionserver) otherwise * bind errors. * @throws Exception - * @see {@link #shutdownMiniCluster()} + * @see #shutdownMiniCluster() * @return Mini hbase cluster instance created. */ public MiniHBaseCluster startMiniCluster(final int numSlaves) @@ -823,7 +823,7 @@ public class HBaseTestingUtility extends HBaseZKTestingUtility { * Start minicluster. Whether to create a new root or data dir path even if such a path * has been created earlier is decided based on flag create * @throws Exception - * @see {@link #shutdownMiniCluster()} + * @see #shutdownMiniCluster() * @return Mini hbase cluster instance created. 
*/ public MiniHBaseCluster startMiniCluster(final int numMasters, @@ -835,7 +835,7 @@ public class HBaseTestingUtility extends HBaseZKTestingUtility { /** * start minicluster * @throws Exception - * @see {@link #shutdownMiniCluster()} + * @see #shutdownMiniCluster() * @return Mini hbase cluster instance created. */ public MiniHBaseCluster startMiniCluster(final int numMasters, @@ -872,7 +872,7 @@ public class HBaseTestingUtility extends HBaseZKTestingUtility { * If you start MiniDFSCluster without host names, * all instances of the datanodes will have the same host name. * @throws Exception - * @see {@link #shutdownMiniCluster()} + * @see #shutdownMiniCluster() * @return Mini hbase cluster instance created. */ public MiniHBaseCluster startMiniCluster(final int numMasters, @@ -914,7 +914,7 @@ public class HBaseTestingUtility extends HBaseZKTestingUtility { * @param regionserverClass The class to use as HRegionServer, or null for * default * @throws Exception - * @see {@link #shutdownMiniCluster()} + * @see #shutdownMiniCluster() * @return Mini hbase cluster instance created. */ public MiniHBaseCluster startMiniCluster(final int numMasters, @@ -1003,7 +1003,7 @@ public class HBaseTestingUtility extends HBaseZKTestingUtility { * @return Reference to the hbase mini hbase cluster. * @throws IOException * @throws InterruptedException - * @see {@link #startMiniCluster()} + * @see #startMiniCluster() */ public MiniHBaseCluster startMiniHBaseCluster(final int numMasters, final int numSlaves, List rsPorts, Class masterClass, @@ -1088,7 +1088,7 @@ public class HBaseTestingUtility extends HBaseZKTestingUtility { /** * Stops mini hbase, zk, and hdfs clusters. 
* @throws IOException - * @see {@link #startMiniCluster(int)} + * @see #startMiniCluster(int) */ public void shutdownMiniCluster() throws Exception { LOG.info("Shutting down minicluster"); @@ -1746,10 +1746,10 @@ public class HBaseTestingUtility extends HBaseZKTestingUtility { /** * Create an HRegion that writes to the local tmp dirs - * @param desc - * @param startKey - * @param endKey - * @return + * @param desc a table descriptor indicating which table the region belongs to + * @param startKey the start boundary of the region + * @param endKey the end boundary of the region + * @return a region that writes to local dir for testing * @throws IOException */ public HRegion createLocalHRegion(TableDescriptor desc, byte [] startKey, diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/MultithreadedTestUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/MultithreadedTestUtil.java index 86ac2f82655..99aef644129 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/MultithreadedTestUtil.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/MultithreadedTestUtil.java @@ -163,26 +163,25 @@ public abstract class MultithreadedTestUtil { * Verify that no assertions have failed inside a future. * Used for unit tests that spawn threads. E.g., *

- * - * List> results = Lists.newArrayList(); - * Future f = executor.submit(new Callable { + *

+   *   List<Future<Void>> results = Lists.newArrayList();
+   *   Future<Void> f = executor.submit(new Callable<Void> {
    *     public Void call() {
    *       assertTrue(someMethod());
    *     }
    *   });
    *   results.add(f);
    *   assertOnFutures(results);
-   * 
+   * 
* @param threadResults A list of futures - * @param * @throws InterruptedException If interrupted when waiting for a result * from one of the futures * @throws ExecutionException If an exception other than AssertionError * occurs inside any of the futures */ - public static void assertOnFutures(List> threadResults) + public static void assertOnFutures(List> threadResults) throws InterruptedException, ExecutionException { - for (Future threadResult : threadResults) { + for (Future threadResult : threadResults) { try { threadResult.get(); } catch (ExecutionException e) { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessorNoCluster.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessorNoCluster.java index 686f281c450..5d36ea90f95 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessorNoCluster.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestMetaTableAccessorNoCluster.java @@ -122,7 +122,7 @@ public class TestMetaTableAccessorNoCluster { /** * Test that MetaTableAccessor will ride over server throwing * "Server not running" IOEs. 
- * @see @link {https://issues.apache.org/jira/browse/HBASE-3446} + * @see HBASE-3446 * @throws IOException * @throws InterruptedException */ diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestPartialResultsFromClientSide.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestPartialResultsFromClientSide.java index 965243f61f0..54800a964f4 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/TestPartialResultsFromClientSide.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/TestPartialResultsFromClientSide.java @@ -603,12 +603,12 @@ public class TestPartialResultsFromClientSide { /** * Make puts to put the input value into each combination of row, family, and qualifier - * @param rows - * @param families - * @param qualifiers - * @param value - * @return - * @throws IOException + * @param rows the rows to use + * @param families the families to use + * @param qualifiers the qualifiers to use + * @param value the values to use + * @return the dot product of the given rows, families, qualifiers, and values + * @throws IOException if there is a problem creating one of the Put objects */ static ArrayList createPuts(byte[][] rows, byte[][] families, byte[][] qualifiers, byte[] value) throws IOException { @@ -632,11 +632,11 @@ public class TestPartialResultsFromClientSide { /** * Make key values to represent each possible combination of family and qualifier in the specified * row. 
- * @param row - * @param families - * @param qualifiers - * @param value - * @return + * @param row the row to use + * @param families the families to use + * @param qualifiers the qualifiers to use + * @param value the values to use + * @return the dot product of the given families, qualifiers, and values for a given row */ static ArrayList createKeyValuesForRow(byte[] row, byte[][] families, byte[][] qualifiers, byte[] value) { @@ -772,9 +772,9 @@ public class TestPartialResultsFromClientSide { /** * Exhausts the scanner by calling next repetitively. Once completely exhausted, close scanner and * return total cell count - * @param scanner - * @return - * @throws Exception + * @param scanner the scanner to exhaust + * @return the number of cells counted + * @throws Exception if there is a problem retrieving cells from the scanner */ private int countCellsFromScanner(ResultScanner scanner) throws Exception { Result result = null; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestHFileArchiving.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestHFileArchiving.java index 578724fef0c..05abbd1da56 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestHFileArchiving.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/backup/TestHFileArchiving.java @@ -424,9 +424,9 @@ public class TestHFileArchiving { /** * Get the names of all the files below the given directory - * @param fs - * @param archiveDir - * @return + * @param fs the file system to inspect + * @param archiveDir the directory in which to look + * @return a list of all files in the directory and sub-directories * @throws IOException */ private List getAllFileNames(final FileSystem fs, Path archiveDir) throws IOException { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/HConnectionTestingUtility.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/HConnectionTestingUtility.java index 
0f896b33075..a8beab65e81 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/HConnectionTestingUtility.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/HConnectionTestingUtility.java @@ -50,7 +50,7 @@ public class HConnectionTestingUtility { /** * Get a Mocked {@link ClusterConnection} that goes with the passed conf * configuration instance. Minimally the mock will return - * conf when {@link ClusterConnection#getConfiguration()} is invoked. + * <code>conf</code> when {@link ClusterConnection#getConfiguration()} is invoked. * Be sure to shutdown the connection when done by calling * {@link Connection#close()} else it will stick around; this is probably not what you want. * @param conf configuration @@ -148,7 +148,7 @@ public class HConnectionTestingUtility { * @param conf configuration * @return ClusterConnection object for conf * @throws ZooKeeperConnectionException - * @see @link + * [Dead link]: See also * {http://mockito.googlecode.com/svn/branches/1.6/javadoc/org/mockito/Mockito.html#spy(T)} */ public static ClusterConnection getSpiedConnection(final Configuration conf) diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnRangeFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnRangeFilter.java index f6452fb7946..98bc57bdc55 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnRangeFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/filter/TestColumnRangeFilter.java @@ -24,9 +24,15 @@ import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; +import java.util.Objects; import java.util.Set; -import org.apache.hadoop.hbase.*; + +import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.HBaseClassTestRule; +import org.apache.hadoop.hbase.HBaseTestingUtility; +import org.apache.hadoop.hbase.KeyValue; +import org.apache.hadoop.hbase.KeyValueTestUtil; +import
org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.client.Durability; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.Result; @@ -91,6 +97,24 @@ class StringRange { return hashCode; } + @Override + public boolean equals(Object obj) { + if (this == obj) { + return true; + } + if (obj == null) { + return false; + } + if (!(obj instanceof StringRange)) { + return false; + } + StringRange oth = (StringRange) obj; + return this.startInclusive == oth.startInclusive && + this.endInclusive == oth.endInclusive && + Objects.equals(this.start, oth.start) && + Objects.equals(this.end, oth.end); + } + @Override public String toString() { String result = (this.startInclusive ? "[" : "(") @@ -133,33 +157,21 @@ public class TestColumnRangeFilter { @Rule public TestName name = new TestName(); - /** - * @throws java.lang.Exception - */ @BeforeClass public static void setUpBeforeClass() throws Exception { TEST_UTIL.startMiniCluster(); } - /** - * @throws java.lang.Exception - */ @AfterClass public static void tearDownAfterClass() throws Exception { TEST_UTIL.shutdownMiniCluster(); } - /** - * @throws java.lang.Exception - */ @Before public void setUp() throws Exception { // Nothing to do. } - /** - * @throws java.lang.Exception - */ @After public void tearDown() throws Exception { // Nothing to do. diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/KVGenerator.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/KVGenerator.java index b22cb8c20b8..892f4c9b322 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/KVGenerator.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/io/hfile/KVGenerator.java @@ -22,7 +22,7 @@ import org.apache.hadoop.io.BytesWritable; import org.apache.hadoop.io.WritableComparator; /** - * Generate random pairs. + * Generate random <key, value> pairs. *

* Copied from * hadoop-3315 tfile. diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement.java index a3aa5b9741e..f1ff63f4afb 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/master/TestRegionPlacement.java @@ -401,7 +401,7 @@ public class TestRegionPlacement { /** * Verify the number of user regions is assigned to the primary * region server based on the plan is expected - * @param expectedNum. + * @param expectedNum the expected number of assigned regions * @throws IOException */ private void verifyRegionOnPrimaryRS(int expectedNum) @@ -536,9 +536,8 @@ public class TestRegionPlacement { /** * Create a table with specified table name and region number. - * @param tablename - * @param regionNum - * @return + * @param tableName the name of the table to be created + * @param regionNum number of regions to create * @throws IOException */ private static void createTable(TableName tableName, int regionNum) diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java index 25eb700ee0a..0b17d286ccf 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestCompoundBloomFilter.java @@ -124,9 +124,7 @@ public class TestCompoundBloomFilter { private FileSystem fs; private BlockCache blockCache; - /** - * A message of the form in test#<number>" to include in logging. - */ + /** A message of the form "in test#<number>:" to include in logging. 
*/ private String testIdMsg; private static final int GENERATION_SEED = 2319; diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java index 6856aadf5ba..1ff6b273d96 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestHRegion.java @@ -5925,7 +5925,7 @@ public class TestHRegion { /** * Utility method to setup a WAL mock. * Needs to do the bit where we close latch on the WALKeyImpl on append else test hangs. - * @return + * @return a mock WAL * @throws IOException */ private WAL mockWAL() throws IOException { diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java index e798839879d..09cac3d4e4e 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/snapshot/SnapshotTestingUtils.java @@ -274,9 +274,9 @@ public final class SnapshotTestingUtils { * Helper method for testing async snapshot operations. Just waits for the * given snapshot to complete on the server by repeatedly checking the master. 
* - * @param master: the master running the snapshot - * @param snapshot: the snapshot to check - * @param sleep: amount to sleep between checks to see if the snapshot is done + * @param master the master running the snapshot + * @param snapshot the snapshot to check + * @param sleep amount to sleep between checks to see if the snapshot is done * @throws ServiceException if the snapshot fails * @throws org.apache.hbase.thirdparty.com.google.protobuf.ServiceException */ @@ -357,7 +357,7 @@ public final class SnapshotTestingUtils { /** * List all the HFiles in the given table * - * @param fs: FileSystem where the table lives + * @param fs FileSystem where the table lives * @param tableDir directory of the table * @return array of the current HFiles in the table (could be a zero-length array) * @throws IOException on unexecpted error reading the FS diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileArchiveTestingUtil.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileArchiveTestingUtil.java index 22a99a35df8..c58362c7880 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileArchiveTestingUtil.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/HFileArchiveTestingUtil.java @@ -140,7 +140,7 @@ public class HFileArchiveTestingUtil { } /** - * @return , where each is sorted + * @return <expected, gotten, backup>, where each is sorted */ private static List> getFileLists(FileStatus[] previous, FileStatus[] archived) { List> files = new ArrayList<>(3); diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitter.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitter.java index f8f538a4797..ace22388f5f 100644 --- a/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitter.java +++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/util/TestRegionSplitter.java @@ -423,7 +423,7 @@ public class TestRegionSplitter { } /** - * 
List.indexOf() doesn't really work for a List <byte[]>, because byte[] + * List.indexOf() doesn't really work for a List<byte[]>, because byte[] * doesn't override equals(). This method checks whether a list contains * a given element by checking each element using the byte array * comparator. diff --git a/pom.xml b/pom.xml index 3bf83bc194e..cccc1762cce 100755 --- a/pom.xml +++ b/pom.xml @@ -1388,7 +1388,7 @@ 1.5.0-alpha.15 3.0.0 1.4 - 6.18 + 8.2 1.6.0 2.2.0 1.3.9-1 @@ -1397,7 +1397,7 @@ 1.0.0 1.8 3.3.0 - 2.17 + 3.0.0 3.6.1 3.0.1 2.10