{
*
*
*
- *
- * table.scanAll(new Scan().withStartRow(row, false).setLimit(1)).thenAccept(results -> {
+ * table.scanAll(new Scan().withStartRow(row, false).setLimit(1)).thenAccept(results -> {
* if (results.isEmpty()) {
- * System.out.println("No row after " + Bytes.toStringBinary(row));
+ * System.out.println("No row after " + Bytes.toStringBinary(row));
* } else {
* System.out.println("The closest row after " + Bytes.toStringBinary(row) + " is "
- * + Bytes.toStringBinary(results.stream().findFirst().get().getRow()));
+ * + Bytes.toStringBinary(results.stream().findFirst().get().getRow()));
* }
* });
- *
*
*
* If your result set is very large, you should use other scan method to get a scanner or use
@@ -574,9 +577,7 @@ public interface AsyncTable {
* one line lambda expression, like:
*
*
- *
- * channel -> xxxService.newStub(channel)
- *
+ * channel -> xxxService.newStub(channel)
*
*
* @param stubMaker a delegation to the actual {@code newStub} call.
@@ -609,7 +610,7 @@ public interface AsyncTable {
*
*
* locateThenCall(byte[] row) {
- * locate(row).whenComplete((location, locateError) -> {
+ * locate(row).whenComplete((location, locateError) -> {
* if (locateError != null) {
* callback.onError(locateError);
* return;
@@ -621,7 +622,7 @@ public interface AsyncTable {
* } else {
* locateThenCall(region.getEndKey());
* }
- * sendCall().whenComplete((resp, error) -> {
+ * sendCall().whenComplete((resp, error) -> {
* if (error != null) {
* callback.onRegionError(region, error);
* } else {
@@ -639,12 +640,14 @@ public interface AsyncTable {
interface CoprocessorCallback {
/**
+ * Indicate that the response of a region is available
* @param region the region that the response belongs to
* @param resp the response of the coprocessor call
*/
void onRegionComplete(RegionInfo region, R resp);
/**
+ * Indicate that the error for a region is available
* @param region the region that the error belongs to
* @param error the response error of the coprocessor call
*/
@@ -675,6 +678,7 @@ public interface AsyncTable {
interface CoprocessorServiceBuilder {
/**
+ * Specify a start row
* @param startKey start region selection with region containing this row, inclusive.
*/
default CoprocessorServiceBuilder fromRow(byte[] startKey) {
@@ -682,12 +686,14 @@ public interface AsyncTable {
}
/**
+ * Specify a start row
* @param startKey start region selection with region containing this row
* @param inclusive whether to include the startKey
*/
CoprocessorServiceBuilder fromRow(byte[] startKey, boolean inclusive);
/**
+ * Specify a stop row
* @param endKey select regions up to and including the region containing this row, exclusive.
*/
default CoprocessorServiceBuilder toRow(byte[] endKey) {
@@ -695,6 +701,7 @@ public interface AsyncTable {
}
/**
+ * Specify a stop row
* @param endKey select regions up to and including the region containing this row
* @param inclusive whether to include the endKey
*/
@@ -716,9 +723,7 @@ public interface AsyncTable {
* is only a one line lambda expression, like:
*
*
- *
- * channel -> xxxService.newStub(channel)
- *
+ * channel -> xxxService.newStub(channel)
*
*
* @param stubMaker a delegation to the actual {@code newStub} call.
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/CatalogReplicaLoadBalanceSimpleSelector.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/CatalogReplicaLoadBalanceSimpleSelector.java
index 2e704c5bdc1..e990fa6d65f 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/CatalogReplicaLoadBalanceSimpleSelector.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/CatalogReplicaLoadBalanceSimpleSelector.java
@@ -128,6 +128,7 @@ class CatalogReplicaLoadBalanceSimpleSelector
* Selector's internal state.
* @param loc the location which causes exception.
*/
+ @Override
public void onError(HRegionLocation loc) {
ConcurrentNavigableMap tableCache = computeIfAbsent(staleCache,
loc.getRegion().getTable(), () -> new ConcurrentSkipListMap<>(BYTES_COMPARATOR));
@@ -159,18 +160,19 @@ class CatalogReplicaLoadBalanceSimpleSelector
* When it looks up a location, it will call this method to find a replica region to go. For a
* normal case, > 99% of region locations from catalog/meta replica will be up to date. In extreme
* cases such as region server crashes, it will depends on how fast replication catches up.
- * @param tablename table name it looks up
+ * @param tableName table name it looks up
* @param row key it looks up.
* @param locateType locateType, Only BEFORE and CURRENT will be passed in.
* @return catalog replica id
*/
- public int select(final TableName tablename, final byte[] row,
+ @Override
+ public int select(final TableName tableName, final byte[] row,
final RegionLocateType locateType) {
Preconditions.checkArgument(
locateType == RegionLocateType.BEFORE || locateType == RegionLocateType.CURRENT,
"Expected type BEFORE or CURRENT but got: %s", locateType);
- ConcurrentNavigableMap tableCache = staleCache.get(tablename);
+ ConcurrentNavigableMap tableCache = staleCache.get(tableName);
// If there is no entry in StaleCache, select a random replica id.
if (tableCache == null) {
@@ -199,7 +201,7 @@ class CatalogReplicaLoadBalanceSimpleSelector
(EnvironmentEdgeManager.currentTime() - entry.getValue().getTimestamp())
>= STALE_CACHE_TIMEOUT_IN_MILLISECONDS
) {
- LOG.debug("Entry for table {} with startKey {}, {} times out", tablename, entry.getKey(),
+ LOG.debug("Entry for table {} with startKey {}, {} times out", tableName, entry.getKey(),
entry);
tableCache.remove(entry.getKey());
return getRandomReplicaId();
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/CheckAndMutate.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/CheckAndMutate.java
index 56402fb42f8..b31a0b27242 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/CheckAndMutate.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/CheckAndMutate.java
@@ -97,6 +97,7 @@ public final class CheckAndMutate implements Row {
}
/**
+ * Check for match
* @param family family to check
* @param qualifier qualifier to check
* @param compareOp comparison operator to use
@@ -113,6 +114,7 @@ public final class CheckAndMutate implements Row {
}
/**
+ * Check for match
* @param filter filter to check
* @return the CheckAndMutate object
*/
@@ -122,6 +124,7 @@ public final class CheckAndMutate implements Row {
}
/**
+ * Specify a time range
* @param timeRange time range to check
* @return the CheckAndMutate object
*/
@@ -144,6 +147,7 @@ public final class CheckAndMutate implements Row {
}
/**
+ * Build the CheckAndMutate object
* @param put data to put if check succeeds
* @return a CheckAndMutate object
*/
@@ -157,6 +161,7 @@ public final class CheckAndMutate implements Row {
}
/**
+ * Build the CheckAndMutate object
* @param delete data to delete if check succeeds
* @return a CheckAndMutate object
*/
@@ -170,6 +175,7 @@ public final class CheckAndMutate implements Row {
}
/**
+ * Build the CheckAndMutate object with an Increment to commit if the check succeeds.
* @param increment data to increment if check succeeds
* @return a CheckAndMutate object
*/
@@ -183,6 +189,7 @@ public final class CheckAndMutate implements Row {
}
/**
+ * Build the CheckAndMutate object with an Append to commit if the check succeeds.
* @param append data to append if check succeeds
* @return a CheckAndMutate object
*/
@@ -196,6 +203,7 @@ public final class CheckAndMutate implements Row {
}
/**
+ * Build the CheckAndMutate object with a RowMutations to commit if the check succeeds.
* @param mutations mutations to perform if check succeeds
* @return a CheckAndMutate object
*/
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientIdGenerator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientIdGenerator.java
index f5bedbe88d2..ee497ef56c5 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientIdGenerator.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ClientIdGenerator.java
@@ -19,6 +19,7 @@ package org.apache.hadoop.hbase.client;
import java.io.IOException;
import java.lang.management.ManagementFactory;
+import java.util.List;
import org.apache.hadoop.hbase.util.Addressing;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
@@ -26,6 +27,9 @@ import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import org.apache.hbase.thirdparty.com.google.common.base.Splitter;
+import org.apache.hbase.thirdparty.com.google.common.collect.Iterators;
+
/**
* The class that is able to determine some unique strings for the client, such as an IP address,
* PID, and composite deterministic ID.
@@ -38,8 +42,8 @@ final class ClientIdGenerator {
}
/**
- * @return a unique ID incorporating IP address, PID, TID and timer. Might be an overkill... Note
- * though that new UUID in java by default is just a random number.
+ * Returns a unique ID incorporating IP address, PID, TID and timer. Might be an overkill... Note
+ * though that new UUID in java by default is just a random number.
*/
public static byte[] generateClientId() {
byte[] selfBytes = getIpAddressBytes();
@@ -61,10 +65,10 @@ final class ClientIdGenerator {
/** Returns PID of the current process, if it can be extracted from JVM name, or null. */
public static Long getPid() {
String name = ManagementFactory.getRuntimeMXBean().getName();
- String[] nameParts = name.split("@");
- if (nameParts.length == 2) { // 12345@somewhere
+ List nameParts = Splitter.on('@').splitToList(name);
+ if (nameParts.size() == 2) { // 12345@somewhere
try {
- return Long.parseLong(nameParts[0]);
+ return Long.parseLong(Iterators.get(nameParts.iterator(), 0));
} catch (NumberFormatException ex) {
LOG.warn("Failed to get PID from [" + name + "]", ex);
}
@@ -75,8 +79,8 @@ final class ClientIdGenerator {
}
/**
- * @return Some IPv4/IPv6 address available on the current machine that is up, not virtual and not
- * a loopback address. Empty array if none can be found or error occurred.
+ * Returns some IPv4/IPv6 address available on the current machine that is up, not virtual and not
+ * a loopback address. Empty array if none can be found or error occurred.
*/
public static byte[] getIpAddressBytes() {
try {
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ColumnFamilyDescriptor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ColumnFamilyDescriptor.java
index b3d8e4fd600..d73e67ee0fb 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ColumnFamilyDescriptor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ColumnFamilyDescriptor.java
@@ -98,10 +98,7 @@ public interface ColumnFamilyDescriptor {
/** Returns an unmodifiable map. */
Map getConfiguration();
- /**
- * @param key the key whose associated value is to be returned
- * @return accessing the configuration value by key.
- */
+ /** Returns accessing the configuration value by key. */
String getConfigurationValue(String key);
/** Returns replication factor set for this CF */
@@ -117,8 +114,8 @@ public interface ColumnFamilyDescriptor {
String getEncryptionType();
/**
- * @return in-memory compaction policy if set for the cf. Returns null if no policy is set for for
- * this column family
+ * Returns in-memory compaction policy if set for the cf. Returns null if no policy is set for
+ * this column family
*/
MemoryCompactionPolicy getInMemoryCompaction();
@@ -163,32 +160,35 @@ public interface ColumnFamilyDescriptor {
int getTimeToLive();
/**
+ * Get a configuration value.
* @param key The key.
* @return A clone value. Null if no mapping for the key
*/
Bytes getValue(Bytes key);
/**
+ * Get a configuration value.
* @param key The key.
* @return A clone value. Null if no mapping for the key
*/
String getValue(String key);
/**
+ * Get a configuration value.
* @param key The key.
* @return A clone value. Null if no mapping for the key
*/
byte[] getValue(byte[] key);
/**
- * It clone all bytes of all elements.
+ * Get all configuration values. It clones all bytes of all elements.
* @return All values
*/
Map getValues();
/**
- * @return True if hfile DATA type blocks should be cached (You cannot disable caching of INDEX
- * and BLOOM type blocks).
+ * Returns True if hfile DATA type blocks should be cached (You cannot disable caching of INDEX
+ * and BLOOM type blocks).
*/
boolean isBlockCacheEnabled();
@@ -202,8 +202,8 @@ public interface ColumnFamilyDescriptor {
boolean isCacheIndexesOnWrite();
/**
- * @return Whether KV tags should be compressed along with DataBlockEncoding. When no
- * DataBlockEncoding is been used, this is having no effect.
+ * Returns Whether KV tags should be compressed along with DataBlockEncoding. When no
+ * DataBlockEncoding is being used, this has no effect.
*/
boolean isCompressTags();
@@ -211,8 +211,8 @@ public interface ColumnFamilyDescriptor {
boolean isEvictBlocksOnClose();
/**
- * @return True if we are to favor keeping all values for this column family in the HRegionServer
- * cache.
+ * Returns True if we are to favor keeping all values for this column family in the HRegionServer
+ * cache.
*/
boolean isInMemory();
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ColumnFamilyDescriptorBuilder.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ColumnFamilyDescriptorBuilder.java
index 80178027b6f..cf4de2edfec 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ColumnFamilyDescriptorBuilder.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ColumnFamilyDescriptorBuilder.java
@@ -42,9 +42,6 @@ import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos.ColumnFamilySchema;
-/**
- * @since 2.0.0
- */
@InterfaceAudience.Public
public class ColumnFamilyDescriptorBuilder {
// For future backward compatibility
@@ -330,6 +327,7 @@ public class ColumnFamilyDescriptorBuilder {
}
/**
+ * Check if the column family name is legal.
* @param b Family name.
* @return b
* @throws IllegalArgumentException If not null and not a legitimate family name: i.e. 'printable'
@@ -398,6 +396,7 @@ public class ColumnFamilyDescriptorBuilder {
}
/**
+ * Serialize the table descriptor to a byte array.
* @param desc The table descriptor to serialize
* @return This instance serialized with pb with pb magic prefix
*/
@@ -690,11 +689,6 @@ public class ColumnFamilyDescriptorBuilder {
return Collections.unmodifiableMap(values);
}
- /**
- * @param key The key.
- * @param value The value.
- * @return this (for chained invocation)
- */
public ModifyableColumnFamilyDescriptor setValue(byte[] key, byte[] value) {
return setValue(toBytesOrNull(key, Function.identity()),
toBytesOrNull(value, Function.identity()));
@@ -708,11 +702,6 @@ public class ColumnFamilyDescriptorBuilder {
return setValue(key, toBytesOrNull(value, Bytes::toBytes));
}
- /**
- * @param key The key.
- * @param value The value.
- * @return this (for chained invocation)
- */
private ModifyableColumnFamilyDescriptor setValue(Bytes key, Bytes value) {
if (value == null || value.getLength() == 0) {
values.remove(key);
@@ -749,6 +738,7 @@ public class ColumnFamilyDescriptorBuilder {
}
/**
+ * Set the maximum number of versions to retain.
* @param maxVersions maximum number of versions
* @return this (for chained invocation)
*/
@@ -768,7 +758,7 @@ public class ColumnFamilyDescriptorBuilder {
}
/**
- * Set minimum and maximum versions to keep
+ * Set minimum and maximum versions to keep.
* @param minVersions minimal number of versions
* @param maxVersions maximum number of versions
* @return this (for chained invocation)
@@ -795,10 +785,6 @@ public class ColumnFamilyDescriptorBuilder {
return getStringOrDefault(BLOCKSIZE_BYTES, Integer::valueOf, DEFAULT_BLOCKSIZE);
}
- /**
- * @param s Blocksize to use when writing out storefiles/hfiles on this column family.
- * @return this (for chained invocation)
- */
public ModifyableColumnFamilyDescriptor setBlocksize(int s) {
return setValue(BLOCKSIZE_BYTES, Integer.toString(s));
}
@@ -901,6 +887,7 @@ public class ColumnFamilyDescriptorBuilder {
}
/**
+ * Set the inMemory flag
* @param inMemory True if we are to favor keeping all values for this column family in the
* HRegionServer cache
* @return this (for chained invocation)
@@ -915,10 +902,6 @@ public class ColumnFamilyDescriptorBuilder {
n -> MemoryCompactionPolicy.valueOf(n.toUpperCase()), null);
}
- /**
- * @param inMemoryCompaction the prefered in-memory compaction policy for this column family
- * @return this (for chained invocation)
- */
public ModifyableColumnFamilyDescriptor
setInMemoryCompaction(MemoryCompactionPolicy inMemoryCompaction) {
return setValue(IN_MEMORY_COMPACTION_BYTES, inMemoryCompaction.name());
@@ -930,10 +913,6 @@ public class ColumnFamilyDescriptorBuilder {
DEFAULT_KEEP_DELETED);
}
- /**
- * @param keepDeletedCells True if deleted rows should not be collected immediately.
- * @return this (for chained invocation)
- */
public ModifyableColumnFamilyDescriptor setKeepDeletedCells(KeepDeletedCells keepDeletedCells) {
return setValue(KEEP_DELETED_CELLS_BYTES, keepDeletedCells.name());
}
@@ -959,6 +938,7 @@ public class ColumnFamilyDescriptorBuilder {
}
/**
+ * Set the time to live
* @param timeToLive Time-to-live of cell contents, in seconds.
* @return this (for chained invocation)
*/
@@ -967,9 +947,10 @@ public class ColumnFamilyDescriptorBuilder {
}
/**
+ * Set the time to live
* @param timeToLive Time-to-live of cell contents, in seconds.
* @return this (for chained invocation)
- * @throws org.apache.hadoop.hbase.exceptions.HBaseException
+ * @throws org.apache.hadoop.hbase.exceptions.HBaseException exception
*/
public ModifyableColumnFamilyDescriptor setTimeToLive(String timeToLive) throws HBaseException {
return setTimeToLive(Integer.parseInt(PrettyPrinter.valueOf(timeToLive, Unit.TIME_INTERVAL)));
@@ -981,6 +962,7 @@ public class ColumnFamilyDescriptorBuilder {
}
/**
+ * Set minimum versions to retain.
* @param minVersions The minimum number of versions to keep. (used when timeToLive is set)
* @return this (for chained invocation)
*/
@@ -1010,6 +992,7 @@ public class ColumnFamilyDescriptorBuilder {
}
/**
+ * Set the blockCacheEnabled flag
* @param blockCacheEnabled True if hfile DATA type blocks should be cached (We always cache
* INDEX and BLOOM blocks; you cannot turn this off).
* @return this (for chained invocation)
@@ -1034,10 +1017,6 @@ public class ColumnFamilyDescriptorBuilder {
DEFAULT_REPLICATION_SCOPE);
}
- /**
- * @param scope the scope tag
- * @return this (for chained invocation)
- */
public ModifyableColumnFamilyDescriptor setScope(int scope) {
return setValue(REPLICATION_SCOPE_BYTES, Integer.toString(scope));
}
@@ -1049,6 +1028,7 @@ public class ColumnFamilyDescriptorBuilder {
}
/**
+ * Set the setCacheDataOnWrite flag
* @param value true if we should cache data blocks on write
* @return this (for chained invocation)
*/
@@ -1063,6 +1043,7 @@ public class ColumnFamilyDescriptorBuilder {
}
/**
+ * Set the setCacheIndexesOnWrite flag
* @param value true if we should cache index blocks on write
* @return this (for chained invocation)
*/
@@ -1077,6 +1058,7 @@ public class ColumnFamilyDescriptorBuilder {
}
/**
+ * Set the setCacheBloomsOnWrite flag.
* @param value true if we should cache bloomfilter blocks on write
* @return this (for chained invocation)
*/
@@ -1091,6 +1073,7 @@ public class ColumnFamilyDescriptorBuilder {
}
/**
+ * Set the setEvictBlocksOnClose flag.
* @param value true if we should evict cached blocks from the blockcache on close
* @return this (for chained invocation)
*/
@@ -1105,6 +1088,7 @@ public class ColumnFamilyDescriptorBuilder {
}
/**
+ * Set the setPrefetchBlocksOnOpen flag
* @param value true if we should prefetch blocks into the blockcache on open
* @return this (for chained invocation)
*/
@@ -1232,19 +1216,17 @@ public class ColumnFamilyDescriptorBuilder {
return COMPARATOR.compare(this, other);
}
- /**
- * @return This instance serialized with pb with pb magic prefix
- * @see #parseFrom(byte[])
- */
+ /** Returns This instance serialized with pb with pb magic prefix */
private byte[] toByteArray() {
return ProtobufUtil.prependPBMagic(ProtobufUtil.toColumnFamilySchema(this).toByteArray());
}
/**
+ * Parse the serialized representation of a {@link ModifyableColumnFamilyDescriptor}
* @param bytes A pb serialized {@link ModifyableColumnFamilyDescriptor} instance with pb magic
* prefix
* @return An instance of {@link ModifyableColumnFamilyDescriptor} made from bytes
- * n * @see #toByteArray()
+ * @see #toByteArray()
*/
private static ColumnFamilyDescriptor parseFrom(final byte[] bytes)
throws DeserializationException {
@@ -1290,8 +1272,8 @@ public class ColumnFamilyDescriptorBuilder {
}
/**
- * Remove a configuration setting represented by the key from the {@link #configuration} map. n
- * * @return this (for chained invocation)
+ * Remove a configuration setting represented by the key from the {@link #configuration} map.
+ * @return this (for chained invocation)
*/
public ModifyableColumnFamilyDescriptor removeConfiguration(final String key) {
return setConfiguration(key, null);
@@ -1303,8 +1285,8 @@ public class ColumnFamilyDescriptorBuilder {
}
/**
- * Set the encryption algorithm for use with this family n * @return this (for chained
- * invocation)
+ * Set the encryption algorithm for use with this family
+ * @return this (for chained invocation)
*/
public ModifyableColumnFamilyDescriptor setEncryptionType(String algorithm) {
return setValue(ENCRYPTION_BYTES, algorithm);
@@ -1316,7 +1298,8 @@ public class ColumnFamilyDescriptorBuilder {
}
/**
- * Set the raw crypto key attribute for the family n * @return this (for chained invocation)
+ * Set the raw crypto key attribute for the family
+ * @return this (for chained invocation)
*/
public ModifyableColumnFamilyDescriptor setEncryptionKey(byte[] keyBytes) {
return setValue(ENCRYPTION_KEY_BYTES, new Bytes(keyBytes));
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/CompactType.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/CompactType.java
index 225bb072db7..412fd76d9ec 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/CompactType.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/CompactType.java
@@ -26,9 +26,7 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Public
public enum CompactType {
- NORMAL(0),
- MOB(1);
+ NORMAL,
+ MOB
- CompactType(int value) {
- }
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Delete.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Delete.java
index 755c0ca0b8c..545e8c38c4b 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Delete.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Delete.java
@@ -111,8 +111,6 @@ public class Delete extends Mutation {
*
* This timestamp is ONLY used for a delete row operation. If specifying families or columns, you
* must specify each timestamp individually.
- * @param row We make a local copy of this passed in row. nn * @param timestamp maximum version
- * timestamp (only for delete row)
*/
public Delete(final byte[] row, final int rowOffset, final int rowLength, long timestamp) {
checkRow(row, rowOffset, rowLength);
@@ -121,6 +119,7 @@ public class Delete extends Mutation {
}
/**
+ * Create a Delete operation using another Delete as template.
* @param deleteToCopy delete to copy
*/
public Delete(final Delete deleteToCopy) {
@@ -144,6 +143,7 @@ public class Delete extends Mutation {
* @param cell An existing cell of type "delete".
* @return this for invocation chaining n
*/
+ @Override
public Delete add(Cell cell) throws IOException {
super.add(cell);
return this;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java
index 17975ff631d..f4e06101255 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Get.java
@@ -445,7 +445,7 @@ public class Get extends Query implements Row {
if (this == obj) {
return true;
}
- if (obj == null || getClass() != obj.getClass()) {
+ if (!(obj instanceof Row)) {
return false;
}
Row other = (Row) obj;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Increment.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Increment.java
index dea15cac8d5..e4b177e3bca 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Increment.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Increment.java
@@ -96,6 +96,7 @@ public class Increment extends Mutation {
* Add the specified KeyValue to this operation.
* @param cell individual Cell n * @throws java.io.IOException e
*/
+ @Override
public Increment add(Cell cell) throws IOException {
super.add(cell);
return this;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/IsolationLevel.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/IsolationLevel.java
index 7804e48de9f..3f94fc30f9c 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/IsolationLevel.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/IsolationLevel.java
@@ -29,11 +29,8 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Public
public enum IsolationLevel {
- READ_COMMITTED(1),
- READ_UNCOMMITTED(2);
-
- IsolationLevel(int value) {
- }
+ READ_COMMITTED,
+ READ_UNCOMMITTED;
public byte[] toBytes() {
return new byte[] { toByte() };
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/LogQueryFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/LogQueryFilter.java
index b2d217da3de..61137076a12 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/LogQueryFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/LogQueryFilter.java
@@ -112,13 +112,10 @@ public class LogQueryFilter {
if (this == o) {
return true;
}
-
- if (o == null || getClass() != o.getClass()) {
+ if (!(o instanceof LogQueryFilter)) {
return false;
}
-
LogQueryFilter that = (LogQueryFilter) o;
-
return new EqualsBuilder().append(limit, that.limit).append(regionName, that.regionName)
.append(clientAddress, that.clientAddress).append(tableName, that.tableName)
.append(userName, that.userName).append(type, that.type)
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MasterCoprocessorRpcChannelImpl.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MasterCoprocessorRpcChannelImpl.java
index 4be0362be85..f36b3e16218 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MasterCoprocessorRpcChannelImpl.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MasterCoprocessorRpcChannelImpl.java
@@ -78,11 +78,11 @@ class MasterCoprocessorRpcChannelImpl implements RpcChannel {
Message responsePrototype, RpcCallback done) {
addListener(
callerBuilder.action((c, s) -> rpcCall(method, request, responsePrototype, c, s)).call(),
- ((r, e) -> {
+ (r, e) -> {
if (e != null) {
((ClientCoprocessorRpcController) controller).setFailed(e);
}
done.run(r);
- }));
+ });
}
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MasterRegistry.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MasterRegistry.java
index a031d353097..28ac5bb4528 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MasterRegistry.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MasterRegistry.java
@@ -34,6 +34,7 @@ import org.apache.hadoop.hbase.util.DNS.ServerType;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
+import org.apache.hbase.thirdparty.com.google.common.base.Splitter;
import org.apache.hbase.thirdparty.com.google.common.base.Strings;
import org.apache.hbase.thirdparty.com.google.common.net.HostAndPort;
@@ -78,7 +79,8 @@ public class MasterRegistry extends AbstractRpcBasedConnectionRegistry {
public static Set parseMasterAddrs(Configuration conf) throws UnknownHostException {
Set masterAddrs = new HashSet<>();
String configuredMasters = getMasterAddr(conf);
- for (String masterAddr : configuredMasters.split(MASTER_ADDRS_CONF_SEPARATOR)) {
+ for (String masterAddr : Splitter.onPattern(MASTER_ADDRS_CONF_SEPARATOR)
+ .split(configuredMasters)) {
HostAndPort masterHostPort =
HostAndPort.fromString(masterAddr.trim()).withDefaultPort(HConstants.DEFAULT_MASTER_PORT);
masterAddrs.add(ServerName.valueOf(masterHostPort.toString(), ServerName.NON_STARTCODE));
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiResponse.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiResponse.java
index b768b76fdb6..341341b4336 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiResponse.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MultiResponse.java
@@ -53,11 +53,7 @@ public class MultiResponse extends AbstractResponse {
return size;
}
- /**
- * Add the pair to the container, grouped by the regionName n * @param originalIndex the original
- * index of the Action (request).
- * @param resOrEx the result or error; will be empty for successful Put and Delete actions.
- */
+ /** Add the pair to the container, grouped by the regionName. */
public void add(byte[] regionName, int originalIndex, Object resOrEx) {
getResult(regionName).addResult(originalIndex, resOrEx);
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MutableRegionInfo.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MutableRegionInfo.java
index 7627f8dc0e5..fbb76ea4f65 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MutableRegionInfo.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/MutableRegionInfo.java
@@ -230,6 +230,7 @@ class MutableRegionInfo implements RegionInfo {
}
/**
+ * Change the split status flag.
* @param split set split status n
*/
public MutableRegionInfo setSplit(boolean split) {
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java
index 3f82e4110eb..df9e92f74dc 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Mutation.java
@@ -156,7 +156,7 @@ public abstract class Mutation extends OperationWithAttributes
return list;
}
- /*
+ /**
* Create a KeyValue with this objects row key and the Put identifier.
* @return a KeyValue with this objects row key and the Put identifier.
*/
@@ -165,8 +165,7 @@ public abstract class Mutation extends OperationWithAttributes
}
/**
- * Create a KeyValue with this objects row key and the Put identifier. nnnn * @param tags -
- * Specify the Tags as an Array
+ * Create a KeyValue with this objects row key and the Put identifier.
* @return a KeyValue with this objects row key and the Put identifier.
*/
KeyValue createPutKeyValue(byte[] family, byte[] qualifier, long ts, byte[] value, Tag[] tags) {
@@ -174,7 +173,7 @@ public abstract class Mutation extends OperationWithAttributes
return kvWithTag;
}
- /*
+ /**
* Create a KeyValue with this objects row key and the Put identifier.
* @return a KeyValue with this objects row key and the Put identifier.
*/
@@ -187,7 +186,7 @@ public abstract class Mutation extends OperationWithAttributes
/**
* Compile the column family (i.e. schema) information into a Map. Useful for parsing and
- * aggregation by debugging, logging, and administration tools. n
+ * aggregation by debugging, logging, and administration tools.
*/
@Override
public Map getFingerprint() {
@@ -261,7 +260,7 @@ public abstract class Mutation extends OperationWithAttributes
if (tags != null) {
List tagsString = new ArrayList<>(tags.size());
for (Tag t : tags) {
- tagsString.add((t.getType()) + ":" + Bytes.toStringBinary(Tag.cloneValue(t)));
+ tagsString.add(t.getType() + ":" + Bytes.toStringBinary(Tag.cloneValue(t)));
}
stringMap.put("tag", tagsString);
}
@@ -445,6 +444,7 @@ public abstract class Mutation extends OperationWithAttributes
}
/**
+ * Set the ACL for this operation.
* @param user User short name
* @param perms Permissions for the user
*/
@@ -455,6 +455,7 @@ public abstract class Mutation extends OperationWithAttributes
}
/**
+ * Set the ACL for this operation.
* @param perms A map of permissions for a user or users
*/
public Mutation setACL(Map perms) {
@@ -596,11 +597,9 @@ public abstract class Mutation extends OperationWithAttributes
return filteredList;
}
- /*
+ /**
* Private method to determine if this object's familyMap contains the given value assigned to the
- * given family, qualifier and timestamp respecting the 2 boolean arguments nnnnnn * @return
- * returns true if the given family, qualifier timestamp and value already has an existing
- * KeyValue object in the family map.
+ * given family, qualifier and timestamp, respecting the 2 boolean arguments.
*/
protected boolean has(byte[] family, byte[] qualifier, long ts, byte[] value, boolean ignoreTS,
boolean ignoreValue) {
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Put.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Put.java
index bcc1bda9ef0..5e821f07546 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Put.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Put.java
@@ -203,6 +203,7 @@ public class Put extends Mutation implements HeapSize {
* immutable and its backing array will not be modified for the duration of this Put.
* @param cell individual cell n * @throws java.io.IOException e
*/
+ @Override
public Put add(Cell cell) throws IOException {
super.add(cell);
return this;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Query.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Query.java
index 750993c1a8a..cf892ae0d74 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Query.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Query.java
@@ -88,6 +88,7 @@ public abstract class Query extends OperationWithAttributes {
}
/**
+ * Set the ACL for the operation.
* @param user User short name
* @param perms Permissions for the user
*/
@@ -98,6 +99,7 @@ public abstract class Query extends OperationWithAttributes {
}
/**
+ * Set the ACL for the operation.
* @param perms A map of permissions for a user or users
*/
public Query setACL(Map perms) {
@@ -159,9 +161,8 @@ public abstract class Query extends OperationWithAttributes {
}
/**
- * @return The isolation level of this query. If no isolation level was set for this query object,
- * then it returns READ_COMMITTED.
- * @return The IsolationLevel for this query
+ * Returns The isolation level of this query. If no isolation level was set for this query object,
+ * then it returns READ_COMMITTED.
*/
public IsolationLevel getIsolationLevel() {
byte[] attr = getAttribute(ISOLATION_LEVEL);
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.java
index 3ec2c741293..4d614907326 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncHBaseAdmin.java
@@ -502,11 +502,6 @@ class RawAsyncHBaseAdmin implements AsyncAdmin {
return future;
}
- @FunctionalInterface
- private interface TableOperator {
- CompletableFuture operate(TableName table);
- }
-
@Override
public CompletableFuture tableExists(TableName tableName) {
if (TableName.isMetaTableName(tableName)) {
@@ -1559,11 +1554,13 @@ class RawAsyncHBaseAdmin implements AsyncAdmin {
future.completeExceptionally(err);
return;
}
- addListener(this. newMasterCaller().priority(regionInfo.getTable())
- .action(((controller, stub) -> this. call(
- controller, stub, RequestConverter.buildAssignRegionRequest(regionInfo.getRegionName()),
- (s, c, req, done) -> s.assignRegion(c, req, done), resp -> null)))
- .call(), (ret, err2) -> {
+ addListener(
+ this. newMasterCaller().priority(regionInfo.getTable())
+ .action((controller, stub) -> this. call(
+ controller, stub, RequestConverter.buildAssignRegionRequest(regionInfo.getRegionName()),
+ (s, c, req, done) -> s.assignRegion(c, req, done), resp -> null))
+ .call(),
+ (ret, err2) -> {
if (err2 != null) {
future.completeExceptionally(err2);
} else {
@@ -1584,10 +1581,10 @@ class RawAsyncHBaseAdmin implements AsyncAdmin {
}
addListener(
this. newMasterCaller().priority(regionInfo.getTable())
- .action(((controller, stub) -> this. this. call(controller, stub,
RequestConverter.buildUnassignRegionRequest(regionInfo.getRegionName()),
- (s, c, req, done) -> s.unassignRegion(c, req, done), resp -> null)))
+ (s, c, req, done) -> s.unassignRegion(c, req, done), resp -> null))
.call(),
(ret, err2) -> {
if (err2 != null) {
@@ -1608,14 +1605,11 @@ class RawAsyncHBaseAdmin implements AsyncAdmin {
future.completeExceptionally(err);
return;
}
- addListener(
- this. newMasterCaller().priority(regionInfo.getTable())
- .action(((controller, stub) -> this. call(controller, stub,
- RequestConverter.buildOfflineRegionRequest(regionInfo.getRegionName()),
- (s, c, req, done) -> s.offlineRegion(c, req, done), resp -> null)))
- .call(),
- (ret, err2) -> {
+ addListener(this. newMasterCaller().priority(regionInfo.getTable())
+ .action((controller, stub) -> this. call(
+ controller, stub, RequestConverter.buildOfflineRegionRequest(regionInfo.getRegionName()),
+ (s, c, req, done) -> s.offlineRegion(c, req, done), resp -> null))
+ .call(), (ret, err2) -> {
if (err2 != null) {
future.completeExceptionally(err2);
} else {
@@ -2236,7 +2230,7 @@ class RawAsyncHBaseAdmin implements AsyncAdmin {
listSnapshotsFuture = getCompletedSnapshots(tableNamePattern, snapshotNamePattern);
}
CompletableFuture future = new CompletableFuture<>();
- addListener(listSnapshotsFuture, ((snapshotDescriptions, err) -> {
+ addListener(listSnapshotsFuture, (snapshotDescriptions, err) -> {
if (err != null) {
future.completeExceptionally(err);
return;
@@ -2253,7 +2247,7 @@ class RawAsyncHBaseAdmin implements AsyncAdmin {
future.complete(v);
}
});
- }));
+ });
return future;
}
@@ -4010,10 +4004,9 @@ class RawAsyncHBaseAdmin implements AsyncAdmin {
@Override
public CompletableFuture addRSGroup(String groupName) {
return this. newMasterCaller()
- .action(
- ((controller, stub) -> this. call(controller,
- stub, AddRSGroupRequest.newBuilder().setRSGroupName(groupName).build(),
- (s, c, req, done) -> s.addRSGroup(c, req, done), resp -> null)))
+ .action((controller, stub) -> this. call(
+ controller, stub, AddRSGroupRequest.newBuilder().setRSGroupName(groupName).build(),
+ (s, c, req, done) -> s.addRSGroup(c, req, done), resp -> null))
.call();
}
@@ -4081,9 +4074,9 @@ class RawAsyncHBaseAdmin implements AsyncAdmin {
private CompletableFuture clearSlowLogsResponses(final ServerName serverName) {
return this. newAdminCaller()
- .action(((controller, stub) -> this.adminCall(controller, stub,
+ .action((controller, stub) -> this.adminCall(controller, stub,
RequestConverter.buildClearSlowLogResponseRequest(),
- AdminService.Interface::clearSlowLogsResponses, ProtobufUtil::toClearSlowLogPayload)))
+ AdminService.Interface::clearSlowLogsResponses, ProtobufUtil::toClearSlowLogPayload))
.serverName(serverName).call();
}
@@ -4124,15 +4117,14 @@ class RawAsyncHBaseAdmin implements AsyncAdmin {
@Override
public CompletableFuture getRSGroup(Address hostPort) {
return this. newMasterCaller()
- .action(
- ((controller, stub) -> this. call(controller, stub, GetRSGroupInfoOfServerRequest.newBuilder()
+ .action((controller, stub) -> this. call(controller, stub,
+ GetRSGroupInfoOfServerRequest.newBuilder()
.setServer(HBaseProtos.ServerName.newBuilder().setHostName(hostPort.getHostname())
.setPort(hostPort.getPort()).build())
- .build(), (s, c, req, done) -> s.getRSGroupInfoOfServer(c, req, done),
- resp -> resp.hasRSGroupInfo()
- ? ProtobufUtil.toGroupInfo(resp.getRSGroupInfo())
- : null)))
+ .build(),
+ (s, c, req, done) -> s.getRSGroupInfoOfServer(c, req, done),
+ resp -> resp.hasRSGroupInfo() ? ProtobufUtil.toGroupInfo(resp.getRSGroupInfo()) : null))
.call();
}
@@ -4160,7 +4152,7 @@ class RawAsyncHBaseAdmin implements AsyncAdmin {
}
});
}
- addListener(listTableDescriptors(new ArrayList<>(tables)), ((tableDescriptions, err) -> {
+ addListener(listTableDescriptors(new ArrayList<>(tables)), (tableDescriptions, err) -> {
if (err != null) {
future.completeExceptionally(err);
return;
@@ -4184,40 +4176,40 @@ class RawAsyncHBaseAdmin implements AsyncAdmin {
future.complete(v);
}
});
- }));
+ });
return future;
}
@Override
public CompletableFuture getRSGroup(TableName table) {
return this. newMasterCaller()
- .action(((controller, stub) -> this. this. call(controller, stub,
GetRSGroupInfoOfTableRequest.newBuilder()
.setTableName(ProtobufUtil.toProtoTableName(table)).build(),
(s, c, req, done) -> s.getRSGroupInfoOfTable(c, req, done),
- resp -> resp.hasRSGroupInfo() ? ProtobufUtil.toGroupInfo(resp.getRSGroupInfo()) : null)))
+ resp -> resp.hasRSGroupInfo() ? ProtobufUtil.toGroupInfo(resp.getRSGroupInfo()) : null))
.call();
}
@Override
public CompletableFuture getRSGroup(String groupName) {
return this. newMasterCaller()
- .action(((controller, stub) -> this. this. call(controller, stub,
GetRSGroupInfoRequest.newBuilder().setRSGroupName(groupName).build(),
(s, c, req, done) -> s.getRSGroupInfo(c, req, done),
- resp -> resp.hasRSGroupInfo() ? ProtobufUtil.toGroupInfo(resp.getRSGroupInfo()) : null)))
+ resp -> resp.hasRSGroupInfo() ? ProtobufUtil.toGroupInfo(resp.getRSGroupInfo()) : null))
.call();
}
@Override
public CompletableFuture renameRSGroup(String oldName, String newName) {
return this. newMasterCaller()
- .action(((controller, stub) -> this. call(
+ .action((controller, stub) -> this. call(
controller, stub, RenameRSGroupRequest.newBuilder().setOldRsgroupName(oldName)
.setNewRsgroupName(newName).build(),
- (s, c, req, done) -> s.renameRSGroup(c, req, done), resp -> null)))
+ (s, c, req, done) -> s.renameRSGroup(c, req, done), resp -> null))
.call();
}
@@ -4231,9 +4223,9 @@ class RawAsyncHBaseAdmin implements AsyncAdmin {
NameStringPair.newBuilder().setName(e.getKey()).setValue(e.getValue()).build()));
}
return this. newMasterCaller()
- .action(((controller, stub) -> this. this. call(controller, stub, request.build(),
- (s, c, req, done) -> s.updateRSGroupConfig(c, req, done), resp -> null)))
+ (s, c, req, done) -> s.updateRSGroupConfig(c, req, done), resp -> null))
.call();
}
@@ -4287,9 +4279,9 @@ class RawAsyncHBaseAdmin implements AsyncAdmin {
public CompletableFuture flushMasterStore() {
FlushMasterStoreRequest.Builder request = FlushMasterStoreRequest.newBuilder();
return this. newMasterCaller()
- .action(((controller, stub) -> this. this. call(controller, stub, request.build(),
- (s, c, req, done) -> s.flushMasterStore(c, req, done), resp -> null)))
+ (s, c, req, done) -> s.flushMasterStore(c, req, done), resp -> null))
.call();
}
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncTableImpl.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncTableImpl.java
index af0b2090803..ff75c0725ce 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncTableImpl.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RawAsyncTableImpl.java
@@ -776,7 +776,7 @@ class RawAsyncTableImpl implements AsyncTable {
int c = Bytes.compareTo(endKey, region.getEndKey());
// 1. if the region contains endKey
// 2. endKey is equal to the region's endKey and we do not want to include endKey.
- return c < 0 || c == 0 && !endKeyInclusive;
+ return c < 0 || (c == 0 && !endKeyInclusive);
}
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionCoprocessorServiceExec.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionCoprocessorServiceExec.java
index 4bf72607946..de3bc9a90d4 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionCoprocessorServiceExec.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionCoprocessorServiceExec.java
@@ -92,7 +92,7 @@ public class RegionCoprocessorServiceExec implements Row {
if (this == obj) {
return true;
}
- if (obj == null || getClass() != obj.getClass()) {
+ if (!(obj instanceof RegionCoprocessorServiceExec)) {
return false;
}
return compareTo((RegionCoprocessorServiceExec) obj) == 0;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionInfo.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionInfo.java
index 32c7da05887..d1384d71469 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionInfo.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionInfo.java
@@ -160,7 +160,7 @@ public interface RegionInfo extends Comparable {
long getRegionId();
/**
- * @return the regionName as an array of bytes.
+ * Returns the regionName as an array of bytes.
* @see #getRegionNameAsString()
*/
byte[] getRegionName();
@@ -190,7 +190,7 @@ public interface RegionInfo extends Comparable {
boolean isSplit();
/**
- * @return True if this region is offline.
+ * Returns True if this region is offline.
* @deprecated since 3.0.0 and will be removed in 4.0.0
* @see HBASE-25210
*/
@@ -198,7 +198,7 @@ public interface RegionInfo extends Comparable {
boolean isOffline();
/**
- * @return True if this is a split parent region.
+ * Returns True if this is a split parent region.
* @deprecated since 3.0.0 and will be removed in 4.0.0, Use {@link #isSplit()} instead.
* @see HBASE-25210
*/
@@ -209,9 +209,9 @@ public interface RegionInfo extends Comparable {
boolean isMetaRegion();
/**
- * @return true if the given inclusive range of rows is fully contained by this region. For
- * example, if the region is foo,a,g and this is passed ["b","c"] or ["a","c"] it will
- * return true, but if this is passed ["b","z"] it will return false.
+ * Returns true if the given inclusive range of rows is fully contained by this region. For
+ * example, if the region is foo,a,g and this is passed ["b","c"] or ["a","c"] it will return
+ * true, but if this is passed ["b","z"] it will return false.
* @throws IllegalArgumentException if the range passed is invalid (ie. end < start)
*/
boolean containsRange(byte[] rangeStartKey, byte[] rangeEndKey);
@@ -271,16 +271,16 @@ public interface RegionInfo extends Comparable {
}
/**
- * @return Return a String of short, printable names for hris
(usually encoded name)
- * for us logging.
+ * Returns a String of short, printable names for hris
(usually encoded name) for us
+ * logging.
*/
static String getShortNameToLog(RegionInfo... hris) {
return getShortNameToLog(Arrays.asList(hris));
}
/**
- * @return Return a String of short, printable names for hris
(usually encoded name)
- * for us logging.
+ * Returns a String of short, printable names for hris
(usually encoded name) for us
+ * logging.
*/
static String getShortNameToLog(final List ris) {
return ris.stream().map(RegionInfo::getEncodedName).collect(Collectors.toList()).toString();
@@ -369,8 +369,7 @@ public interface RegionInfo extends Comparable {
}
/**
- * @param bytes A pb RegionInfo serialized with a pb magic prefix.
- * @return A deserialized {@link RegionInfo}
+ * Returns A deserialized {@link RegionInfo}
*/
@InterfaceAudience.Private
static RegionInfo parseFrom(final byte[] bytes) throws DeserializationException {
@@ -379,6 +378,7 @@ public interface RegionInfo extends Comparable {
}
/**
+ * Parse a serialized representation of {@link RegionInfo}
* @param bytes A pb RegionInfo serialized with a pb magic prefix.
* @param offset starting point in the byte array
* @param len length to read on the byte array
@@ -423,7 +423,7 @@ public interface RegionInfo extends Comparable {
}
/**
- * @return This instance serialized as protobuf w/ a magic pb prefix.
+ * Returns This instance serialized as protobuf w/ a magic pb prefix.
* @see #parseFrom(byte[])
*/
static byte[] toByteArray(RegionInfo ri) {
@@ -751,7 +751,7 @@ public interface RegionInfo extends Comparable {
}
/**
- * @return True if region is next, adjacent but 'after' this one.
+ * Returns True if region is next, adjacent but 'after' this one.
* @see #isAdjacent(RegionInfo)
* @see #areAdjacent(RegionInfo, RegionInfo)
*/
@@ -760,7 +760,7 @@ public interface RegionInfo extends Comparable {
}
/**
- * @return True if region is adjacent, either just before or just after this one.
+ * Returns True if region is adjacent, either just before or just after this one.
* @see #isNext(RegionInfo)
*/
default boolean isAdjacent(RegionInfo other) {
@@ -773,7 +773,7 @@ public interface RegionInfo extends Comparable {
}
/**
- * @return True if an overlap in region range.
+ * Returns True if an overlap in region range.
* @see #isDegenerate()
*/
default boolean isOverlap(RegionInfo other) {
@@ -799,6 +799,7 @@ public interface RegionInfo extends Comparable {
return Bytes.compareTo(getStartKey(), other.getEndKey()) < 0;
}
+ @Override
default int compareTo(RegionInfo other) {
return RegionInfo.COMPARATOR.compare(this, other);
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionReplicaUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionReplicaUtil.java
index 4475a01c0f9..1d6708b49d1 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionReplicaUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionReplicaUtil.java
@@ -51,7 +51,7 @@ public class RegionReplicaUtil {
/**
* Returns the RegionInfo for the given replicaId. RegionInfo's correspond to a range of a table,
* but more than one "instance" of the same range can be deployed which are differentiated by the
- * replicaId. n * @param replicaId the replicaId to use
+ * replicaId.
* @return an RegionInfo object corresponding to the same range (table, start and end key), but
* for the given replicaId.
*/
@@ -149,7 +149,7 @@ public class RegionReplicaUtil {
if ((newReplicaCount - 1) <= 0) {
return regions;
}
- List hRegionInfos = new ArrayList<>((newReplicaCount) * regions.size());
+ List hRegionInfos = new ArrayList<>(newReplicaCount * regions.size());
for (RegionInfo ri : regions) {
if (
RegionReplicaUtil.isDefaultReplica(ri)
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionServerCoprocessorRpcChannelImpl.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionServerCoprocessorRpcChannelImpl.java
index 29b092cad88..ed5fd16dab3 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionServerCoprocessorRpcChannelImpl.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RegionServerCoprocessorRpcChannelImpl.java
@@ -78,11 +78,11 @@ public class RegionServerCoprocessorRpcChannelImpl implements RpcChannel {
Message responsePrototype, RpcCallback done) {
addListener(
callerBuilder.action((c, s) -> rpcCall(method, request, responsePrototype, c, s)).call(),
- ((r, e) -> {
+ (r, e) -> {
if (e != null) {
((ClientCoprocessorRpcController) controller).setFailed(e);
}
done.run(r);
- }));
+ });
}
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Result.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Result.java
index 1717a9f6964..a1ab6075fe7 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Result.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Result.java
@@ -894,8 +894,8 @@ public class Result implements CellScannable, CellScanner {
}
/**
- * @return the associated statistics about the region from which this was returned. Can be
- * null if stats are disabled.
+ * Returns the associated statistics about the region from which this was returned. Can be
+ * null if stats are disabled.
*/
public RegionLoadStats getStats() {
return stats;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RowAccess.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RowAccess.java
index cd1f4756079..1af8798fd87 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RowAccess.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RowAccess.java
@@ -22,7 +22,6 @@ import org.apache.yetus.audience.InterfaceAudience;
/**
* Provide a way to access the inner buffer. The purpose is to reduce the elapsed time to move a
* large number of elements between collections.
- * @param
*/
@InterfaceAudience.Private
public interface RowAccess extends Iterable {
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RowMutations.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RowMutations.java
index 0cf7f69e9b4..be44c26190b 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RowMutations.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/RowMutations.java
@@ -70,6 +70,7 @@ public class RowMutations implements Row {
}
/**
+ * Add a mutation
* @param mutation The data to send.
* @throws IOException if the row of added mutation doesn't match the original row
*/
@@ -78,6 +79,7 @@ public class RowMutations implements Row {
}
/**
+ * Add a list of mutations
* @param mutations The data to send.
* @throws IOException if the row of added mutation doesn't match the original row
*/
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java
index 7e557fcfdb7..bdca990ca54 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Scan.java
@@ -710,9 +710,9 @@ public class Scan extends Query {
}
/**
- * @return true when the constructor of this scan understands that the results they will see may
- * only represent a partial portion of a row. The entire row would be retrieved by
- * subsequent calls to {@link ResultScanner#next()}
+ * Returns true when the constructor of this scan understands that the results they will see may
+ * only represent a partial portion of a row. The entire row would be retrieved by subsequent
+ * calls to {@link ResultScanner#next()}
*/
public boolean getAllowPartialResults() {
return allowPartialResults;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScanResultConsumer.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScanResultConsumer.java
index b574b2c2bd5..44f8e194045 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScanResultConsumer.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ScanResultConsumer.java
@@ -30,8 +30,7 @@ import org.apache.yetus.audience.InterfaceAudience;
public interface ScanResultConsumer extends ScanResultConsumerBase {
/**
- * @param result the data fetched from HBase service.
- * @return {@code false} if you want to terminate the scan process. Otherwise {@code true}
+ * Return {@code false} if you want to terminate the scan process. Otherwise {@code true}
*/
boolean onNext(Result result);
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ServiceCaller.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ServiceCaller.java
index 501f412bc57..fb4acbdf657 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ServiceCaller.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/ServiceCaller.java
@@ -28,20 +28,16 @@ import org.apache.hbase.thirdparty.com.google.protobuf.RpcController;
* Usually, it is just a simple lambda expression, like:
*
*
- *
- * (stub, controller, rpcCallback) -> {
+ * (stub, controller, rpcCallback) -> {
* XXXRequest request = ...; // prepare the request
* stub.xxx(controller, request, rpcCallback);
* }
- *
*
*
* And if already have the {@code request}, the lambda expression will be:
*
*
- *
- * (stub, controller, rpcCallback) -> stub.xxx(controller, request, rpcCallback)
- *
+ * (stub, controller, rpcCallback) -> stub.xxx(controller, request, rpcCallback)
*
*
* @param the type of the protobuf Service you want to call.
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SlowLogParams.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SlowLogParams.java
index 3311539c261..b1460c0b116 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SlowLogParams.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/SlowLogParams.java
@@ -62,13 +62,10 @@ public class SlowLogParams {
if (this == o) {
return true;
}
-
- if (o == null || getClass() != o.getClass()) {
+ if (!(o instanceof SlowLogParams)) {
return false;
}
-
SlowLogParams that = (SlowLogParams) o;
-
return new EqualsBuilder().append(regionName, that.regionName).append(params, that.params)
.isEquals();
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Table.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Table.java
index 0f93ab21a2c..53c33a667c3 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Table.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/Table.java
@@ -30,7 +30,6 @@ import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.CompareOperator;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.coprocessor.Batch;
-import org.apache.hadoop.hbase.client.coprocessor.Batch.Callback;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.io.TimeRange;
import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
@@ -302,11 +301,13 @@ public interface Table extends Closeable {
interface CheckAndMutateBuilder {
/**
+ * Specify a column qualifer
* @param qualifier column qualifier to check.
*/
CheckAndMutateBuilder qualifier(byte[] qualifier);
/**
+ * Specify a timerange
* @param timeRange timeRange to check
*/
CheckAndMutateBuilder timeRange(TimeRange timeRange);
@@ -325,24 +326,28 @@ public interface Table extends Closeable {
}
/**
+ * Check for match.
* @param compareOp comparison operator to use
* @param value the expected value
*/
CheckAndMutateBuilder ifMatches(CompareOperator compareOp, byte[] value);
/**
+ * Specify a Put to commit if the check succeeds.
* @param put data to put if check succeeds
* @return {@code true} if the new put was executed, {@code false} otherwise.
*/
boolean thenPut(Put put) throws IOException;
/**
+ * Specify a Delete to commit if the check succeeds.
* @param delete data to delete if check succeeds
* @return {@code true} if the new delete was executed, {@code false} otherwise.
*/
boolean thenDelete(Delete delete) throws IOException;
/**
+ * Specify a RowMutations to commit if the check succeeds.
* @param mutation mutations to perform if check succeeds
* @return true if the new mutation was executed, false otherwise.
*/
@@ -379,23 +384,27 @@ public interface Table extends Closeable {
interface CheckAndMutateWithFilterBuilder {
/**
+ * Specify a timerange.
* @param timeRange timeRange to check
*/
CheckAndMutateWithFilterBuilder timeRange(TimeRange timeRange);
/**
+ * Specify a Put to commit if the check succeeds.
* @param put data to put if check succeeds
* @return {@code true} if the new put was executed, {@code false} otherwise.
*/
boolean thenPut(Put put) throws IOException;
/**
+ * Specify a Delete to commit if the check succeeds.
* @param delete data to delete if check succeeds
* @return {@code true} if the new delete was executed, {@code false} otherwise.
*/
boolean thenDelete(Delete delete) throws IOException;
/**
+ * Specify a RowMutations to commit if the check succeeds.
* @param mutation mutations to perform if check succeeds
* @return true if the new mutation was executed, false otherwise.
*/
@@ -660,7 +669,7 @@ public interface Table extends Closeable {
final Map results =
Collections.synchronizedMap(new TreeMap(Bytes.BYTES_COMPARATOR));
batchCoprocessorService(methodDescriptor, request, startKey, endKey, responsePrototype,
- new Callback() {
+ new Batch.Callback() {
@Override
public void update(byte[] region, byte[] row, R result) {
if (region != null) {
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptor.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptor.java
index f960f1c748c..f500a1128a5 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptor.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptor.java
@@ -267,8 +267,8 @@ public interface TableDescriptor {
boolean isReadOnly();
/**
- * @return Name of this table and then a map of all of the column family descriptors (with only
- * the non-default column family attributes)
+ * Returns Name of this table and then a map of all of the column family descriptors (with only
+ * the non-default column family attributes)
*/
String toStringCustomizedValues();
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptorBuilder.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptorBuilder.java
index 3fa39ee8e85..d0d3e36aa8f 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptorBuilder.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableDescriptorBuilder.java
@@ -143,8 +143,6 @@ public class TableDescriptorBuilder {
private static final Bytes REGION_MEMSTORE_REPLICATION_KEY =
new Bytes(Bytes.toBytes(REGION_MEMSTORE_REPLICATION));
- private static final Bytes REGION_REPLICA_WAIT_FOR_PRIMARY_FLUSH_CONF_KEY =
- new Bytes(Bytes.toBytes(RegionReplicaUtil.REGION_REPLICA_WAIT_FOR_PRIMARY_FLUSH_CONF_KEY));
/**
* Used by shell/rest interface to access this metadata attribute which denotes if the table
* should be treated by region normalizer.
@@ -302,10 +300,7 @@ public class TableDescriptorBuilder {
private final ModifyableTableDescriptor desc;
- /**
- * @param desc The table descriptor to serialize
- * @return This instance serialized with pb with pb magic prefix
- */
+ /** Returns This instance serialized with pb with pb magic prefix */
public static byte[] toByteArray(TableDescriptor desc) {
if (desc instanceof ModifyableTableDescriptor) {
return ((ModifyableTableDescriptor) desc).toByteArray();
@@ -317,7 +312,7 @@ public class TableDescriptorBuilder {
* The input should be created by {@link #toByteArray}.
* @param pbBytes A pb serialized TableDescriptor instance with pb magic prefix
* @return This instance serialized with pb with pb magic prefix
- * @throws org.apache.hadoop.hbase.exceptions.DeserializationException
+ * @throws org.apache.hadoop.hbase.exceptions.DeserializationException if an error occurred
*/
public static TableDescriptor parseFrom(byte[] pbBytes) throws DeserializationException {
return ModifyableTableDescriptor.parseFrom(pbBytes);
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableOverAsyncTable.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableOverAsyncTable.java
index d4db9eb49e6..e1565f18159 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableOverAsyncTable.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableOverAsyncTable.java
@@ -47,8 +47,7 @@ import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.HRegionLocation;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.RetriesExhaustedException.ThrowableWithExtraContext;
-import org.apache.hadoop.hbase.client.coprocessor.Batch.Call;
-import org.apache.hadoop.hbase.client.coprocessor.Batch.Callback;
+import org.apache.hadoop.hbase.client.coprocessor.Batch;
import org.apache.hadoop.hbase.client.trace.TableOperationSpanBuilder;
import org.apache.hadoop.hbase.filter.Filter;
import org.apache.hadoop.hbase.io.TimeRange;
@@ -143,8 +142,8 @@ class TableOverAsyncTable implements Table {
}
@Override
- public <R> void batchCallback(List<? extends Row> actions, Object[] results, Callback<R> callback)
- throws IOException, InterruptedException {
+ public <R> void batchCallback(List<? extends Row> actions, Object[] results,
+ Batch.Callback<R> callback) throws IOException, InterruptedException {
ConcurrentLinkedQueue errors = new ConcurrentLinkedQueue<>();
CountDownLatch latch = new CountDownLatch(actions.size());
AsyncTableRegionLocator locator = conn.getRegionLocator(getName());
@@ -467,7 +466,7 @@ class TableOverAsyncTable implements Table {
}
private void coprocessorService(String serviceName, byte[] startKey, byte[] endKey,
- Callback<R> callback, StubCall<R> call) throws Throwable {
+ Batch.Callback<R> callback, StubCall<R> call) throws Throwable {
// get regions covered by the row range
ExecutorService pool = Context.current().wrap(this.poolSupplier.get());
List keys = getStartKeysInRange(startKey, endKey);
@@ -509,7 +508,8 @@ class TableOverAsyncTable implements Table {
@Override
public void coprocessorService(Class service, byte[] startKey,
- byte[] endKey, Call<T, R> callable, Callback<R> callback) throws ServiceException, Throwable {
+ byte[] endKey, Batch.Call<T, R> callable, Batch.Callback<R> callback)
+ throws ServiceException, Throwable {
final Supplier supplier = new TableOperationSpanBuilder(conn)
.setTableName(table.getName()).setOperation(HBaseSemanticAttributes.Operation.COPROC_EXEC);
TraceUtil.trace(() -> {
@@ -526,8 +526,8 @@ class TableOverAsyncTable implements Table {
@SuppressWarnings("unchecked")
@Override
public void batchCoprocessorService(MethodDescriptor methodDescriptor,
- Message request, byte[] startKey, byte[] endKey, R responsePrototype, Callback<R> callback)
- throws ServiceException, Throwable {
+ Message request, byte[] startKey, byte[] endKey, R responsePrototype,
+ Batch.Callback<R> callback) throws ServiceException, Throwable {
final Supplier supplier = new TableOperationSpanBuilder(conn)
.setTableName(table.getName()).setOperation(HBaseSemanticAttributes.Operation.COPROC_EXEC);
TraceUtil.trace(() -> {
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableState.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableState.java
index 6ecd97a75c9..4e20302be45 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableState.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/TableState.java
@@ -209,15 +209,19 @@ public class TableState {
@Override
public boolean equals(Object o) {
- if (this == o) return true;
- if (o == null || getClass() != o.getClass()) return false;
-
- TableState that = (TableState) o;
-
- if (state != that.state) return false;
- if (tableName != null ? !tableName.equals(that.tableName) : that.tableName != null)
+ if (this == o) {
+ return true;
+ }
+ if (!(o instanceof TableState)) {
return false;
-
+ }
+ TableState that = (TableState) o;
+ if (state != that.state) {
+ return false;
+ }
+ if (tableName != null ? !tableName.equals(that.tableName) : that.tableName != null) {
+ return false;
+ }
return true;
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/backoff/ClientBackoffPolicyFactory.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/backoff/ClientBackoffPolicyFactory.java
index a786702b169..6993a4ac21a 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/backoff/ClientBackoffPolicyFactory.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/backoff/ClientBackoffPolicyFactory.java
@@ -22,15 +22,11 @@ import org.apache.hadoop.hbase.ServerName;
import org.apache.hadoop.hbase.util.ReflectionUtils;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
@InterfaceAudience.Private
@InterfaceStability.Evolving
public final class ClientBackoffPolicyFactory {
- private static final Logger LOG = LoggerFactory.getLogger(ClientBackoffPolicyFactory.class);
-
private ClientBackoffPolicyFactory() {
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/backoff/ExponentialClientBackoffPolicy.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/backoff/ExponentialClientBackoffPolicy.java
index aa84207e1ed..0e3339948ea 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/backoff/ExponentialClientBackoffPolicy.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/backoff/ExponentialClientBackoffPolicy.java
@@ -21,8 +21,6 @@ import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HConstants;
import org.apache.hadoop.hbase.ServerName;
import org.apache.yetus.audience.InterfaceAudience;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
@@ -33,8 +31,6 @@ import org.apache.hbase.thirdparty.com.google.common.base.Preconditions;
@InterfaceAudience.Public
public class ExponentialClientBackoffPolicy implements ClientBackoffPolicy {
- private static final Logger LOG = LoggerFactory.getLogger(ExponentialClientBackoffPolicy.class);
-
private static final long ONE_MINUTE = 60 * 1000;
public static final long DEFAULT_MAX_BACKOFF = 5 * ONE_MINUTE;
public static final String MAX_BACKOFF_KEY = "hbase.client.exponential-backoff.max";
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/BigDecimalColumnInterpreter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/BigDecimalColumnInterpreter.java
index 900f96440dc..1e42be4baad 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/BigDecimalColumnInterpreter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/coprocessor/BigDecimalColumnInterpreter.java
@@ -80,7 +80,7 @@ public class BigDecimalColumnInterpreter
@Override
public BigDecimal increment(BigDecimal bd) {
- return bd == null ? null : (bd.add(BigDecimal.ONE));
+ return bd == null ? null : bd.add(BigDecimal.ONE);
}
@Override
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationPeerConfigUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationPeerConfigUtil.java
index 4145a348b08..b2ddc4eaec1 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationPeerConfigUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/client/replication/ReplicationPeerConfigUtil.java
@@ -22,6 +22,7 @@ import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
+import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Set;
@@ -113,8 +114,8 @@ public final class ReplicationPeerConfigUtil {
}
ReplicationProtos.TableCF.Builder tableCFBuilder = ReplicationProtos.TableCF.newBuilder();
- String[] tables = tableCFsConfig.split(";");
- List<ReplicationProtos.TableCF> tableCFList = new ArrayList<>(tables.length);
+ List<String> tables = Splitter.on(';').splitToList(tableCFsConfig);
+ List<ReplicationProtos.TableCF> tableCFList = new ArrayList<>(tables.size());
for (String tab : tables) {
// 1 ignore empty table config
@@ -124,9 +125,9 @@ public final class ReplicationPeerConfigUtil {
}
// 2 split to "table" and "cf1,cf2"
// for each table: "table#cf1,cf2" or "table"
- String[] pair = tab.split(":");
- String tabName = pair[0].trim();
- if (pair.length > 2 || tabName.length() == 0) {
+ Iterator<String> i = Splitter.on(':').split(tab).iterator();
+ String tabName = i.next().trim();
+ if (tabName.length() == 0) {
LOG.info("incorrect format:" + tableCFsConfig);
continue;
}
@@ -135,16 +136,17 @@ public final class ReplicationPeerConfigUtil {
// split namespace from tableName
String ns = "default";
String tName = tabName;
- String[] dbs = tabName.split("\\.");
- if (dbs != null && dbs.length == 2) {
- ns = dbs[0];
- tName = dbs[1];
+ List<String> dbs = Splitter.on('.').splitToList(tabName);
+ if (dbs != null && dbs.size() == 2) {
+ Iterator<String> ii = dbs.iterator();
+ ns = ii.next();
+ tName = ii.next();
}
tableCFBuilder.setTableName(ProtobufUtil.toProtoTableName(TableName.valueOf(ns, tName)));
// 3 parse "cf1,cf2" part to List
- if (pair.length == 2) {
- String[] cfsList = pair[1].split(",");
+ if (i.hasNext()) {
+ List<String> cfsList = Splitter.on(',').splitToList(i.next());
for (String cf : cfsList) {
String cfName = cf.trim();
if (cfName.length() > 0) {
@@ -241,6 +243,7 @@ public final class ReplicationPeerConfigUtil {
}
/**
+ * Parse the serialized representation of a peer configuration.
* @param bytes Content of a peer znode.
* @return ClusterKey parsed from the passed bytes.
* @throws DeserializationException deserialization exception
@@ -384,10 +387,9 @@ public final class ReplicationPeerConfigUtil {
}
/**
- * @param peerConfig peer config of replication peer
- * @return Serialized protobuf of <code>peerConfig</code> with pb magic prefix prepended suitable
- * for use as content of a this.peersZNode; i.e. the content of PEER_ID znode under
- * /hbase/replication/peers/PEER_ID
+ * Returns Serialized protobuf of <code>peerConfig</code> with pb magic prefix prepended suitable
+ * for use as content of a this.peersZNode; i.e. the content of PEER_ID znode under
+ * /hbase/replication/peers/PEER_ID
*/
public static byte[] toByteArray(final ReplicationPeerConfig peerConfig) {
byte[] bytes = convert(peerConfig).toByteArray();
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/coprocessor/ColumnInterpreter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/coprocessor/ColumnInterpreter.java
index 0e24e0c9814..73e3b53eb36 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/coprocessor/ColumnInterpreter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/coprocessor/ColumnInterpreter.java
@@ -40,11 +40,12 @@ import org.apache.hbase.thirdparty.com.google.protobuf.Message;
* {@link ColumnInterpreter#castToReturnType(Object)} which takes a <T> type and returns a
* <S> type. The AggregateIm>lementation uses PB messages to initialize the user's
* ColumnInterpreter implementation, and for sending the responses back to AggregationClient.
- * @param T Cell value data type
- * @param S Promoted data type
- * @param P PB message that is used to transport initializer specific bytes
- * @param Q PB message that is used to transport Cell (<T>) instance
- * @param R PB message that is used to transport Promoted (<S>) instance
+ *
+ * <T> Cell value data type
+ * <S> Promoted data type
+ * <P> PB message that is used to transport initializer specific bytes
+ * <Q> PB message that is used to transport Cell (<T>) instance
+ * <R> PB message that is used to transport Promoted (<S>) instance
*/
@InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.COPROC)
@InterfaceStability.Evolving
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/RegionOpeningException.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/RegionOpeningException.java
index 347c6b987a1..bef80b2cb82 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/RegionOpeningException.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/exceptions/RegionOpeningException.java
@@ -20,8 +20,6 @@ package org.apache.hadoop.hbase.exceptions;
import org.apache.hadoop.hbase.NotServingRegionException;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
/**
* Subclass if the server knows the region is now on another server. This allows the client to call
@@ -30,7 +28,7 @@ import org.slf4j.LoggerFactory;
@InterfaceAudience.Private
@InterfaceStability.Evolving
public class RegionOpeningException extends NotServingRegionException {
- private static final Logger LOG = LoggerFactory.getLogger(RegionOpeningException.class);
+
private static final long serialVersionUID = -7232903522310558395L;
public RegionOpeningException(String message) {
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BigDecimalComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BigDecimalComparator.java
index 669a591a961..b37bec4fc51 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BigDecimalComparator.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BigDecimalComparator.java
@@ -82,9 +82,10 @@ public class BigDecimalComparator extends ByteArrayComparable {
}
/**
+ * Parse a serialized representation of {@link BigDecimalComparator}
* @param pbBytes A pb serialized {@link BigDecimalComparator} instance
* @return An instance of {@link BigDecimalComparator} made from bytes
- * @throws DeserializationException A deserialization exception
+ * @throws DeserializationException if an error occurred
* @see #toByteArray
*/
public static BigDecimalComparator parseFrom(final byte[] pbBytes)
@@ -100,10 +101,10 @@ public class BigDecimalComparator extends ByteArrayComparable {
}
/**
- * @param other the other comparator
- * @return true if and only if the fields of the comparator that are serialized are equal to the
- * corresponding fields in other. Used for testing.
+ * Returns true if and only if the fields of the comparator that are serialized are equal to the
+ * corresponding fields in other. Used for testing.
*/
+ @SuppressWarnings("ReferenceEquality")
boolean areSerializedFieldsEqual(BigDecimalComparator other) {
if (other == this) {
return true;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryComparator.java
index 7aff979f4c2..079cee8e642 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryComparator.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryComparator.java
@@ -64,9 +64,11 @@ public class BinaryComparator extends org.apache.hadoop.hbase.filter.ByteArrayCo
}
/**
+ * Parse a serialized representation of {@link BinaryComparator}
* @param pbBytes A pb serialized {@link BinaryComparator} instance
- * @return An instance of {@link BinaryComparator} made from bytes
n * @see
- * #toByteArray
+ * @return An instance of {@link BinaryComparator} made from bytes
+ * @throws DeserializationException if an error occurred
+ * @see #toByteArray
*/
public static BinaryComparator parseFrom(final byte[] pbBytes) throws DeserializationException {
ComparatorProtos.BinaryComparator proto;
@@ -79,14 +81,17 @@ public class BinaryComparator extends org.apache.hadoop.hbase.filter.ByteArrayCo
}
/**
- * n * @return true if and only if the fields of the comparator that are serialized are equal to
- * the corresponding fields in other. Used for testing.
+ * Returns true if and only if the fields of the comparator that are serialized are equal to the
+ * corresponding fields in other. Used for testing.
*/
@Override
boolean areSerializedFieldsEqual(ByteArrayComparable other) {
- if (other == this) return true;
- if (!(other instanceof BinaryComparator)) return false;
-
+ if (other == this) {
+ return true;
+ }
+ if (!(other instanceof BinaryComparator)) {
+ return false;
+ }
return super.areSerializedFieldsEqual(other);
}
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryComponentComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryComponentComparator.java
index 8ce0092e750..6e0ff7edf52 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryComponentComparator.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryComponentComparator.java
@@ -86,9 +86,10 @@ public class BinaryComponentComparator extends ByteArrayComparable {
}
/**
+ * Parse a serialized representation of {@link BinaryComponentComparator}
* @param pbBytes A pb serialized {@link BinaryComponentComparator} instance
* @return An instance of {@link BinaryComponentComparator} made from bytes
- * @throws DeserializationException DeserializationException
+ * @throws DeserializationException if an error occurred
* @see #toByteArray
*/
public static BinaryComponentComparator parseFrom(final byte[] pbBytes)
@@ -103,9 +104,8 @@ public class BinaryComponentComparator extends ByteArrayComparable {
}
/**
- * @param other paramemter to compare against
- * @return true if and only if the fields of the comparator that are serialized are equal to the
- * corresponding fields in other. Used for testing.
+ * Returns true if and only if the fields of the comparator that are serialized are equal to the
+ * corresponding fields in other. Used for testing.
*/
@Override
boolean areSerializedFieldsEqual(ByteArrayComparable other) {
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryPrefixComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryPrefixComparator.java
index fcf447dcb0c..b9cbcc0826b 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryPrefixComparator.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BinaryPrefixComparator.java
@@ -68,9 +68,11 @@ public class BinaryPrefixComparator extends ByteArrayComparable {
}
/**
+ * Parse a serialized representation of {@link BinaryPrefixComparator}
* @param pbBytes A pb serialized {@link BinaryPrefixComparator} instance
- * @return An instance of {@link BinaryPrefixComparator} made from bytes
n * @see
- * #toByteArray
+ * @return An instance of {@link BinaryPrefixComparator} made from bytes
+ * @throws DeserializationException if an error occurred
+ * @see #toByteArray
*/
public static BinaryPrefixComparator parseFrom(final byte[] pbBytes)
throws DeserializationException {
@@ -84,14 +86,17 @@ public class BinaryPrefixComparator extends ByteArrayComparable {
}
/**
- * n * @return true if and only if the fields of the comparator that are serialized are equal to
- * the corresponding fields in other. Used for testing.
+ * Returns true if and only if the fields of the comparator that are serialized are equal to the
+ * corresponding fields in other. Used for testing.
*/
@Override
boolean areSerializedFieldsEqual(ByteArrayComparable other) {
- if (other == this) return true;
- if (!(other instanceof BinaryPrefixComparator)) return false;
-
+ if (other == this) {
+ return true;
+ }
+ if (!(other instanceof BinaryPrefixComparator)) {
+ return false;
+ }
return super.areSerializedFieldsEqual(other);
}
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BitComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BitComparator.java
index 8abdcf4e8a6..d19738d5357 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BitComparator.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/BitComparator.java
@@ -74,8 +74,11 @@ public class BitComparator extends ByteArrayComparable {
}
/**
+ * Parse a serialized representation of {@link BitComparator}
* @param pbBytes A pb serialized {@link BitComparator} instance
- * @return An instance of {@link BitComparator} made from bytes
n * @see #toByteArray
+ * @return An instance of {@link BitComparator} made from bytes
+ * @throws DeserializationException if an error occurred
+ * @see #toByteArray
*/
public static BitComparator parseFrom(final byte[] pbBytes) throws DeserializationException {
ComparatorProtos.BitComparator proto;
@@ -89,14 +92,17 @@ public class BitComparator extends ByteArrayComparable {
}
/**
- * n * @return true if and only if the fields of the comparator that are serialized are equal to
- * the corresponding fields in other. Used for testing.
+ * Returns true if and only if the fields of the comparator that are serialized are equal to the
+ * corresponding fields in other. Used for testing.
*/
@Override
boolean areSerializedFieldsEqual(ByteArrayComparable other) {
- if (other == this) return true;
- if (!(other instanceof BitComparator)) return false;
-
+ if (other == this) {
+ return true;
+ }
+ if (!(other instanceof BitComparator)) {
+ return false;
+ }
BitComparator comparator = (BitComparator) other;
return super.areSerializedFieldsEqual(other)
&& this.getOperator().equals(comparator.getOperator());
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java
index 513f4b1e2db..af11ac5f81a 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnCountGetFilter.java
@@ -88,8 +88,10 @@ public class ColumnCountGetFilter extends FilterBase {
}
/**
+ * Parse a serialized representation of {@link ColumnCountGetFilter}
* @param pbBytes A pb serialized {@link ColumnCountGetFilter} instance
* @return An instance of {@link ColumnCountGetFilter} made from bytes
+ * @throws DeserializationException if an error occurred
* @see #toByteArray
*/
public static ColumnCountGetFilter parseFrom(final byte[] pbBytes)
@@ -104,15 +106,17 @@ public class ColumnCountGetFilter extends FilterBase {
}
/**
- * @param o the other filter to compare with
- * @return true if and only if the fields of the filter that are serialized are equal to the
- * corresponding fields in other. Used for testing.
+ * Returns true if and only if the fields of the filter that are serialized are equal to the
+ * corresponding fields in other. Used for testing.
*/
@Override
boolean areSerializedFieldsEqual(Filter o) {
- if (o == this) return true;
- if (!(o instanceof ColumnCountGetFilter)) return false;
-
+ if (o == this) {
+ return true;
+ }
+ if (!(o instanceof ColumnCountGetFilter)) {
+ return false;
+ }
ColumnCountGetFilter other = (ColumnCountGetFilter) o;
return this.getLimit() == other.getLimit();
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java
index 94f7d76682c..88f18100bbd 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPaginationFilter.java
@@ -78,23 +78,14 @@ public class ColumnPaginationFilter extends FilterBase {
this.columnOffset = columnOffset;
}
- /**
- * n
- */
public int getLimit() {
return limit;
}
- /**
- * n
- */
public int getOffset() {
return offset;
}
- /**
- * n
- */
public byte[] getColumnOffset() {
return columnOffset;
}
@@ -167,9 +158,11 @@ public class ColumnPaginationFilter extends FilterBase {
}
/**
+ * Parse a serialized representation of {@link ColumnPaginationFilter}
* @param pbBytes A pb serialized {@link ColumnPaginationFilter} instance
- * @return An instance of {@link ColumnPaginationFilter} made from bytes
n * @see
- * #toByteArray
+ * @return An instance of {@link ColumnPaginationFilter} made from bytes
+ * @throws DeserializationException if an error occurred
+ * @see #toByteArray
*/
public static ColumnPaginationFilter parseFrom(final byte[] pbBytes)
throws DeserializationException {
@@ -186,15 +179,17 @@ public class ColumnPaginationFilter extends FilterBase {
}
/**
- * @param o the other filter to compare with
- * @return true if and only if the fields of the filter that are serialized are equal to the
- * corresponding fields in other. Used for testing.
+ * Returns true if and only if the fields of the filter that are serialized are equal to the
+ * corresponding fields in other. Used for testing.
*/
@Override
boolean areSerializedFieldsEqual(Filter o) {
- if (o == this) return true;
- if (!(o instanceof ColumnPaginationFilter)) return false;
-
+ if (o == this) {
+ return true;
+ }
+ if (!(o instanceof ColumnPaginationFilter)) {
+ return false;
+ }
ColumnPaginationFilter other = (ColumnPaginationFilter) o;
if (this.columnOffset != null) {
return this.getLimit() == other.getLimit()
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java
index 5ca97e880ba..3b8df1d15c6 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnPrefixFilter.java
@@ -111,9 +111,10 @@ public class ColumnPrefixFilter extends FilterBase {
}
/**
+ * Parses a serialized representation of the {@link ColumnPrefixFilter}
* @param pbBytes A pb serialized {@link ColumnPrefixFilter} instance
* @return An instance of {@link ColumnPrefixFilter} made from bytes
- * @throws org.apache.hadoop.hbase.exceptions.DeserializationException
+ * @throws DeserializationException if an error occurred
* @see #toByteArray
*/
public static ColumnPrefixFilter parseFrom(final byte[] pbBytes) throws DeserializationException {
@@ -127,15 +128,17 @@ public class ColumnPrefixFilter extends FilterBase {
}
/**
- * @param o the other filter to compare with
- * @return true if and only if the fields of the filter that are serialized are equal to the
- * corresponding fields in other. Used for testing.
+ * Returns true if and only if the fields of the filter that are serialized are equal to the
+ * corresponding fields in other. Used for testing.
*/
@Override
boolean areSerializedFieldsEqual(Filter o) {
- if (o == this) return true;
- if (!(o instanceof ColumnPrefixFilter)) return false;
-
+ if (o == this) {
+ return true;
+ }
+ if (!(o instanceof ColumnPrefixFilter)) {
+ return false;
+ }
ColumnPrefixFilter other = (ColumnPrefixFilter) o;
return Bytes.equals(this.getPrefix(), other.getPrefix());
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java
index 2aafd300e03..c9a7902d1e4 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnRangeFilter.java
@@ -157,9 +157,11 @@ public class ColumnRangeFilter extends FilterBase {
}
/**
+ * Parse a serialized representation of {@link ColumnRangeFilter}
* @param pbBytes A pb serialized {@link ColumnRangeFilter} instance
- * @return An instance of {@link ColumnRangeFilter} made from bytes
n * @see
- * #toByteArray
+ * @return An instance of {@link ColumnRangeFilter} made from bytes
+ * @throws DeserializationException if an error occurred
+ * @see #toByteArray
*/
public static ColumnRangeFilter parseFrom(final byte[] pbBytes) throws DeserializationException {
FilterProtos.ColumnRangeFilter proto;
@@ -175,9 +177,8 @@ public class ColumnRangeFilter extends FilterBase {
}
/**
- * @param o filter to serialize.
- * @return true if and only if the fields of the filter that are serialized are equal to the
- * corresponding fields in other. Used for testing.
+ * Returns true if and only if the fields of the filter that are serialized are equal to the
+ * corresponding fields in other. Used for testing.
*/
@Override
boolean areSerializedFieldsEqual(Filter o) {
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnValueFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnValueFilter.java
index 0b3b85dbb47..e7c06d44aef 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnValueFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ColumnValueFilter.java
@@ -168,10 +168,11 @@ public class ColumnValueFilter extends FilterBase {
}
/**
- * Parse protobuf bytes to a ColumnValueFilter
- * @param pbBytes pbBytes
- * @return a ColumnValueFilter
- * @throws DeserializationException deserialization exception
+ * Parse a serialized representation of {@link ColumnValueFilter}
+ * @param pbBytes A pb serialized {@link ColumnValueFilter} instance
+ * @return An instance of {@link ColumnValueFilter} made from bytes
+ * @throws DeserializationException if an error occurred
+ * @see #toByteArray
*/
public static ColumnValueFilter parseFrom(final byte[] pbBytes) throws DeserializationException {
FilterProtos.ColumnValueFilter proto;
@@ -198,6 +199,10 @@ public class ColumnValueFilter extends FilterBase {
return convert().toByteArray();
}
+ /**
+ * Returns true if and only if the fields of the filter that are serialized are equal to the
+ * corresponding fields in other. Used for testing.
+ */
@Override
boolean areSerializedFieldsEqual(Filter o) {
if (o == this) {
@@ -205,7 +210,6 @@ public class ColumnValueFilter extends FilterBase {
} else if (!(o instanceof ColumnValueFilter)) {
return false;
}
-
ColumnValueFilter other = (ColumnValueFilter) o;
return Bytes.equals(this.getFamily(), other.getFamily())
&& Bytes.equals(this.getQualifier(), other.getQualifier())
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java
index 8e994a9feb5..9cbd81b678a 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/CompareFilter.java
@@ -136,7 +136,7 @@ public abstract class CompareFilter extends FilterBase {
}
}
- // returns an array of heterogeneous objects
+ /** Returns an array of heterogeneous objects */
public static ArrayList extractArguments(ArrayList filterArguments) {
Preconditions.checkArgument(filterArguments.size() == 2, "Expected 2 but got: %s",
filterArguments.size());
@@ -166,13 +166,17 @@ public abstract class CompareFilter extends FilterBase {
}
/**
- * n * @return true if and only if the fields of the filter that are serialized are equal to the
+ * Returns true if and only if the fields of the filter that are serialized are equal to the
* corresponding fields in other. Used for testing.
*/
@Override
boolean areSerializedFieldsEqual(Filter o) {
- if (o == this) return true;
- if (!(o instanceof CompareFilter)) return false;
+ if (o == this) {
+ return true;
+ }
+ if (!(o instanceof CompareFilter)) {
+ return false;
+ }
CompareFilter other = (CompareFilter) o;
return this.getCompareOperator().equals(other.getCompareOperator())
&& (this.getComparator() == other.getComparator()
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java
index da613c7f3d0..3d052fc68aa 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/DependentColumnFilter.java
@@ -199,9 +199,11 @@ public class DependentColumnFilter extends CompareFilter {
}
/**
+ * Parse a serialized representation of {@link DependentColumnFilter}
* @param pbBytes A pb serialized {@link DependentColumnFilter} instance
- * @return An instance of {@link DependentColumnFilter} made from bytes
n * @see
- * #toByteArray
+ * @return An instance of {@link DependentColumnFilter} made from bytes
+ * @throws DeserializationException if an error occurred
+ * @see #toByteArray
*/
public static DependentColumnFilter parseFrom(final byte[] pbBytes)
throws DeserializationException {
@@ -228,16 +230,19 @@ public class DependentColumnFilter extends CompareFilter {
}
/**
- * n * @return true if and only if the fields of the filter that are serialized are equal to the
+ * Returns true if and only if the fields of the filter that are serialized are equal to the
* corresponding fields in other. Used for testing.
*/
@edu.umd.cs.findbugs.annotations.SuppressWarnings(
value = "RCN_REDUNDANT_NULLCHECK_OF_NONNULL_VALUE")
@Override
boolean areSerializedFieldsEqual(Filter o) {
- if (o == this) return true;
- if (!(o instanceof DependentColumnFilter)) return false;
-
+ if (o == this) {
+ return true;
+ }
+ if (!(o instanceof DependentColumnFilter)) {
+ return false;
+ }
DependentColumnFilter other = (DependentColumnFilter) o;
return other != null && super.areSerializedFieldsEqual(other)
&& Bytes.equals(this.getFamily(), other.getFamily())
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java
index 96122e84dde..73f859f76ae 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FamilyFilter.java
@@ -82,8 +82,11 @@ public class FamilyFilter extends CompareFilter {
}
/**
+ * Parse the serialized representation of {@link FamilyFilter}
* @param pbBytes A pb serialized {@link FamilyFilter} instance
- * @return An instance of {@link FamilyFilter} made from bytes
n * @see #toByteArray
+ * @return An instance of {@link FamilyFilter} made from bytes
+ * @throws DeserializationException if an error occurred
+ * @see #toByteArray
*/
public static FamilyFilter parseFrom(final byte[] pbBytes) throws DeserializationException {
FilterProtos.FamilyFilter proto;
@@ -106,14 +109,17 @@ public class FamilyFilter extends CompareFilter {
}
/**
- * @return true if and only if the fields of the filter that are serialized are equal to the
- * corresponding fields in other. Used for testing.
+ * Returns true if and only if the fields of the filter that are serialized are equal to the
+ * corresponding fields in other. Used for testing.
*/
@Override
boolean areSerializedFieldsEqual(Filter o) {
- if (o == this) return true;
- if (!(o instanceof FamilyFilter)) return false;
-
+ if (o == this) {
+ return true;
+ }
+ if (!(o instanceof FamilyFilter)) {
+ return false;
+ }
FamilyFilter other = (FamilyFilter) o;
return super.areSerializedFieldsEqual(other);
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/Filter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/Filter.java
index 2c623306ba0..a5f5efcaba1 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/Filter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/Filter.java
@@ -205,7 +205,9 @@ public abstract class Filter {
* Concrete implementers can signal a failure condition in their code by throwing an
* {@link IOException}.
* @param pbBytes A pb serialized {@link Filter} instance
- * @return An instance of {@link Filter} made from bytes
n * @see #toByteArray
+ * @return An instance of {@link Filter} made from bytes
+ * @throws DeserializationException if an error occurred
+ * @see #toByteArray
*/
public static Filter parseFrom(final byte[] pbBytes) throws DeserializationException {
throw new DeserializationException(
@@ -216,7 +218,6 @@ public abstract class Filter {
* Concrete implementers can signal a failure condition in their code by throwing an
* {@link IOException}. n * @return true if and only if the fields of the filter that are
* serialized are equal to the corresponding fields in other. Used for testing.
- * @throws IOException in case an I/O or an filter specific failure needs to be signaled.
*/
abstract boolean areSerializedFieldsEqual(Filter other);
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
index 8dc66182d8c..3b7c136c6e1 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterList.java
@@ -194,8 +194,11 @@ final public class FilterList extends FilterBase {
}
/**
+ * Parse a serialized representation of {@link FilterList}
* @param pbBytes A pb serialized {@link FilterList} instance
- * @return An instance of {@link FilterList} made from bytes
n * @see #toByteArray
+ * @return An instance of {@link FilterList} made from bytes
+ * @throws DeserializationException if an error occurred
+ * @see #toByteArray
*/
public static FilterList parseFrom(final byte[] pbBytes) throws DeserializationException {
FilterProtos.FilterList proto;
@@ -218,14 +221,17 @@ final public class FilterList extends FilterBase {
}
/**
- * n * @return true if and only if the fields of the filter that are serialized are equal to the
+ * Returns true if and only if the fields of the filter that are serialized are equal to the
* corresponding fields in other. Used for testing.
*/
@Override
boolean areSerializedFieldsEqual(Filter other) {
- if (other == this) return true;
- if (!(other instanceof FilterList)) return false;
-
+ if (other == this) {
+ return true;
+ }
+ if (!(other instanceof FilterList)) {
+ return false;
+ }
FilterList o = (FilterList) other;
return this.getOperator().equals(o.getOperator())
&& ((this.getFilters() == o.getFilters()) || this.getFilters().equals(o.getFilters()));
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithAND.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithAND.java
index 760b79d497d..45e06f44454 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithAND.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithAND.java
@@ -270,12 +270,12 @@ public class FilterListWithAND extends FilterListBase {
@Override
public boolean equals(Object obj) {
- if (!(obj instanceof FilterListWithAND)) {
- return false;
- }
if (this == obj) {
return true;
}
+ if (!(obj instanceof FilterListWithAND)) {
+ return false;
+ }
FilterListWithAND f = (FilterListWithAND) obj;
return this.filters.equals(f.getFilters()) && this.seekHintFilters.equals(f.seekHintFilters);
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithOR.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithOR.java
index dd50a1bbb8d..fbe68ab1352 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithOR.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FilterListWithOR.java
@@ -394,12 +394,12 @@ public class FilterListWithOR extends FilterListBase {
@Override
public boolean equals(Object obj) {
- if (obj == null || (!(obj instanceof FilterListWithOR))) {
- return false;
- }
if (this == obj) {
return true;
}
+ if (!(obj instanceof FilterListWithOR)) {
+ return false;
+ }
FilterListWithOR f = (FilterListWithOR) obj;
return this.filters.equals(f.getFilters()) && this.prevFilterRCList.equals(f.prevFilterRCList)
&& this.prevCellList.equals(f.prevCellList);
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java
index d4797aa15e7..6f5e04bbd8d 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyOnlyFilter.java
@@ -19,7 +19,6 @@ package org.apache.hadoop.hbase.filter;
import java.io.IOException;
import java.util.ArrayList;
-import java.util.Objects;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.exceptions.DeserializationException;
import org.apache.yetus.audience.InterfaceAudience;
@@ -71,6 +70,7 @@ public class FirstKeyOnlyFilter extends FilterBase {
}
/**
+ * Set or clear the indication if the first KV has been found.
* @param value update {@link #foundKV} flag with value.
*/
protected void setFoundKV(boolean value) {
@@ -85,9 +85,10 @@ public class FirstKeyOnlyFilter extends FilterBase {
}
/**
+ * Parse a serialized representation of {@link FirstKeyOnlyFilter}
* @param pbBytes A pb serialized {@link FirstKeyOnlyFilter} instance
* @return An instance of {@link FirstKeyOnlyFilter} made from bytes
- * @throws org.apache.hadoop.hbase.exceptions.DeserializationException
+ * @throws DeserializationException if an error occurred
* @see #toByteArray
*/
public static FirstKeyOnlyFilter parseFrom(final byte[] pbBytes) throws DeserializationException {
@@ -102,15 +103,17 @@ public class FirstKeyOnlyFilter extends FilterBase {
}
/**
- * @param o the other filter to compare with
- * @return true if and only if the fields of the filter that are serialized are equal to the
- * corresponding fields in other. Used for testing.
+ * Returns true if and only if the fields of the filter that are serialized are equal to the
+ * corresponding fields in other. Used for testing.
*/
@Override
boolean areSerializedFieldsEqual(Filter o) {
- if (o == this) return true;
- if (!(o instanceof FirstKeyOnlyFilter)) return false;
-
+ if (o == this) {
+ return true;
+ }
+ if (!(o instanceof FirstKeyOnlyFilter)) {
+ return false;
+ }
return true;
}
@@ -121,6 +124,6 @@ public class FirstKeyOnlyFilter extends FilterBase {
@Override
public int hashCode() {
- return Objects.hashCode(foundKV);
+ return Boolean.hashCode(foundKV);
}
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java
index cec4a2f06ff..67354b304f5 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FirstKeyValueMatchingQualifiersFilter.java
@@ -32,9 +32,12 @@ import org.apache.yetus.audience.InterfaceAudience;
public class FirstKeyValueMatchingQualifiersFilter extends FirstKeyOnlyFilter {
/**
+ * Parses a serialized representation of {@link FirstKeyValueMatchingQualifiersFilter}
* @param pbBytes A pb serialized {@link FirstKeyValueMatchingQualifiersFilter} instance
* @return An instance of {@link FirstKeyValueMatchingQualifiersFilter} made from
- * bytes
n * @see #toByteArray
+ * bytes
+ * @throws DeserializationException if an error occurred
+ * @see #toByteArray
*/
public static FirstKeyValueMatchingQualifiersFilter parseFrom(final byte[] pbBytes)
throws DeserializationException {
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java
index c9d9fd41832..1506eca5df6 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/FuzzyRowFilter.java
@@ -265,9 +265,11 @@ public class FuzzyRowFilter extends FilterBase {
}
/**
+ * Parse a serialized representation of {@link FuzzyRowFilter}
* @param pbBytes A pb serialized {@link FuzzyRowFilter} instance
- * @return An instance of {@link FuzzyRowFilter} made from bytes
n * @see
- * #toByteArray
+ * @return An instance of {@link FuzzyRowFilter} made from bytes
+ * @throws DeserializationException if an error occurred
+ * @see #toByteArray
*/
public static FuzzyRowFilter parseFrom(final byte[] pbBytes) throws DeserializationException {
FilterProtos.FuzzyRowFilter proto;
@@ -340,7 +342,7 @@ public class FuzzyRowFilter extends FilterBase {
long fuzzyBytes = Bytes.toLong(fuzzyKeyBytes, i);
long fuzzyMeta = Bytes.toLong(fuzzyKeyMeta, i);
long rowValue = Bytes.toLong(row, offset + i);
- if ((rowValue & fuzzyMeta) != (fuzzyBytes)) {
+ if ((rowValue & fuzzyMeta) != fuzzyBytes) {
// We always return NEXT_EXISTS
return SatisfiesCode.NEXT_EXISTS;
}
@@ -352,7 +354,7 @@ public class FuzzyRowFilter extends FilterBase {
int fuzzyBytes = Bytes.toInt(fuzzyKeyBytes, off);
int fuzzyMeta = Bytes.toInt(fuzzyKeyMeta, off);
int rowValue = Bytes.toInt(row, offset + off);
- if ((rowValue & fuzzyMeta) != (fuzzyBytes)) {
+ if ((rowValue & fuzzyMeta) != fuzzyBytes) {
// We always return NEXT_EXISTS
return SatisfiesCode.NEXT_EXISTS;
}
@@ -363,7 +365,7 @@ public class FuzzyRowFilter extends FilterBase {
short fuzzyBytes = Bytes.toShort(fuzzyKeyBytes, off);
short fuzzyMeta = Bytes.toShort(fuzzyKeyMeta, off);
short rowValue = Bytes.toShort(row, offset + off);
- if ((rowValue & fuzzyMeta) != (fuzzyBytes)) {
+ if ((rowValue & fuzzyMeta) != fuzzyBytes) {
// We always return NEXT_EXISTS
// even if it does not (in this case getNextForFuzzyRule
// will return null)
@@ -376,7 +378,7 @@ public class FuzzyRowFilter extends FilterBase {
int fuzzyBytes = fuzzyKeyBytes[off] & 0xff;
int fuzzyMeta = fuzzyKeyMeta[off] & 0xff;
int rowValue = row[offset + off] & 0xff;
- if ((rowValue & fuzzyMeta) != (fuzzyBytes)) {
+ if ((rowValue & fuzzyMeta) != fuzzyBytes) {
// We always return NEXT_EXISTS
return SatisfiesCode.NEXT_EXISTS;
}
@@ -603,14 +605,17 @@ public class FuzzyRowFilter extends FilterBase {
}
/**
- * @return true if and only if the fields of the filter that are serialized are equal to the
- * corresponding fields in other. Used for testing.
+ * Returns true if and only if the fields of the filter that are serialized are equal to the
+ * corresponding fields in other. Used for testing.
*/
@Override
boolean areSerializedFieldsEqual(Filter o) {
- if (o == this) return true;
- if (!(o instanceof FuzzyRowFilter)) return false;
-
+ if (o == this) {
+ return true;
+ }
+ if (!(o instanceof FuzzyRowFilter)) {
+ return false;
+ }
FuzzyRowFilter other = (FuzzyRowFilter) o;
if (this.fuzzyKeysData.size() != other.fuzzyKeysData.size()) return false;
for (int i = 0; i < fuzzyKeysData.size(); ++i) {
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java
index 42761712034..a9d4e780688 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/InclusiveStopFilter.java
@@ -86,9 +86,11 @@ public class InclusiveStopFilter extends FilterBase {
}
/**
+ * Parse a serialized representation of {@link InclusiveStopFilter}
* @param pbBytes A pb serialized {@link InclusiveStopFilter} instance
- * @return An instance of {@link InclusiveStopFilter} made from bytes
n * @see
- * #toByteArray
+ * @return An instance of {@link InclusiveStopFilter} made from bytes
+ * @throws DeserializationException if an error occurred
+ * @see #toByteArray
*/
public static InclusiveStopFilter parseFrom(final byte[] pbBytes)
throws DeserializationException {
@@ -103,15 +105,17 @@ public class InclusiveStopFilter extends FilterBase {
}
/**
- * @param o the other filter to compare with
- * @return true if and only if the fields of the filter that are serialized are equal to the
- * corresponding fields in other. Used for testing.
+ * Returns true if and only if the fields of the filter that are serialized are equal to the
+ * corresponding fields in other. Used for testing.
*/
@Override
boolean areSerializedFieldsEqual(Filter o) {
- if (o == this) return true;
- if (!(o instanceof InclusiveStopFilter)) return false;
-
+ if (o == this) {
+ return true;
+ }
+ if (!(o instanceof InclusiveStopFilter)) {
+ return false;
+ }
InclusiveStopFilter other = (InclusiveStopFilter) o;
return Bytes.equals(this.getStopRowKey(), other.getStopRowKey());
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java
index 414818a3afc..e2711a774aa 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/KeyOnlyFilter.java
@@ -102,8 +102,11 @@ public class KeyOnlyFilter extends FilterBase {
}
/**
+ * Parse a serialized representation of {@link KeyOnlyFilter}
* @param pbBytes A pb serialized {@link KeyOnlyFilter} instance
- * @return An instance of {@link KeyOnlyFilter} made from bytes
n * @see #toByteArray
+ * @return An instance of {@link KeyOnlyFilter} made from bytes
+ * @throws DeserializationException if an error occurred
+ * @see #toByteArray
*/
public static KeyOnlyFilter parseFrom(final byte[] pbBytes) throws DeserializationException {
FilterProtos.KeyOnlyFilter proto;
@@ -116,15 +119,17 @@ public class KeyOnlyFilter extends FilterBase {
}
/**
- * @param o the other filter to compare with
- * @return true if and only if the fields of the filter that are serialized are equal to the
- * corresponding fields in other. Used for testing.
+ * Returns true if and only if the fields of the filter that are serialized are equal to the
+ * corresponding fields in other. Used for testing.
*/
@Override
boolean areSerializedFieldsEqual(Filter o) {
- if (o == this) return true;
- if (!(o instanceof KeyOnlyFilter)) return false;
-
+ if (o == this) {
+ return true;
+ }
+ if (!(o instanceof KeyOnlyFilter)) {
+ return false;
+ }
KeyOnlyFilter other = (KeyOnlyFilter) o;
return this.lenAsVal == other.lenAsVal;
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/LongComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/LongComparator.java
index 8f9a7f1dfc3..c71afa098fe 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/LongComparator.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/LongComparator.java
@@ -62,9 +62,10 @@ public class LongComparator extends ByteArrayComparable {
}
/**
+ * Parses a serialized representation of {@link LongComparator}
* @param pbBytes A pb serialized {@link LongComparator} instance
* @return An instance of {@link LongComparator} made from bytes
- * @throws org.apache.hadoop.hbase.exceptions.DeserializationException
+ * @throws DeserializationException if an error occurred
* @see #toByteArray
*/
public static LongComparator parseFrom(final byte[] pbBytes) throws DeserializationException {
@@ -78,11 +79,16 @@ public class LongComparator extends ByteArrayComparable {
}
/**
- * n * @return true if and only if the fields of the comparator that are serialized are equal to
- * the corresponding fields in other. Used for testing.
+ * Returns true if and only if the fields of the comparator that are serialized are equal to the
+ * corresponding fields in other. Used for testing.
*/
boolean areSerializedFieldsEqual(LongComparator other) {
- if (other == this) return true;
+ if (other == this) {
+ return true;
+ }
+ if (other == null) {
+ return false;
+ }
return super.areSerializedFieldsEqual(other);
}
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java
index c4676910c14..85fae7b0a99 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultiRowRangeFilter.java
@@ -197,9 +197,11 @@ public class MultiRowRangeFilter extends FilterBase {
}
/**
+ * Parse a serialized representation of {@link MultiRowRangeFilter}
* @param pbBytes A pb serialized instance
- * @return An instance of MultiRowRangeFilter
- * @throws org.apache.hadoop.hbase.exceptions.DeserializationException
+ * @return An instance of {@link MultiRowRangeFilter}
+ * @throws DeserializationException if an error occurred
+ * @see #toByteArray
*/
public static MultiRowRangeFilter parseFrom(final byte[] pbBytes)
throws DeserializationException {
@@ -224,15 +226,17 @@ public class MultiRowRangeFilter extends FilterBase {
}
/**
- * @param o the filter to compare
- * @return true if and only if the fields of the filter that are serialized are equal to the
- * corresponding fields in other. Used for testing.
+ * Returns true if and only if the fields of the filter that are serialized are equal to the
+ * corresponding fields in other. Used for testing.
*/
@Override
boolean areSerializedFieldsEqual(Filter o) {
- if (o == this) return true;
- if (!(o instanceof MultiRowRangeFilter)) return false;
-
+ if (o == this) {
+ return true;
+ }
+ if (!(o instanceof MultiRowRangeFilter)) {
+ return false;
+ }
MultiRowRangeFilter other = (MultiRowRangeFilter) o;
if (this.rangeList.size() != other.rangeList.size()) return false;
for (int i = 0; i < rangeList.size(); ++i) {
@@ -719,7 +723,7 @@ public class MultiRowRangeFilter extends FilterBase {
/**
* Gets the RowRange at the given offset.
*/
- @SuppressWarnings("unchecked")
+ @SuppressWarnings({ "unchecked", "TypeParameterUnusedInFormals" })
public T get(int i) {
return (T) ranges.get(i);
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java
index 4a969c0006e..168257cd2f7 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/MultipleColumnPrefixFilter.java
@@ -128,9 +128,11 @@ public class MultipleColumnPrefixFilter extends FilterBase {
}
/**
+ * Parse a serialized representation of {@link MultipleColumnPrefixFilter}
* @param pbBytes A pb serialized {@link MultipleColumnPrefixFilter} instance
- * @return An instance of {@link MultipleColumnPrefixFilter} made from bytes
n * @see
- * #toByteArray
+ * @return An instance of {@link MultipleColumnPrefixFilter} made from bytes
+ * @throws DeserializationException if an error occurred
+ * @see #toByteArray
*/
public static MultipleColumnPrefixFilter parseFrom(final byte[] pbBytes)
throws DeserializationException {
@@ -150,15 +152,17 @@ public class MultipleColumnPrefixFilter extends FilterBase {
}
/**
- * @param o the other filter to compare with
- * @return true if and only if the fields of the filter that are serialized are equal to the
- * corresponding fields in other. Used for testing.
+ * Returns true if and only if the fields of the filter that are serialized are equal to the
+ * corresponding fields in other. Used for testing.
*/
@Override
boolean areSerializedFieldsEqual(Filter o) {
- if (o == this) return true;
- if (!(o instanceof MultipleColumnPrefixFilter)) return false;
-
+ if (o == this) {
+ return true;
+ }
+ if (!(o instanceof MultipleColumnPrefixFilter)) {
+ return false;
+ }
MultipleColumnPrefixFilter other = (MultipleColumnPrefixFilter) o;
return this.sortedPrefixes.equals(other.sortedPrefixes);
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java
index 6153e1e8080..ea8a5abd354 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/NullComparator.java
@@ -71,9 +71,11 @@ public class NullComparator extends ByteArrayComparable {
}
/**
+ * Parse the serialized representation of {@link NullComparator}
* @param pbBytes A pb serialized {@link NullComparator} instance
- * @return An instance of {@link NullComparator} made from bytes
n * @see
- * #toByteArray
+ * @return An instance of {@link NullComparator} made from bytes
+ * @throws DeserializationException if an error occurred
+ * @see #toByteArray
*/
public static NullComparator parseFrom(final byte[] pbBytes) throws DeserializationException {
try {
@@ -86,14 +88,17 @@ public class NullComparator extends ByteArrayComparable {
}
/**
- * n * @return true if and only if the fields of the comparator that are serialized are equal to
- * the corresponding fields in other. Used for testing.
+ * Returns true if and only if the fields of the comparator that are serialized are equal to the
+ * corresponding fields in other. Used for testing.
*/
@Override
boolean areSerializedFieldsEqual(ByteArrayComparable other) {
- if (other == this) return true;
- if (!(other instanceof NullComparator)) return false;
-
+ if (other == this) {
+ return true;
+ }
+ if (!(other instanceof NullComparator)) {
+ return false;
+ }
return super.areSerializedFieldsEqual(other);
}
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java
index 087957fafc6..45c91921db8 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PageFilter.java
@@ -100,8 +100,11 @@ public class PageFilter extends FilterBase {
}
/**
+ * Parse a serialized representation of {@link PageFilter}
* @param pbBytes A pb serialized {@link PageFilter} instance
- * @return An instance of {@link PageFilter} made from bytes
n * @see #toByteArray
+ * @return An instance of {@link PageFilter} made from bytes
+ * @throws DeserializationException if an error occurred
+ * @see #toByteArray
*/
public static PageFilter parseFrom(final byte[] pbBytes) throws DeserializationException {
FilterProtos.PageFilter proto;
@@ -114,9 +117,8 @@ public class PageFilter extends FilterBase {
}
/**
- * @param o other Filter to compare with
- * @return true if and only if the fields of the filter that are serialized are equal to the
- * corresponding fields in other. Used for testing.
+ * Returns true if and only if the fields of the filter that are serialized are equal to the
+ * corresponding fields in other. Used for testing.
*/
@Override
boolean areSerializedFieldsEqual(Filter o) {
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ParseFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ParseFilter.java
index b08ce971c21..c9bd7a1a55c 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ParseFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ParseFilter.java
@@ -156,7 +156,7 @@ public class ParseFilter {
operatorStack.pop();
continue;
}
- while (!(argumentOnTopOfStack.equals(ParseConstants.LPAREN_BUFFER))) {
+ while (!argumentOnTopOfStack.equals(ParseConstants.LPAREN_BUFFER)) {
filterStack.push(popArguments(operatorStack, filterStack));
if (operatorStack.empty()) {
throw new IllegalArgumentException("Mismatched parenthesis");
@@ -367,7 +367,7 @@ public class ParseFilter {
public void reduce(Stack operatorStack, Stack filterStack,
ByteBuffer operator) {
while (
- !operatorStack.empty() && !(ParseConstants.LPAREN_BUFFER.equals(operatorStack.peek()))
+ !operatorStack.empty() && !ParseConstants.LPAREN_BUFFER.equals(operatorStack.peek())
&& hasHigherPriority(operatorStack.peek(), operator)
) {
filterStack.push(popArguments(operatorStack, filterStack));
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java
index 407b92a8604..c6d76a2f0d1 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/PrefixFilter.java
@@ -110,9 +110,10 @@ public class PrefixFilter extends FilterBase {
}
/**
+ * Parse a serialized representation of {@link PrefixFilter}
* @param pbBytes A pb serialized {@link PrefixFilter} instance
* @return An instance of {@link PrefixFilter} made from bytes
- * @throws org.apache.hadoop.hbase.exceptions.DeserializationException
+ * @throws DeserializationException if an error occurred
* @see #toByteArray
*/
public static PrefixFilter parseFrom(final byte[] pbBytes) throws DeserializationException {
@@ -126,15 +127,17 @@ public class PrefixFilter extends FilterBase {
}
/**
- * @param o the other filter to compare with
- * @return true if and only if the fields of the filter that are serialized are equal to the
- * corresponding fields in other. Used for testing.
+ * Returns true if and only if the fields of the filter that are serialized are equal to the
+ * corresponding fields in other. Used for testing.
*/
@Override
boolean areSerializedFieldsEqual(Filter o) {
- if (o == this) return true;
- if (!(o instanceof PrefixFilter)) return false;
-
+ if (o == this) {
+ return true;
+ }
+ if (!(o instanceof PrefixFilter)) {
+ return false;
+ }
PrefixFilter other = (PrefixFilter) o;
return Bytes.equals(this.getPrefix(), other.getPrefix());
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java
index f8dc591b19a..e11e2ad4857 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/QualifierFilter.java
@@ -76,9 +76,10 @@ public class QualifierFilter extends CompareFilter {
}
/**
+ * Parse a serialized representation of {@link QualifierFilter}
* @param pbBytes A pb serialized {@link QualifierFilter} instance
* @return An instance of {@link QualifierFilter} made from bytes
- * @throws org.apache.hadoop.hbase.exceptions.DeserializationException
+ * @throws DeserializationException if an error occurred
* @see #toByteArray
*/
public static QualifierFilter parseFrom(final byte[] pbBytes) throws DeserializationException {
@@ -102,14 +103,17 @@ public class QualifierFilter extends CompareFilter {
}
/**
- * @return true if and only if the fields of the filter that are serialized are equal to the
- * corresponding fields in other. Used for testing.
+ * Returns true if and only if the fields of the filter that are serialized are equal to the
+ * corresponding fields in other. Used for testing.
*/
@Override
boolean areSerializedFieldsEqual(Filter o) {
- if (o == this) return true;
- if (!(o instanceof QualifierFilter)) return false;
-
+ if (o == this) {
+ return true;
+ }
+ if (!(o instanceof QualifierFilter)) {
+ return false;
+ }
return super.areSerializedFieldsEqual(o);
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RandomRowFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RandomRowFilter.java
index 5340ecf3434..099f38026fe 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RandomRowFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RandomRowFilter.java
@@ -107,9 +107,11 @@ public class RandomRowFilter extends FilterBase {
}
/**
+ * Parse a serialized representation of {@link RandomRowFilter}
* @param pbBytes A pb serialized {@link RandomRowFilter} instance
- * @return An instance of {@link RandomRowFilter} made from bytes
n * @see
- * #toByteArray
+ * @return An instance of {@link RandomRowFilter} made from bytes
+ * @throws DeserializationException if an error occurred
+ * @see #toByteArray
*/
public static RandomRowFilter parseFrom(final byte[] pbBytes) throws DeserializationException {
FilterProtos.RandomRowFilter proto;
@@ -122,15 +124,17 @@ public class RandomRowFilter extends FilterBase {
}
/**
- * @param o the other filter to compare with
- * @return true if and only if the fields of the filter that are serialized are equal to the
- * corresponding fields in other. Used for testing.
+ * Returns true if and only if the fields of the filter that are serialized are equal to the
+ * corresponding fields in other. Used for testing.
*/
@Override
boolean areSerializedFieldsEqual(Filter o) {
- if (o == this) return true;
- if (!(o instanceof RandomRowFilter)) return false;
-
+ if (o == this) {
+ return true;
+ }
+ if (!(o instanceof RandomRowFilter)) {
+ return false;
+ }
RandomRowFilter other = (RandomRowFilter) o;
return this.getChance() == other.getChance();
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RegexStringComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RegexStringComparator.java
index 4bdc057bd9e..9efa563b432 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RegexStringComparator.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RegexStringComparator.java
@@ -152,9 +152,11 @@ public class RegexStringComparator extends ByteArrayComparable {
}
/**
+ * Parse a serialized representation of {@link RegexStringComparator}
* @param pbBytes A pb serialized {@link RegexStringComparator} instance
- * @return An instance of {@link RegexStringComparator} made from bytes
n * @see
- * #toByteArray
+ * @return An instance of {@link RegexStringComparator} made from bytes
+ * @throws DeserializationException if an error occurred
+ * @see #toByteArray
*/
public static RegexStringComparator parseFrom(final byte[] pbBytes)
throws DeserializationException {
@@ -183,13 +185,17 @@ public class RegexStringComparator extends ByteArrayComparable {
}
/**
- * n * @return true if and only if the fields of the comparator that are serialized are equal to
- * the corresponding fields in other. Used for testing.
+ * Returns true if and only if the fields of the comparator that are serialized are equal to the
+ * corresponding fields in other. Used for testing.
*/
@Override
boolean areSerializedFieldsEqual(ByteArrayComparable other) {
- if (other == this) return true;
- if (!(other instanceof RegexStringComparator)) return false;
+ if (other == this) {
+ return true;
+ }
+ if (!(other instanceof RegexStringComparator)) {
+ return false;
+ }
RegexStringComparator comparator = (RegexStringComparator) other;
return super.areSerializedFieldsEqual(comparator)
&& engine.getClass().isInstance(comparator.getEngine())
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java
index c6c0099a9d0..017185670be 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/RowFilter.java
@@ -96,8 +96,11 @@ public class RowFilter extends CompareFilter {
}
/**
+ * Parse a serialized representation of {@link RowFilter}
* @param pbBytes A pb serialized {@link RowFilter} instance
- * @return An instance of {@link RowFilter} made from bytes
n * @see #toByteArray
+ * @return An instance of {@link RowFilter} made from bytes
+ * @throws DeserializationException if an error occurred
+ * @see #toByteArray
*/
public static RowFilter parseFrom(final byte[] pbBytes) throws DeserializationException {
FilterProtos.RowFilter proto;
@@ -120,14 +123,17 @@ public class RowFilter extends CompareFilter {
}
/**
- * @return true if and only if the fields of the filter that are serialized are equal to the
- * corresponding fields in other. Used for testing.
+ * Returns true if and only if the fields of the filter that are serialized are equal to the
+ * corresponding fields in other. Used for testing.
*/
@Override
boolean areSerializedFieldsEqual(Filter o) {
- if (o == this) return true;
- if (!(o instanceof RowFilter)) return false;
-
+ if (o == this) {
+ return true;
+ }
+ if (!(o instanceof RowFilter)) {
+ return false;
+ }
return super.areSerializedFieldsEqual(o);
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java
index f9b80ee72c5..14bdc04a754 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueExcludeFilter.java
@@ -122,9 +122,11 @@ public class SingleColumnValueExcludeFilter extends SingleColumnValueFilter {
}
/**
+ * Parse a serialized representation of {@link SingleColumnValueExcludeFilter}
* @param pbBytes A pb serialized {@link SingleColumnValueExcludeFilter} instance
- * @return An instance of {@link SingleColumnValueExcludeFilter} made from bytes
n
- * * @see #toByteArray
+ * @return An instance of {@link SingleColumnValueExcludeFilter} made from bytes
+ * @throws DeserializationException if an error occurred
+ * @see #toByteArray
*/
public static SingleColumnValueExcludeFilter parseFrom(final byte[] pbBytes)
throws DeserializationException {
@@ -151,14 +153,17 @@ public class SingleColumnValueExcludeFilter extends SingleColumnValueFilter {
}
/**
- * @return true if and only if the fields of the filter that are serialized are equal to the
- * corresponding fields in other. Used for testing.
+ * Returns true if and only if the fields of the filter that are serialized are equal to the
+ * corresponding fields in other. Used for testing.
*/
@Override
boolean areSerializedFieldsEqual(Filter o) {
- if (o == this) return true;
- if (!(o instanceof SingleColumnValueExcludeFilter)) return false;
-
+ if (o == this) {
+ return true;
+ }
+ if (!(o instanceof SingleColumnValueExcludeFilter)) {
+ return false;
+ }
return super.areSerializedFieldsEqual(o);
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java
index 9806f6387a0..7be5ce91405 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SingleColumnValueFilter.java
@@ -284,8 +284,10 @@ public class SingleColumnValueFilter extends FilterBase {
}
/**
+ * Parse a serialized representation of {@link SingleColumnValueFilter}
* @param pbBytes A pb serialized {@link SingleColumnValueFilter} instance
* @return An instance of {@link SingleColumnValueFilter} made from bytes
+ * @throws DeserializationException if an error occurred
* @see #toByteArray
*/
public static SingleColumnValueFilter parseFrom(final byte[] pbBytes)
@@ -312,8 +314,8 @@ public class SingleColumnValueFilter extends FilterBase {
}
/**
- * @return true if and only if the fields of the filter that are serialized are equal to the
- * corresponding fields in other. Used for testing.
+ * Returns true if and only if the fields of the filter that are serialized are equal to the
+ * corresponding fields in other. Used for testing.
*/
@Override
boolean areSerializedFieldsEqual(Filter o) {
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java
index 08e21fa70c2..a5149592f61 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SkipFilter.java
@@ -105,8 +105,11 @@ public class SkipFilter extends FilterBase {
}
/**
+ * Parse a serialized representation of {@link SkipFilter}
* @param pbBytes A pb serialized {@link SkipFilter} instance
- * @return An instance of {@link SkipFilter} made from bytes
n * @see #toByteArray
+ * @return An instance of {@link SkipFilter} made from bytes
+ * @throws DeserializationException if an error occurred
+ * @see #toByteArray
*/
public static SkipFilter parseFrom(final byte[] pbBytes) throws DeserializationException {
FilterProtos.SkipFilter proto;
@@ -123,15 +126,17 @@ public class SkipFilter extends FilterBase {
}
/**
- * @param o the other filter to compare with
- * @return true if and only if the fields of the filter that are serialized are equal to the
- * corresponding fields in other. Used for testing.
+ * Returns true if and only if the fields of the filter that are serialized are equal to the
+ * corresponding fields in other. Used for testing.
*/
@Override
boolean areSerializedFieldsEqual(Filter o) {
- if (o == this) return true;
- if (!(o instanceof SkipFilter)) return false;
-
+ if (o == this) {
+ return true;
+ }
+ if (!(o instanceof SkipFilter)) {
+ return false;
+ }
SkipFilter other = (SkipFilter) o;
return getFilter().areSerializedFieldsEqual(other.getFilter());
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SubstringComparator.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SubstringComparator.java
index b5fe22dca24..8312d3f673f 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SubstringComparator.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/SubstringComparator.java
@@ -76,9 +76,11 @@ public class SubstringComparator extends ByteArrayComparable {
}
/**
+ * Parse a serialized representation of {@link SubstringComparator}
* @param pbBytes A pb serialized {@link SubstringComparator} instance
- * @return An instance of {@link SubstringComparator} made from bytes
n * @see
- * #toByteArray
+ * @return An instance of {@link SubstringComparator} made from bytes
+ * @throws DeserializationException if an error occurred
+ * @see #toByteArray
*/
public static SubstringComparator parseFrom(final byte[] pbBytes)
throws DeserializationException {
@@ -92,14 +94,17 @@ public class SubstringComparator extends ByteArrayComparable {
}
/**
- * n * @return true if and only if the fields of the comparator that are serialized are equal to
- * the corresponding fields in other. Used for testing.
+ * Returns true if and only if the fields of the comparator that are serialized are equal to the
+ * corresponding fields in other. Used for testing.
*/
@Override
boolean areSerializedFieldsEqual(ByteArrayComparable other) {
- if (other == this) return true;
- if (!(other instanceof SubstringComparator)) return false;
-
+ if (other == this) {
+ return true;
+ }
+ if (!(other instanceof SubstringComparator)) {
+ return false;
+ }
SubstringComparator comparator = (SubstringComparator) other;
return super.areSerializedFieldsEqual(comparator) && this.substr.equals(comparator.substr);
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java
index 853555ef511..b3f821d75e4 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/TimestampsFilter.java
@@ -167,8 +167,10 @@ public class TimestampsFilter extends FilterBase {
}
/**
+ * Parse a serialized representation of {@link TimestampsFilter}
* @param pbBytes A pb serialized {@link TimestampsFilter} instance
* @return An instance of {@link TimestampsFilter} made from bytes
+ * @throws DeserializationException if an error occurred
* @see #toByteArray
*/
public static TimestampsFilter parseFrom(final byte[] pbBytes) throws DeserializationException {
@@ -183,15 +185,17 @@ public class TimestampsFilter extends FilterBase {
}
/**
- * @param o the other filter to compare with
- * @return true if and only if the fields of the filter that are serialized are equal to the
- * corresponding fields in other. Used for testing.
+ * Returns true if and only if the fields of the filter that are serialized are equal to the
+ * corresponding fields in other. Used for testing.
*/
@Override
boolean areSerializedFieldsEqual(Filter o) {
- if (o == this) return true;
- if (!(o instanceof TimestampsFilter)) return false;
-
+ if (o == this) {
+ return true;
+ }
+ if (!(o instanceof TimestampsFilter)) {
+ return false;
+ }
TimestampsFilter other = (TimestampsFilter) o;
return this.getTimestamps().equals(other.getTimestamps());
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java
index 90dda21eb52..0056a3aff0d 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/ValueFilter.java
@@ -78,8 +78,11 @@ public class ValueFilter extends CompareFilter {
}
/**
+ * Parse a serialized representation of {@link ValueFilter}
* @param pbBytes A pb serialized {@link ValueFilter} instance
- * @return An instance of {@link ValueFilter} made from bytes
n * @see #toByteArray
+ * @return An instance of {@link ValueFilter} made from bytes
+ * @throws DeserializationException if an error occurred
+ * @see #toByteArray
*/
public static ValueFilter parseFrom(final byte[] pbBytes) throws DeserializationException {
FilterProtos.ValueFilter proto;
@@ -102,14 +105,17 @@ public class ValueFilter extends CompareFilter {
}
/**
- * @return true if and only if the fields of the filter that are serialized are equal to the
- * corresponding fields in other. Used for testing.
+ * Returns true if and only if the fields of the filter that are serialized are equal to the
+ * corresponding fields in other. Used for testing.
*/
@Override
boolean areSerializedFieldsEqual(Filter o) {
- if (o == this) return true;
- if (!(o instanceof ValueFilter)) return false;
-
+ if (o == this) {
+ return true;
+ }
+ if (!(o instanceof ValueFilter)) {
+ return false;
+ }
return super.areSerializedFieldsEqual(o);
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java
index 5d6842b15e4..65cd03042b0 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/WhileMatchFilter.java
@@ -103,9 +103,10 @@ public class WhileMatchFilter extends FilterBase {
}
/**
+ * Parse a serialized representation of {@link WhileMatchFilter}
* @param pbBytes A pb serialized {@link WhileMatchFilter} instance
* @return An instance of {@link WhileMatchFilter} made from bytes
- * @throws org.apache.hadoop.hbase.exceptions.DeserializationException
+ * @throws DeserializationException if an error occurred
* @see #toByteArray
*/
public static WhileMatchFilter parseFrom(final byte[] pbBytes) throws DeserializationException {
@@ -123,15 +124,17 @@ public class WhileMatchFilter extends FilterBase {
}
/**
- * @param o the other filter to compare with
- * @return true if and only if the fields of the filter that are serialized are equal to the
- * corresponding fields in other. Used for testing.
+ * Returns true if and only if the fields of the filter that are serialized are equal to the
+ * corresponding fields in other. Used for testing.
*/
@Override
boolean areSerializedFieldsEqual(Filter o) {
- if (o == this) return true;
- if (!(o instanceof WhileMatchFilter)) return false;
-
+ if (o == this) {
+ return true;
+ }
+ if (!(o instanceof WhileMatchFilter)) {
+ return false;
+ }
WhileMatchFilter other = (WhileMatchFilter) o;
return getFilter().areSerializedFieldsEqual(other.getFilter());
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/package-info.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/package-info.java
index 9dea2548ea9..e4d3ab947da 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/package-info.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/filter/package-info.java
@@ -20,15 +20,14 @@
/**
* Provides row-level filters applied to HRegion scan results during calls to
* {@link org.apache.hadoop.hbase.client.ResultScanner#next()}.
-
-
-Filters run the extent of a table unless you wrap your filter in a
-{@link org.apache.hadoop.hbase.filter.WhileMatchFilter}.
-The latter returns as soon as the filter stops matching.
-
-Do not rely on filters carrying state across rows; its not reliable in current
-hbase as we have no handlers in place for when regions split, close or server
-crashes.
-
-*/
+ *
+ *
+ * Filters run the extent of a table unless you wrap your filter in a
+ * {@link org.apache.hadoop.hbase.filter.WhileMatchFilter}.
+ * The latter returns as soon as the filter stops matching.
+ *
+ * Do not rely on filters carrying state across rows; it's not reliable in current
+ * hbase as we have no handlers in place for when regions split, close or server
+ * crashes.
+ */
package org.apache.hadoop.hbase.filter;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java
index 7c0149ccb8a..9d557a98743 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/AbstractRpcClient.java
@@ -238,7 +238,8 @@ public abstract class AbstractRpcClient implements RpcC
return null;
}
try {
- return (Codec) Class.forName(className).getDeclaredConstructor().newInstance();
+ return Class.forName(className).asSubclass(Codec.class).getDeclaredConstructor()
+ .newInstance();
} catch (Exception e) {
throw new RuntimeException("Failed getting codec " + className, e);
}
@@ -265,7 +266,8 @@ public abstract class AbstractRpcClient implements RpcC
return null;
}
try {
- return (CompressionCodec) Class.forName(className).getDeclaredConstructor().newInstance();
+ return Class.forName(className).asSubclass(CompressionCodec.class).getDeclaredConstructor()
+ .newInstance();
} catch (Exception e) {
throw new RuntimeException("Failed getting compressor " + className, e);
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.java
index c8adc6a8cc3..da032cbeb9d 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/BlockingRpcConnection.java
@@ -67,7 +67,6 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.com.google.protobuf.Message;
-import org.apache.hbase.thirdparty.com.google.protobuf.Message.Builder;
import org.apache.hbase.thirdparty.com.google.protobuf.RpcCallback;
import org.apache.hbase.thirdparty.io.netty.buffer.ByteBuf;
import org.apache.hbase.thirdparty.io.netty.buffer.PooledByteBufAllocator;
@@ -181,6 +180,8 @@ class BlockingRpcConnection extends RpcConnection implements Runnable {
try {
BlockingRpcConnection.this.wait();
} catch (InterruptedException e) {
+ // Restore interrupt status
+ Thread.currentThread().interrupt();
}
// check if we need to quit, so continue the main loop instead of fallback.
continue;
@@ -333,6 +334,8 @@ class BlockingRpcConnection extends RpcConnection implements Runnable {
try {
wait(Math.min(this.rpcClient.minIdleTimeBeforeClose, 1000));
} catch (InterruptedException e) {
+ // Restore interrupt status
+ Thread.currentThread().interrupt();
}
}
}
@@ -685,7 +688,7 @@ class BlockingRpcConnection extends RpcConnection implements Runnable {
} else {
Message value = null;
if (call.responseDefaultType != null) {
- Builder builder = call.responseDefaultType.newBuilderForType();
+ Message.Builder builder = call.responseDefaultType.newBuilderForType();
ProtobufUtil.mergeDelimitedFrom(builder, in);
value = builder.build();
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CellBlockBuilder.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CellBlockBuilder.java
index 9e9c0688ece..b2b3698aa2c 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CellBlockBuilder.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/CellBlockBuilder.java
@@ -228,6 +228,7 @@ class CellBlockBuilder {
}
/**
+ * Create a cell scanner.
* @param codec to use for cellblock
* @param cellBlock to encode
* @return CellScanner to work against the content of cellBlock
@@ -248,6 +249,7 @@ class CellBlockBuilder {
}
/**
+ * Create a cell scanner using an existing bytebuff.
* @param codec to use for cellblock
* @param cellBlock ByteBuffer containing the cells written by the Codec. The buffer should be
* position()'ed at the start of the cell block and limit()'ed at the end.
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ConnectionId.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ConnectionId.java
index 6cb9cddd9fe..4de82e0c12a 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ConnectionId.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/ConnectionId.java
@@ -57,14 +57,18 @@ class ConnectionId {
}
@Override
+ @SuppressWarnings("ReferenceEquality")
public boolean equals(Object obj) {
- if (obj instanceof ConnectionId) {
- ConnectionId id = (ConnectionId) obj;
- return address.equals(id.address)
- && ((ticket != null && ticket.equals(id.ticket)) || (ticket == id.ticket))
- && Objects.equals(this.serviceName, id.serviceName);
+ if (obj == this) {
+ return true;
}
- return false;
+ if (!(obj instanceof ConnectionId)) {
+ return false;
+ }
+ ConnectionId id = (ConnectionId) obj;
+ return address.equals(id.address)
+ && ((ticket != null && ticket.equals(id.ticket)) || (ticket == id.ticket))
+ && Objects.equals(this.serviceName, id.serviceName);
}
@Override // simply use the default Object#hashcode() ?
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/HBaseRpcController.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/HBaseRpcController.java
index 01412631ef7..c60de7658f3 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/HBaseRpcController.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/HBaseRpcController.java
@@ -50,12 +50,14 @@ public interface HBaseRpcController extends RpcController, CellScannable {
void setCellScanner(CellScanner cellScanner);
/**
+ * Set the priority for this operation.
* @param priority Priority for this request; should fall roughly in the range
* {@link HConstants#NORMAL_QOS} to {@link HConstants#HIGH_QOS}
*/
void setPriority(int priority);
/**
+ * Set the priority for this operation.
* @param tn Set priority based off the table we are going against.
*/
void setPriority(final TableName tn);
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcDuplexHandler.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcDuplexHandler.java
index fe32189f81b..700093a3027 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcDuplexHandler.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/NettyRpcDuplexHandler.java
@@ -31,7 +31,6 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.apache.hbase.thirdparty.com.google.protobuf.Message;
-import org.apache.hbase.thirdparty.com.google.protobuf.Message.Builder;
import org.apache.hbase.thirdparty.com.google.protobuf.TextFormat;
import org.apache.hbase.thirdparty.io.netty.buffer.ByteBuf;
import org.apache.hbase.thirdparty.io.netty.buffer.ByteBufInputStream;
@@ -168,7 +167,7 @@ class NettyRpcDuplexHandler extends ChannelDuplexHandler {
}
Message value;
if (call.responseDefaultType != null) {
- Builder builder = call.responseDefaultType.newBuilderForType();
+ Message.Builder builder = call.responseDefaultType.newBuilderForType();
builder.mergeDelimitedFrom(in);
value = builder.build();
} else {
@@ -202,7 +201,7 @@ class NettyRpcDuplexHandler extends ChannelDuplexHandler {
}
}
- private void cleanupCalls(ChannelHandlerContext ctx, IOException error) {
+ private void cleanupCalls(IOException error) {
for (Call call : id2Call.values()) {
call.setException(error);
}
@@ -212,7 +211,7 @@ class NettyRpcDuplexHandler extends ChannelDuplexHandler {
@Override
public void channelInactive(ChannelHandlerContext ctx) throws Exception {
if (!id2Call.isEmpty()) {
- cleanupCalls(ctx, new ConnectionClosedException("Connection closed"));
+ cleanupCalls(new ConnectionClosedException("Connection closed"));
}
conn.shutdown();
ctx.fireChannelInactive();
@@ -221,7 +220,7 @@ class NettyRpcDuplexHandler extends ChannelDuplexHandler {
@Override
public void exceptionCaught(ChannelHandlerContext ctx, Throwable cause) {
if (!id2Call.isEmpty()) {
- cleanupCalls(ctx, IPCUtil.toIOE(cause));
+ cleanupCalls(IPCUtil.toIOE(cause));
}
conn.shutdown();
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClient.java
index 6ecff49e52b..045216e8881 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClient.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/ipc/RpcClient.java
@@ -90,8 +90,8 @@ public interface RpcClient extends Closeable {
void close();
/**
- * @return true when this client uses a {@link org.apache.hadoop.hbase.codec.Codec} and so
- * supports cell blocks.
+ * Return true when this client uses a {@link org.apache.hadoop.hbase.codec.Codec} and so supports
+ * cell blocks.
*/
boolean hasCellBlockSupport();
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/master/RegionState.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/master/RegionState.java
index 2b0f2f4509e..a78ece64d7f 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/master/RegionState.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/master/RegionState.java
@@ -417,12 +417,13 @@ public class RegionState {
*/
@Override
public boolean equals(Object obj) {
- if (this == obj) return true;
- if (obj == null || getClass() != obj.getClass()) {
+ if (this == obj) {
+ return true;
+ }
+ if (!(obj instanceof RegionState)) {
return false;
}
RegionState tmp = (RegionState) obj;
-
return RegionInfo.COMPARATOR.compare(tmp.hri, hri) == 0 && tmp.state == state
&& ((serverName != null && serverName.equals(tmp.serverName))
|| (tmp.serverName == null && serverName == null));
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufMagic.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufMagic.java
index 15a196cbe5b..4a285ba40be 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufMagic.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/protobuf/ProtobufMagic.java
@@ -35,8 +35,7 @@ public class ProtobufMagic {
public static final byte[] PB_MAGIC = new byte[] { 'P', 'B', 'U', 'F' };
/**
- * @param bytes Bytes to check.
- * @return True if passed <code>bytes</code> has {@link #PB_MAGIC} for a prefix.
+ * Returns True if passed <code>bytes</code> has {@link #PB_MAGIC} for a prefix.
*/
public static boolean isPBMagicPrefix(final byte[] bytes) {
if (bytes == null) return false;
@@ -66,12 +65,7 @@ public class ProtobufMagic {
return length1 - length2;
}
- /**
- * @param bytes Bytes to check.
- * @param offset offset to start at
- * @param len length to use
- * @return True if passed <code>bytes</code> has {@link #PB_MAGIC} for a prefix.
- */
+ /** Returns True if passed <code>bytes</code> has {@link #PB_MAGIC} for a prefix. */
public static boolean isPBMagicPrefix(final byte[] bytes, int offset, int len) {
if (bytes == null || len < PB_MAGIC.length) return false;
return compareTo(PB_MAGIC, 0, PB_MAGIC.length, bytes, offset, PB_MAGIC.length) == 0;
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaRetriever.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaRetriever.java
index 728959e0a0c..1dd5bf275bb 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaRetriever.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/QuotaRetriever.java
@@ -19,8 +19,8 @@ package org.apache.hadoop.hbase.quotas;
import java.io.Closeable;
import java.io.IOException;
+import java.util.ArrayDeque;
import java.util.Iterator;
-import java.util.LinkedList;
import java.util.Objects;
import java.util.Queue;
import org.apache.hadoop.conf.Configuration;
@@ -43,7 +43,7 @@ import org.slf4j.LoggerFactory;
public class QuotaRetriever implements Closeable, Iterable<QuotaSettings> {
private static final Logger LOG = LoggerFactory.getLogger(QuotaRetriever.class);
- private final Queue<QuotaSettings> cache = new LinkedList<>();
+ private final Queue<QuotaSettings> cache = new ArrayDeque<>();
private ResultScanner scanner;
/**
* Connection to use. Could pass one in and have this class use it but this class wants to be
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/SpaceLimitSettings.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/SpaceLimitSettings.java
index 878cbe871e5..4a230338598 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/SpaceLimitSettings.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/quotas/SpaceLimitSettings.java
@@ -23,7 +23,7 @@ import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;
import org.apache.hadoop.hbase.shaded.protobuf.ProtobufUtil;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaRequest.Builder;
+import org.apache.hadoop.hbase.shaded.protobuf.generated.MasterProtos.SetQuotaRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceLimitRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.QuotaProtos.SpaceQuota;
@@ -113,7 +113,7 @@ class SpaceLimitSettings extends QuotaSettings {
}
@Override
- protected void setupSetQuotaRequest(Builder builder) {
+ protected void setupSetQuotaRequest(SetQuotaRequest.Builder builder) {
// TableName/Namespace are serialized in QuotaSettings
builder.setSpaceLimit(proto);
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java
index c2dc1042c91..dfb0a49db4a 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/SaslUtil.java
@@ -29,6 +29,8 @@ import org.apache.yetus.audience.InterfaceAudience;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
+import org.apache.hbase.thirdparty.com.google.common.base.Splitter;
+
@InterfaceAudience.Private
public class SaslUtil {
private static final Logger LOG = LoggerFactory.getLogger(SaslUtil.class);
@@ -62,7 +64,7 @@ public class SaslUtil {
/** Splitting fully qualified Kerberos name into parts */
public static String[] splitKerberosName(String fullName) {
- return fullName.split("[/@]");
+ return Splitter.onPattern("[/@]").splitToStream(fullName).toArray(String[]::new);
}
public static String encodeIdentifier(byte[] identifier) {
@@ -93,6 +95,7 @@ public class SaslUtil {
}
/**
+ * Initialize SASL properties for a given RPC protection level.
* @param rpcProtection Value of 'hbase.rpc.protection' configuration.
* @return Map with values for SASL properties.
*/
@@ -101,10 +104,9 @@ public class SaslUtil {
if (rpcProtection.isEmpty()) {
saslQop = QualityOfProtection.AUTHENTICATION.getSaslQop();
} else {
- String[] qops = rpcProtection.split(",");
StringBuilder saslQopBuilder = new StringBuilder();
- for (int i = 0; i < qops.length; ++i) {
- QualityOfProtection qop = getQop(qops[i]);
+ for (String s : Splitter.on(',').split(rpcProtection)) {
+ QualityOfProtection qop = getQop(s);
saslQopBuilder.append(",").append(qop.getSaslQop());
}
saslQop = saslQopBuilder.substring(1); // remove first ','
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlClient.java
index b6986b564ac..e30041d46c4 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlClient.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlClient.java
@@ -29,16 +29,11 @@ import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.ZooKeeperConnectionException;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
-import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.security.SecurityCapability;
-import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.yetus.audience.InterfaceAudience;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.AccessControlService.BlockingInterface;
-
/**
* Utility client for doing access control admin operations.
*/
@@ -67,17 +62,17 @@ public class AccessControlClient {
.contains(SecurityCapability.CELL_AUTHORIZATION);
}
- private static BlockingInterface getAccessControlServiceStub(Table ht) throws IOException {
- CoprocessorRpcChannel service = ht.coprocessorService(HConstants.EMPTY_START_ROW);
- BlockingInterface protocol = AccessControlProtos.AccessControlService.newBlockingStub(service);
- return protocol;
- }
-
/**
* Grants permission on the specified table for the specified user
- * @param connection The Connection instance to use nnnn * @param mergeExistingPermissions If set
- * to false, later granted permissions will override previous granted
- * permissions. otherwise, it'll merge with previous granted permissions. nn
+ * @param connection The Connection instance to use
+ * @param tableName the table name
+ * @param userName the user name
+ * @param family the column family
+ * @param qual the column qualifier
+ * @param mergeExistingPermissions If set to false, later granted permissions will override
+ * previous granted permissions. Otherwise, it'll merge with
+ * previous granted permissions.
+ * @param actions the actions
*/
private static void grant(Connection connection, final TableName tableName, final String userName,
final byte[] family, final byte[] qual, boolean mergeExistingPermissions,
@@ -90,7 +85,12 @@ public class AccessControlClient {
/**
* Grants permission on the specified table for the specified user. If permissions for a specified
* user exists, later granted permissions will override previous granted permissions.
- * @param connection The Connection instance to use nnnnnn
+ * @param connection The Connection instance to use
+ * @param tableName the table name
+ * @param userName the user name
+ * @param family the column family
+ * @param qual the column qualifier
+ * @param actions the actions
*/
public static void grant(Connection connection, final TableName tableName, final String userName,
final byte[] family, final byte[] qual, final Permission.Action... actions) throws Throwable {
@@ -98,9 +98,14 @@ public class AccessControlClient {
}
/**
- * Grants permission on the specified namespace for the specified user. nnn * @param
- * mergeExistingPermissions If set to false, later granted permissions will override previous
- * granted permissions. otherwise, it'll merge with previous granted permissions. nn
+ * Grants permission on the specified namespace for the specified user.
+ * @param connection The Connection instance to use
+ * @param namespace the namespace
+ * @param userName the user name
+ * @param mergeExistingPermissions If set to false, later granted permissions will override
+ * previous granted permissions. Otherwise, it'll merge with
+ * previous granted permissions.
+ * @param actions the actions
*/
private static void grant(Connection connection, final String namespace, final String userName,
boolean mergeExistingPermissions, final Permission.Action... actions) throws Throwable {
@@ -113,7 +118,10 @@ public class AccessControlClient {
* Grants permission on the specified namespace for the specified user. If permissions on the
* specified namespace exists, later granted permissions will override previous granted
* permissions.
- * @param connection The Connection instance to use nnnn
+ * @param connection The Connection instance to use
+ * @param namespace the namespace
+ * @param userName the user name
+ * @param actions the actions
*/
public static void grant(Connection connection, final String namespace, final String userName,
final Permission.Action... actions) throws Throwable {
@@ -121,9 +129,13 @@ public class AccessControlClient {
}
/**
- * Grant global permissions for the specified user. nn * @param mergeExistingPermissions If set to
- * false, later granted permissions will override previous granted permissions. otherwise, it'll
- * merge with previous granted permissions. nn
+ * Grant global permissions for the specified user.
+ * @param connection The Connection instance to use
+ * @param userName the user name
+ * @param mergeExistingPermissions If set to false, later granted permissions will override
+ * previous granted permissions. Otherwise, it'll merge with
+ * previous granted permissions.
+ * @param actions the actions
*/
private static void grant(Connection connection, final String userName,
boolean mergeExistingPermissions, final Permission.Action... actions) throws Throwable {
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlUtil.java
index 066e6f4e04d..e0eb79aa025 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/AccessControlUtil.java
@@ -42,9 +42,6 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.AccessControlService;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.GetUserPermissionsResponse;
-/**
- * @since 2.0.0
- */
@InterfaceAudience.Private
public class AccessControlUtil {
private AccessControlUtil() {
@@ -226,13 +223,12 @@ public class AccessControlUtil {
*/
public static TablePermission toTablePermission(AccessControlProtos.TablePermission proto) {
Permission.Action[] actions = toPermissionActions(proto.getActionList());
- TableName table = null;
- byte[] qualifier = null;
- byte[] family = null;
if (!proto.hasTableName()) {
throw new IllegalStateException("TableName cannot be empty");
}
- table = ProtobufUtil.toTableName(proto.getTableName());
+ TableName table = ProtobufUtil.toTableName(proto.getTableName());
+ byte[] family = null;
+ byte[] qualifier = null;
if (proto.hasFamily()) {
family = proto.getFamily().toByteArray();
}
@@ -265,13 +261,12 @@ public class AccessControlUtil {
if (proto.getType() == AccessControlProtos.Permission.Type.Table) {
AccessControlProtos.TablePermission perm = proto.getTablePermission();
Permission.Action[] actions = toPermissionActions(perm.getActionList());
- byte[] qualifier = null;
- byte[] family = null;
- TableName table = null;
if (!perm.hasTableName()) {
throw new IllegalStateException("TableName cannot be empty");
}
- table = ProtobufUtil.toTableName(perm.getTableName());
+ TableName table = ProtobufUtil.toTableName(perm.getTableName());
+ byte[] family = null;
+ byte[] qualifier = null;
if (perm.hasFamily()) {
family = perm.getFamily().toByteArray();
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/ShadedAccessControlUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/ShadedAccessControlUtil.java
index b6df2c94a04..cf7c797c98a 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/ShadedAccessControlUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/ShadedAccessControlUtil.java
@@ -21,7 +21,6 @@ import java.util.Collection;
import java.util.List;
import java.util.Map;
import org.apache.hadoop.hbase.TableName;
-import org.apache.hadoop.hbase.security.access.Permission.Action;
import org.apache.yetus.audience.InterfaceAudience;
import org.apache.hbase.thirdparty.com.google.common.collect.ArrayListMultimap;
@@ -32,7 +31,6 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.GetUserPermissionsResponse;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.GrantRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.HasUserPermissionsRequest;
-import org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.Permission.Type;
import org.apache.hadoop.hbase.shaded.protobuf.generated.AccessControlProtos.RevokeRequest;
import org.apache.hadoop.hbase.shaded.protobuf.generated.HBaseProtos;
@@ -122,12 +120,12 @@ public class ShadedAccessControlUtil {
if (proto.getType() == AccessControlProtos.Permission.Type.Global) {
AccessControlProtos.GlobalPermission perm = proto.getGlobalPermission();
- Action[] actions = toPermissionActions(perm.getActionList());
+ Permission.Action[] actions = toPermissionActions(perm.getActionList());
return Permission.newBuilder().withActions(actions).build();
}
if (proto.getType() == AccessControlProtos.Permission.Type.Namespace) {
AccessControlProtos.NamespacePermission perm = proto.getNamespacePermission();
- Action[] actions = toPermissionActions(perm.getActionList());
+ Permission.Action[] actions = toPermissionActions(perm.getActionList());
if (!proto.hasNamespacePermission()) {
throw new IllegalStateException("Namespace must not be empty in NamespacePermission");
@@ -137,7 +135,7 @@ public class ShadedAccessControlUtil {
}
if (proto.getType() == AccessControlProtos.Permission.Type.Table) {
AccessControlProtos.TablePermission perm = proto.getTablePermission();
- Action[] actions = toPermissionActions(perm.getActionList());
+ Permission.Action[] actions = toPermissionActions(perm.getActionList());
byte[] qualifier = null;
byte[] family = null;
@@ -292,14 +290,14 @@ public class ShadedAccessControlUtil {
}
if (request.getNamespace() != null && !request.getNamespace().isEmpty()) {
builder.setNamespaceName(ByteString.copyFromUtf8(request.getNamespace()));
- builder.setType(Type.Namespace);
+ builder.setType(AccessControlProtos.Permission.Type.Namespace);
}
if (request.getTableName() != null) {
builder.setTableName(toProtoTableName(request.getTableName()));
- builder.setType(Type.Table);
+ builder.setType(AccessControlProtos.Permission.Type.Table);
}
if (!builder.hasType()) {
- builder.setType(Type.Global);
+ builder.setType(AccessControlProtos.Permission.Type.Global);
}
if (request.getFamily() != null && request.getFamily().length > 0) {
builder.setColumnFamily(ByteString.copyFrom(request.getFamily()));
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/TablePermission.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/TablePermission.java
index e0a12c7d431..65ebbef7da0 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/TablePermission.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/access/TablePermission.java
@@ -141,11 +141,11 @@ public class TablePermission extends Permission {
return false;
}
- if (family != null && !(CellUtil.matchingFamily(kv, family))) {
+ if (family != null && !CellUtil.matchingFamily(kv, family)) {
return false;
}
- if (qualifier != null && !(CellUtil.matchingQualifier(kv, qualifier))) {
+ if (qualifier != null && !CellUtil.matchingQualifier(kv, qualifier)) {
return false;
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java
index 42508782d1a..7bae98d59ba 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/security/visibility/VisibilityClient.java
@@ -88,7 +88,7 @@ public class VisibilityClient {
for (String label : labels) {
if (label.length() > 0) {
VisibilityLabel.Builder newBuilder = VisibilityLabel.newBuilder();
- newBuilder.setLabel(UnsafeByteOperations.unsafeWrap((Bytes.toBytes(label))));
+ newBuilder.setLabel(UnsafeByteOperations.unsafeWrap(Bytes.toBytes(label)));
builder.addVisLabel(newBuilder.build());
}
}
@@ -117,8 +117,10 @@ public class VisibilityClient {
}
/**
- * @param connection the Connection instance to use. n * @return labels, the given user is
- * globally authorized for. n
+ * Get the authorization for a given user
+ * @param connection the Connection instance to use
+ * @param user the user
+ * @return labels the given user is globally authorized for
*/
public static GetAuthsResponse getAuths(Connection connection, final String user)
throws Throwable {
@@ -212,7 +214,7 @@ public class VisibilityClient {
setAuthReqBuilder.setUser(UnsafeByteOperations.unsafeWrap(Bytes.toBytes(user)));
for (String auth : auths) {
if (auth.length() > 0) {
- setAuthReqBuilder.addAuth((ByteString.copyFromUtf8(auth)));
+ setAuthReqBuilder.addAuth(ByteString.copyFromUtf8(auth));
}
}
if (setOrClear) {
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
index 27bc29702f8..35c361be562 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/ProtobufUtil.java
@@ -51,7 +51,6 @@ import org.apache.hadoop.hbase.ByteBufferExtendedCell;
import org.apache.hadoop.hbase.CacheEvictionStats;
import org.apache.hadoop.hbase.CacheEvictionStatsBuilder;
import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.Cell.Type;
import org.apache.hadoop.hbase.CellBuilderType;
import org.apache.hadoop.hbase.CellScanner;
import org.apache.hadoop.hbase.CellUtil;
@@ -309,25 +308,18 @@ public final class ProtobufUtil {
return Bytes.add(PB_MAGIC, bytes);
}
- /**
- * @param bytes Bytes to check.
- * @return True if passed bytes has {@link ProtobufMagic#PB_MAGIC} for a prefix.
- */
+ /** Returns True if passed bytes has {@link ProtobufMagic#PB_MAGIC} for a prefix. */
public static boolean isPBMagicPrefix(final byte[] bytes) {
return ProtobufMagic.isPBMagicPrefix(bytes);
}
- /**
- * @param bytes Bytes to check.
- * @param offset offset to start at
- * @param len length to use
- * @return True if passed bytes has {@link ProtobufMagic#PB_MAGIC} for a prefix.
- */
+ /** Returns True if passed bytes has {@link ProtobufMagic#PB_MAGIC} for a prefix. */
public static boolean isPBMagicPrefix(final byte[] bytes, int offset, int len) {
return ProtobufMagic.isPBMagicPrefix(bytes, offset, len);
}
/**
+ * Expect the {@link ProtobufMagic#PB_MAGIC} or throw an exception.
* @param bytes bytes to check
* @throws DeserializationException if we are missing the pb magic prefix
*/
@@ -732,7 +724,7 @@ public final class ProtobufUtil {
} else {
put.add(cellBuilder.clear().setRow(put.getRow()).setFamily(family)
.setQualifier(qv.hasQualifier() ? qv.getQualifier().toByteArray() : null)
- .setTimestamp(ts).setType(Type.Put)
+ .setTimestamp(ts).setType(Cell.Type.Put)
.setValue(qv.hasValue() ? qv.getValue().toByteArray() : null).build());
}
}
@@ -889,9 +881,9 @@ public final class ProtobufUtil {
}
/**
- * Convert a protocol buffer Mutate to an Append n * @param proto the protocol buffer Mutate to
- * convert
- * @return the converted client Append n
+ * Convert a protocol buffer Mutate to an Append
+ * @param proto the protocol buffer Mutate to convert
+ * @return the converted client Append
*/
public static Append toAppend(final MutationProto proto, final CellScanner cellScanner)
throws IOException {
@@ -909,7 +901,7 @@ public final class ProtobufUtil {
/**
* Convert a protocol buffer Mutate to an Increment
* @param proto the protocol buffer Mutate to convert
- * @return the converted client Increment n
+ * @return the converted client Increment
*/
public static Increment toIncrement(final MutationProto proto, final CellScanner cellScanner)
throws IOException {
@@ -1678,7 +1670,6 @@ public final class ProtobufUtil {
final RpcController controller, final AdminService.BlockingInterface admin,
final byte[] regionName) throws IOException {
try {
- GetRegionInfoRequest request = getGetRegionInfoRequest(regionName);
GetRegionInfoResponse response =
admin.getRegionInfo(controller, getGetRegionInfoRequest(regionName));
return toRegionInfo(response.getRegionInfo());
@@ -1840,9 +1831,6 @@ public final class ProtobufUtil {
return rl.getReadRequestsCount() + rl.getWriteRequestsCount();
}
- /**
- * @param m Message to get delimited pb serialization of (with pb magic prefix)
- */
public static byte[] toDelimitedByteArray(final Message m) throws IOException {
// Allocate arbitrary big size so we avoid resizing.
ByteArrayOutputStream baos = new ByteArrayOutputStream(4096);
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/RequestConverter.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/RequestConverter.java
index 79efe9c8fa1..13e85f91c95 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/RequestConverter.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/shaded/protobuf/RequestConverter.java
@@ -704,6 +704,7 @@ public final class RequestConverter {
}
/**
+ * Create a protocol buffer GetRegionInfoRequest.
* @param regionName the name of the region to get info
* @param includeCompactionState indicate if the compaction state is requested
* @param includeBestSplitRow indicate if the bestSplitRow is requested
@@ -912,7 +913,7 @@ public final class RequestConverter {
public static DeleteColumnRequest buildDeleteColumnRequest(final TableName tableName,
final byte[] columnName, final long nonceGroup, final long nonce) {
DeleteColumnRequest.Builder builder = DeleteColumnRequest.newBuilder();
- builder.setTableName(ProtobufUtil.toProtoTableName((tableName)));
+ builder.setTableName(ProtobufUtil.toProtoTableName(tableName));
builder.setColumnName(UnsafeByteOperations.unsafeWrap(columnName));
builder.setNonceGroup(nonceGroup);
builder.setNonce(nonce);
@@ -925,7 +926,7 @@ public final class RequestConverter {
public static ModifyColumnRequest buildModifyColumnRequest(final TableName tableName,
final ColumnFamilyDescriptor column, final long nonceGroup, final long nonce) {
ModifyColumnRequest.Builder builder = ModifyColumnRequest.newBuilder();
- builder.setTableName(ProtobufUtil.toProtoTableName((tableName)));
+ builder.setTableName(ProtobufUtil.toProtoTableName(tableName));
builder.setColumnFamilies(ProtobufUtil.toColumnFamilySchema(column));
builder.setNonceGroup(nonceGroup);
builder.setNonce(nonce);
@@ -937,7 +938,7 @@ public final class RequestConverter {
final long nonce) {
ModifyColumnStoreFileTrackerRequest.Builder builder =
ModifyColumnStoreFileTrackerRequest.newBuilder();
- builder.setTableName(ProtobufUtil.toProtoTableName((tableName)));
+ builder.setTableName(ProtobufUtil.toProtoTableName(tableName));
builder.setFamily(ByteString.copyFrom(family));
builder.setDstSft(dstSFT);
builder.setNonceGroup(nonceGroup);
@@ -1059,7 +1060,7 @@ public final class RequestConverter {
public static DisableTableRequest buildDisableTableRequest(final TableName tableName,
final long nonceGroup, final long nonce) {
DisableTableRequest.Builder builder = DisableTableRequest.newBuilder();
- builder.setTableName(ProtobufUtil.toProtoTableName((tableName)));
+ builder.setTableName(ProtobufUtil.toProtoTableName(tableName));
builder.setNonceGroup(nonceGroup);
builder.setNonce(nonce);
return builder.build();
@@ -1088,7 +1089,7 @@ public final class RequestConverter {
public static ModifyTableRequest buildModifyTableRequest(final TableName tableName,
final TableDescriptor tableDesc, final long nonceGroup, final long nonce) {
ModifyTableRequest.Builder builder = ModifyTableRequest.newBuilder();
- builder.setTableName(ProtobufUtil.toProtoTableName((tableName)));
+ builder.setTableName(ProtobufUtil.toProtoTableName(tableName));
builder.setTableSchema(ProtobufUtil.toTableSchema(tableDesc));
builder.setNonceGroup(nonceGroup);
builder.setNonce(nonce);
@@ -1099,7 +1100,7 @@ public final class RequestConverter {
final TableName tableName, final String dstSFT, final long nonceGroup, final long nonce) {
ModifyTableStoreFileTrackerRequest.Builder builder =
ModifyTableStoreFileTrackerRequest.newBuilder();
- builder.setTableName(ProtobufUtil.toProtoTableName((tableName)));
+ builder.setTableName(ProtobufUtil.toProtoTableName(tableName));
builder.setDstSft(dstSFT);
builder.setNonceGroup(nonceGroup);
builder.setNonce(nonce);
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/util/Writables.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/util/Writables.java
index 7ebbbf44ceb..091515c325e 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/util/Writables.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/util/Writables.java
@@ -34,6 +34,7 @@ import org.apache.yetus.audience.InterfaceAudience;
@InterfaceAudience.Private
public class Writables {
/**
+ * Get the Writable's contents as a byte array
* @param w writable
* @return The bytes of w gotten by running its
* {@link Writable#write(java.io.DataOutput)} method.
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ReadOnlyZKClient.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ReadOnlyZKClient.java
index 96170736208..979094fda80 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ReadOnlyZKClient.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ReadOnlyZKClient.java
@@ -183,6 +183,8 @@ public final class ReadOnlyZKClient implements Closeable {
try {
zk.close();
} catch (InterruptedException e) {
+ // Restore interrupt status
+ Thread.currentThread().interrupt();
}
}
if (ZKTask.this.delay(retryIntervalMs, maxRetries)) {
@@ -303,6 +305,8 @@ public final class ReadOnlyZKClient implements Closeable {
try {
zookeeper.close();
} catch (InterruptedException e) {
+ // Restore interrupt status
+ Thread.currentThread().interrupt();
}
zookeeper = null;
}
diff --git a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKMetadata.java b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKMetadata.java
index f0fae958a66..b795ad45aa1 100644
--- a/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKMetadata.java
+++ b/hbase-client/src/main/java/org/apache/hadoop/hbase/zookeeper/ZKMetadata.java
@@ -54,7 +54,7 @@ public class ZKMetadata {
pos = Bytes.putInt(newData, pos, idLength);
pos = Bytes.putBytes(newData, pos, id, 0, id.length);
pos = Bytes.putBytes(newData, pos, salt, 0, salt.length);
- pos = Bytes.putBytes(newData, pos, data, 0, data.length);
+ Bytes.putBytes(newData, pos, data, 0, data.length);
return newData;
}
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableRpcPriority.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableRpcPriority.java
index db3ccebe89d..e57967ae721 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableRpcPriority.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableRpcPriority.java
@@ -45,7 +45,6 @@ import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.Cell.Type;
import org.apache.hadoop.hbase.CellBuilderFactory;
import org.apache.hadoop.hbase.CellBuilderType;
import org.apache.hadoop.hbase.HBaseClassTestRule;
@@ -135,10 +134,11 @@ public class TestAsyncTableRpcPriority {
case INCREMENT:
ColumnValue value = req.getColumnValue(0);
QualifierValue qvalue = value.getQualifierValue(0);
- Cell cell = CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY).setType(Type.Put)
- .setRow(req.getRow().toByteArray()).setFamily(value.getFamily().toByteArray())
- .setQualifier(qvalue.getQualifier().toByteArray())
- .setValue(qvalue.getValue().toByteArray()).build();
+ Cell cell =
+ CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY).setType(Cell.Type.Put)
+ .setRow(req.getRow().toByteArray()).setFamily(value.getFamily().toByteArray())
+ .setQualifier(qvalue.getQualifier().toByteArray())
+ .setValue(qvalue.getValue().toByteArray()).build();
resp = MutateResponse.newBuilder()
.setResult(ProtobufUtil.toResult(Result.create(Arrays.asList(cell)))).build();
break;
@@ -496,8 +496,8 @@ public class TestAsyncTableRpcPriority {
assertFalse("close scanner should not come in with scan priority " + scanPriority,
req.hasCloseScanner() && req.getCloseScanner());
- Cell cell = CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY).setType(Type.Put)
- .setRow(Bytes.toBytes(scanNextCalled.incrementAndGet()))
+ Cell cell = CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
+ .setType(Cell.Type.Put).setRow(Bytes.toBytes(scanNextCalled.incrementAndGet()))
.setFamily(Bytes.toBytes("cf")).setQualifier(Bytes.toBytes("cq"))
.setValue(Bytes.toBytes("v")).build();
Result result = Result.create(Arrays.asList(cell));
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableTracing.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableTracing.java
index 99e52361109..f9b86221af1 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableTracing.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestAsyncTableTracing.java
@@ -54,7 +54,6 @@ import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.Cell.Type;
import org.apache.hadoop.hbase.CellBuilderFactory;
import org.apache.hadoop.hbase.CellBuilderType;
import org.apache.hadoop.hbase.HBaseClassTestRule;
@@ -135,8 +134,8 @@ public class TestAsyncTableTracing {
if (req.hasCloseScanner() && req.getCloseScanner()) {
done.run(ScanResponse.getDefaultInstance());
} else {
- Cell cell = CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY).setType(Type.Put)
- .setRow(Bytes.toBytes(scanNextCalled.incrementAndGet()))
+ Cell cell = CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY)
+ .setType(Cell.Type.Put).setRow(Bytes.toBytes(scanNextCalled.incrementAndGet()))
.setFamily(Bytes.toBytes("cf")).setQualifier(Bytes.toBytes("cq"))
.setValue(Bytes.toBytes("v")).build();
Result result = Result.create(Arrays.asList(cell));
@@ -183,10 +182,11 @@ public class TestAsyncTableTracing {
case INCREMENT:
ColumnValue value = req.getColumnValue(0);
QualifierValue qvalue = value.getQualifierValue(0);
- Cell cell = CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY).setType(Type.Put)
- .setRow(req.getRow().toByteArray()).setFamily(value.getFamily().toByteArray())
- .setQualifier(qvalue.getQualifier().toByteArray())
- .setValue(qvalue.getValue().toByteArray()).build();
+ Cell cell =
+ CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY).setType(Cell.Type.Put)
+ .setRow(req.getRow().toByteArray()).setFamily(value.getFamily().toByteArray())
+ .setQualifier(qvalue.getQualifier().toByteArray())
+ .setValue(qvalue.getValue().toByteArray()).build();
resp = MutateResponse.newBuilder()
.setResult(ProtobufUtil.toResult(Result.create(Arrays.asList(cell)))).build();
break;
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestCoprocessorDescriptor.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestCoprocessorDescriptor.java
index f1a8e000136..a56c863ce61 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestCoprocessorDescriptor.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestCoprocessorDescriptor.java
@@ -33,8 +33,6 @@ import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;
-import org.slf4j.Logger;
-import org.slf4j.LoggerFactory;
@Category({ MiscTests.class, SmallTests.class })
public class TestCoprocessorDescriptor {
@@ -43,8 +41,6 @@ public class TestCoprocessorDescriptor {
public static final HBaseClassTestRule CLASS_RULE =
HBaseClassTestRule.forClass(TestCoprocessorDescriptor.class);
- private static final Logger LOG = LoggerFactory.getLogger(TestCoprocessorDescriptor.class);
-
@Rule
public TestName name = new TestName();
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestGet.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestGet.java
index 44a1c577b10..69c33c833b0 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestGet.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestGet.java
@@ -250,8 +250,7 @@ public class TestGet {
public void testGetRowConstructor() {
byte[] row1 = Bytes.toBytes("testRow");
byte[] row2 = Bytes.toBytes("testtestRow");
- ByteBuffer rowBuffer = ByteBuffer.allocate(16);
- rowBuffer = ByteBuffer.wrap(row1);
+ ByteBuffer rowBuffer = ByteBuffer.wrap(row1);
Get get1 = new Get(rowBuffer);
Get get2 = new Get(row2, 4, 7);
Assert.assertArrayEquals(get1.getRow(), get2.getRow());
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestMutation.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestMutation.java
index dcb4d6eb88a..718ca05b92f 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestMutation.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestMutation.java
@@ -23,7 +23,6 @@ import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.util.List;
import org.apache.hadoop.hbase.Cell;
-import org.apache.hadoop.hbase.Cell.Type;
import org.apache.hadoop.hbase.CellBuilderFactory;
import org.apache.hadoop.hbase.CellBuilderType;
import org.apache.hadoop.hbase.CellUtil;
@@ -51,7 +50,7 @@ public class TestMutation {
byte[] family = Bytes.toBytes("CF-01");
origin.add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY).setRow(origin.getRow())
- .setFamily(family).setQualifier(Bytes.toBytes("q")).setType(Type.Put)
+ .setFamily(family).setQualifier(Bytes.toBytes("q")).setType(Cell.Type.Put)
.setValue(Bytes.toBytes(100)).build());
origin.addColumn(family, Bytes.toBytes("q0"), Bytes.toBytes("value"));
origin.setTimeRange(100, 1000);
@@ -89,7 +88,7 @@ public class TestMutation {
byte[] family = Bytes.toBytes("CF-01");
origin.add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY).setRow(origin.getRow())
- .setFamily(family).setQualifier(Bytes.toBytes("q")).setType(Type.Delete).build());
+ .setFamily(family).setQualifier(Bytes.toBytes("q")).setType(Cell.Type.Delete).build());
origin.addColumn(family, Bytes.toBytes("q0"));
origin.addColumns(family, Bytes.toBytes("q1"));
origin.addFamily(family);
@@ -187,10 +186,11 @@ public class TestMutation {
Put put = new Put(row, true);
put
.add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY).setRow(row).setFamily(family)
- .setQualifier(qualifier0).setTimestamp(put.getTimestamp()).setType(Type.Put)
+ .setQualifier(qualifier0).setTimestamp(put.getTimestamp()).setType(Cell.Type.Put)
.setValue(value0).build())
.add(CellBuilderFactory.create(CellBuilderType.SHALLOW_COPY).setRow(row).setFamily(family)
- .setQualifier(qualifier1).setTimestamp(ts1).setType(Type.Put).setValue(value1).build());
+ .setQualifier(qualifier1).setTimestamp(ts1).setType(Cell.Type.Put).setValue(value1)
+ .build());
// Verify the cell of family:qualifier0
Cell cell0 = put.get(family, qualifier0).get(0);
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestRegionInfoDisplay.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestRegionInfoDisplay.java
index 0403ca64735..6e665dd55fc 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestRegionInfoDisplay.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestRegionInfoDisplay.java
@@ -87,8 +87,8 @@ public class TestRegionInfoDisplay {
String firstPartOrig = origDesc.substring(0, origDesc.indexOf(Bytes.toStringBinary(startKey)));
String secondPartOrig = origDesc.substring(
origDesc.indexOf(Bytes.toStringBinary(startKey)) + Bytes.toStringBinary(startKey).length());
- assert (firstPart.equals(firstPartOrig));
- assert (secondPart.equals(secondPartOrig));
+ Assert.assertTrue(firstPart.equals(firstPartOrig));
+ Assert.assertTrue(secondPart.equals(secondPartOrig));
}
private void checkEquality(RegionInfo ri, Configuration conf) throws IOException {
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestScan.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestScan.java
index 4023d745c06..4b124c68f86 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestScan.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestScan.java
@@ -164,22 +164,17 @@ public class TestScan {
@Test
public void testSetAuthorizations() {
Scan scan = new Scan();
- try {
- scan.setAuthorizations(new Authorizations("\u002b|\u0029"));
- scan.setAuthorizations(new Authorizations("A", "B", "0123", "A0", "1A1", "_a"));
- scan.setAuthorizations(new Authorizations("A|B"));
- scan.setAuthorizations(new Authorizations("A&B"));
- scan.setAuthorizations(new Authorizations("!B"));
- scan.setAuthorizations(new Authorizations("A", "(A)"));
- scan.setAuthorizations(new Authorizations("A", "{A"));
- scan.setAuthorizations(new Authorizations(" "));
- scan.setAuthorizations(new Authorizations(":B"));
- scan.setAuthorizations(new Authorizations("-B"));
- scan.setAuthorizations(new Authorizations(".B"));
- scan.setAuthorizations(new Authorizations("/B"));
- } catch (IllegalArgumentException e) {
- fail("should not throw exception");
- }
+ scan.setAuthorizations(new Authorizations("A", "B", "0123", "A0", "1A1", "_a"));
+ scan.setAuthorizations(new Authorizations("A|B"));
+ scan.setAuthorizations(new Authorizations("A&B"));
+ scan.setAuthorizations(new Authorizations("!B"));
+ scan.setAuthorizations(new Authorizations("A", "(A)"));
+ scan.setAuthorizations(new Authorizations("A", "{A"));
+ scan.setAuthorizations(new Authorizations(" "));
+ scan.setAuthorizations(new Authorizations(":B"));
+ scan.setAuthorizations(new Authorizations("-B"));
+ scan.setAuthorizations(new Authorizations(".B"));
+ scan.setAuthorizations(new Authorizations("/B"));
}
@Test
@@ -192,8 +187,7 @@ public class TestScan {
scan.withStartRow(new byte[HConstants.MAX_ROW_LENGTH + 1]);
fail("should've thrown exception");
} catch (IllegalArgumentException iae) {
- } catch (Exception e) {
- fail("expected IllegalArgumentException to be thrown");
+ // Expected
}
scan.withStopRow(null);
@@ -203,8 +197,7 @@ public class TestScan {
scan.withStopRow(new byte[HConstants.MAX_ROW_LENGTH + 1]);
fail("should've thrown exception");
} catch (IllegalArgumentException iae) {
- } catch (Exception e) {
- fail("expected IllegalArgumentException to be thrown");
+ // Expected
}
}
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSimpleRequestController.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSimpleRequestController.java
index a7fcac95ee9..17dcfb508d5 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSimpleRequestController.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/client/TestSimpleRequestController.java
@@ -27,6 +27,7 @@ import java.io.InterruptedIOException;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
+import java.util.TreeMap;
import java.util.concurrent.BrokenBarrierException;
import java.util.concurrent.CyclicBarrier;
import java.util.concurrent.atomic.AtomicInteger;
@@ -102,6 +103,7 @@ public class TestSimpleRequestController {
new SimpleRequestController(conf);
fail("The " + key + " must be bigger than zero");
} catch (IllegalArgumentException e) {
+ // Expected
}
}
@@ -121,7 +123,7 @@ public class TestSimpleRequestController {
final int maxConcurrentTasksPerRegion = 1;
final AtomicLong tasksInProgress = new AtomicLong(0);
final Map taskCounterPerServer = new HashMap<>();
- final Map taskCounterPerRegion = new HashMap<>();
+ final Map taskCounterPerRegion = new TreeMap<>(Bytes.BYTES_COMPARATOR);
SimpleRequestController.TaskCountChecker countChecker =
new SimpleRequestController.TaskCountChecker(maxTotalConcurrentTasks,
maxConcurrentTasksPerServer, maxConcurrentTasksPerRegion, tasksInProgress,
@@ -284,7 +286,7 @@ public class TestSimpleRequestController {
int maxConcurrentTasksPerRegion = 1;
AtomicLong tasksInProgress = new AtomicLong(0);
Map taskCounterPerServer = new HashMap<>();
- Map taskCounterPerRegion = new HashMap<>();
+ Map taskCounterPerRegion = new TreeMap<>(Bytes.BYTES_COMPARATOR);
SimpleRequestController.TaskCountChecker checker = new SimpleRequestController.TaskCountChecker(
maxTotalConcurrentTasks, maxConcurrentTasksPerServer, maxConcurrentTasksPerRegion,
tasksInProgress, taskCounterPerServer, taskCounterPerRegion);
@@ -358,10 +360,8 @@ public class TestSimpleRequestController {
try {
barrier.await();
controller.waitForMaximumCurrentTasks(max.get(), 123, 1, null);
- } catch (InterruptedIOException e) {
+ } catch (InterruptedIOException | InterruptedException | BrokenBarrierException e) {
Assert.fail(e.getMessage());
- } catch (InterruptedException | BrokenBarrierException e) {
- e.printStackTrace();
}
};
// First test that our runnable thread only exits when tasks is zero.
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/filter/TestKeyOnlyFilter.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/filter/TestKeyOnlyFilter.java
index 860544ba135..0a888e838c1 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/filter/TestKeyOnlyFilter.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/filter/TestKeyOnlyFilter.java
@@ -27,7 +27,6 @@ import org.apache.hadoop.hbase.CellUtil;
import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.HBaseCommonTestingUtil;
import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.KeyValue.Type;
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.filter.KeyOnlyFilter.KeyOnlyByteBufferExtendedCell;
import org.apache.hadoop.hbase.filter.KeyOnlyFilter.KeyOnlyCell;
@@ -64,7 +63,8 @@ public class TestKeyOnlyFilter {
byte[] q = Bytes.toBytes("qual1");
byte[] v = Bytes.toBytes("val1");
byte[] tags = Bytes.toBytes("tag1");
- KeyValue kv = new KeyValue(r, f, q, 0, q.length, 1234L, Type.Put, v, 0, v.length, tags);
+ KeyValue kv =
+ new KeyValue(r, f, q, 0, q.length, 1234L, KeyValue.Type.Put, v, 0, v.length, tags);
ByteBuffer buffer = ByteBuffer.wrap(kv.getBuffer());
ByteBufferKeyValue bbCell = new ByteBufferKeyValue(buffer, 0, buffer.remaining());
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestCellBlockBuilder.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestCellBlockBuilder.java
index f9c93811b4e..fccea923635 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestCellBlockBuilder.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestCellBlockBuilder.java
@@ -154,7 +154,7 @@ public class TestCellBlockBuilder {
StopWatch timer = new StopWatch();
timer.start();
for (int i = 0; i < cycles; i++) {
- timerTest(builder, timer, count, size, codec, compressor, false);
+ timerTest(builder, count, size, codec, compressor, false);
}
timer.stop();
LOG.info("Codec=" + codec + ", compression=" + compressor + ", sized=" + false + ", count="
@@ -162,16 +162,15 @@ public class TestCellBlockBuilder {
timer.reset();
timer.start();
for (int i = 0; i < cycles; i++) {
- timerTest(builder, timer, count, size, codec, compressor, true);
+ timerTest(builder, count, size, codec, compressor, true);
}
timer.stop();
LOG.info("Codec=" + codec + ", compression=" + compressor + ", sized=" + true + ", count="
+ count + ", size=" + size + ", + took=" + timer.getTime() + "ms");
}
- private static void timerTest(final CellBlockBuilder builder, final StopWatch timer,
- final int count, final int size, final Codec codec, final CompressionCodec compressor,
- final boolean sized) throws IOException {
+ private static void timerTest(final CellBlockBuilder builder, final int count, final int size,
+ final Codec codec, final CompressionCodec compressor, final boolean sized) throws IOException {
doBuildCellBlockUndoCellBlock(builder, codec, compressor, count, size, sized);
}
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestNettyRpcConnection.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestNettyRpcConnection.java
index a9c40fd3bb7..73f3653a902 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestNettyRpcConnection.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/ipc/TestNettyRpcConnection.java
@@ -20,6 +20,7 @@ package org.apache.hadoop.hbase.ipc;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.junit.Assert.assertThrows;
+import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;
import java.io.IOException;
@@ -69,9 +70,8 @@ public class TestNettyRpcConnection {
@Test
public void testPrivateMethodExecutedInEventLoop() throws IllegalAccessException {
- // make sure the test is executed with "-ea"
assertThrows(AssertionError.class, () -> {
- assert false;
+ assertTrue(false);
});
for (Method method : NettyRpcConnection.class.getDeclaredMethods()) {
if (Modifier.isPrivate(method.getModifiers()) && !method.getName().contains("$")) {
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaGlobalsSettingsBypass.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaGlobalsSettingsBypass.java
index e2843180938..a3c96eb45aa 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaGlobalsSettingsBypass.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/quotas/TestQuotaGlobalsSettingsBypass.java
@@ -143,6 +143,7 @@ public class TestQuotaGlobalsSettingsBypass {
one.merge(two);
fail("Expected to see an Exception merging " + two + " into " + one);
} catch (IllegalArgumentException e) {
+ // Expected
}
}
}
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java
index 60bd80fb58f..7b42ba224fa 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/security/TestHBaseSaslRpcClient.java
@@ -138,14 +138,8 @@ public class TestHBaseSaslRpcClient {
@Test
public void testHBaseSaslRpcClientCreation() throws Exception {
// creation kerberos principal check section
- assertFalse(assertSuccessCreationKerberosPrincipal(null));
- assertFalse(assertSuccessCreationKerberosPrincipal("DOMAIN.COM"));
- assertFalse(assertSuccessCreationKerberosPrincipal("principal/DOMAIN.COM"));
- if (!assertSuccessCreationKerberosPrincipal("principal/localhost@DOMAIN.COM")) {
- // XXX: This can fail if kerberos support in the OS is not sane, see HBASE-10107.
- // For now, don't assert, just warn
- LOG.warn("Could not create a SASL client with valid Kerberos credential");
- }
+ // Note this is mocked in a way that doesn't care about principal names
+ assertFalse(assertSuccessCreationKerberos());
// creation digest principal check section
assertFalse(assertSuccessCreationDigestPrincipal(null, null));
@@ -155,9 +149,8 @@ public class TestHBaseSaslRpcClient {
assertTrue(assertSuccessCreationDigestPrincipal(DEFAULT_USER_NAME, DEFAULT_USER_PASSWORD));
// creation simple principal check section
- assertFalse(assertSuccessCreationSimplePrincipal("", ""));
- assertFalse(assertSuccessCreationSimplePrincipal(null, null));
- assertFalse(assertSuccessCreationSimplePrincipal(DEFAULT_USER_NAME, DEFAULT_USER_PASSWORD));
+ // Note this is mocked in a way that doesn't care about principal names
+ assertFalse(assertSuccessCreationSimple());
// exceptions check section
assertTrue(assertIOExceptionThenSaslClientIsNull(DEFAULT_USER_NAME, DEFAULT_USER_PASSWORD));
@@ -245,10 +238,11 @@ public class TestHBaseSaslRpcClient {
}
}
- private boolean assertSuccessCreationKerberosPrincipal(String principal) {
+ private boolean assertSuccessCreationKerberos() {
HBaseSaslRpcClient rpcClient = null;
try {
- rpcClient = createSaslRpcClientForKerberos(principal);
+ // createSaslRpcClientForKerberos is mocked in a way that doesn't care about principal names
+ rpcClient = createSaslRpcClientForKerberos();
} catch (Exception ex) {
LOG.error(ex.getMessage(), ex);
}
@@ -268,17 +262,17 @@ public class TestHBaseSaslRpcClient {
return rpcClient != null;
}
- private boolean assertSuccessCreationSimplePrincipal(String principal, String password) {
+ private boolean assertSuccessCreationSimple() {
HBaseSaslRpcClient rpcClient = null;
try {
- rpcClient = createSaslRpcClientSimple(principal, password);
+ rpcClient = createSaslRpcClientSimple();
} catch (Exception ex) {
LOG.error(ex.getMessage(), ex);
}
return rpcClient != null;
}
- private HBaseSaslRpcClient createSaslRpcClientForKerberos(String principal) throws IOException {
+ private HBaseSaslRpcClient createSaslRpcClientForKerberos() throws IOException {
return new HBaseSaslRpcClient(HBaseConfiguration.create(),
new GssSaslClientAuthenticationProvider(), createTokenMock(), Mockito.mock(InetAddress.class),
Mockito.mock(SecurityInfo.class), false);
@@ -294,8 +288,7 @@ public class TestHBaseSaslRpcClient {
return token;
}
- private HBaseSaslRpcClient createSaslRpcClientSimple(String principal, String password)
- throws IOException {
+ private HBaseSaslRpcClient createSaslRpcClientSimple() throws IOException {
return new HBaseSaslRpcClient(HBaseConfiguration.create(),
new SimpleSaslClientAuthenticationProvider(), createTokenMock(),
Mockito.mock(InetAddress.class), Mockito.mock(SecurityInfo.class), false);
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/util/TestRoundRobinPoolMap.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/util/TestRoundRobinPoolMap.java
index d7ce6265b8c..04ba2570eac 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/util/TestRoundRobinPoolMap.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/util/TestRoundRobinPoolMap.java
@@ -120,10 +120,10 @@ public class TestRoundRobinPoolMap extends PoolMapTestBase {
String value = Integer.toString(id.getAndIncrement());
String result = poolMap.getOrCreate(key, () -> value);
results.add(result);
-
- Thread.yield();
+ // Sleep for a short time to ensure a yield. Thread#yield has platform dependent behavior.
+ Thread.sleep(10);
}
- } catch (IOException e) {
+ } catch (Exception e) {
throw new CompletionException(e);
}
};
diff --git a/hbase-client/src/test/java/org/apache/hadoop/hbase/util/TestThreadLocalPoolMap.java b/hbase-client/src/test/java/org/apache/hadoop/hbase/util/TestThreadLocalPoolMap.java
index 45f533f1a73..d83944ef7f3 100644
--- a/hbase-client/src/test/java/org/apache/hadoop/hbase/util/TestThreadLocalPoolMap.java
+++ b/hbase-client/src/test/java/org/apache/hadoop/hbase/util/TestThreadLocalPoolMap.java
@@ -95,10 +95,10 @@ public class TestThreadLocalPoolMap extends PoolMapTestBase {
for (int i = 0; i < 3; i++) {
String result = poolMap.getOrCreate(key, () -> myId);
assertEquals(myId, result);
-
- Thread.yield();
+ // Sleep for a short period of time to yield. Thread#yield is platform dependent.
+ Thread.sleep(10);
}
- } catch (IOException e) {
+ } catch (Exception e) {
throw new CompletionException(e);
}
};