From d971a9d2985e41deff90781856771e02b00da7aa Mon Sep 17 00:00:00 2001
From: Mike Drob
Date: Thu, 23 Dec 2021 11:00:54 -0800
Subject: [PATCH] HBASE-26622 Update error-prone to 2.10

Author: Mike Drob
Co-authored-by: Nick Dimiduk
Signed-off-by: Andrew Purtell
---
 .../hbase/io/asyncfs/ProtobufDecoder.java          |  8 ++------
 .../java/org/apache/hadoop/hbase/KeyValue.java     | 17 ++++++++---------
 .../coprocessor/AggregateImplementation.java       | 10 +++++-----
 .../hbase/mapreduce/HFileOutputFormat2.java        |  2 +-
 .../regionserver/RegionCoprocessorHost.java        |  1 +
 .../hbase/security/token/TokenProvider.java        |  2 +-
 .../client/TestPutDeleteEtcCellIteration.java      | 12 +++++-------
 .../hadoop/hbase/codec/CodecPerformance.java       | 11 ++---------
 .../hbase/thrift/ThriftHBaseServiceHandler.java    |  1 -
 pom.xml                                            |  2 +-
 10 files changed, 26 insertions(+), 40 deletions(-)

diff --git a/hbase-asyncfs/src/main/java/org/apache/hadoop/hbase/io/asyncfs/ProtobufDecoder.java b/hbase-asyncfs/src/main/java/org/apache/hadoop/hbase/io/asyncfs/ProtobufDecoder.java
index d6e68f30542..3be9a2e49c1 100644
--- a/hbase-asyncfs/src/main/java/org/apache/hadoop/hbase/io/asyncfs/ProtobufDecoder.java
+++ b/hbase-asyncfs/src/main/java/org/apache/hadoop/hbase/io/asyncfs/ProtobufDecoder.java
@@ -132,17 +132,13 @@ public class ProtobufDecoder extends MessageToMessageDecoder<ByteBuf> {
     try {
       getParserForTypeMethod = protobufMessageLiteClass.getDeclaredMethod("getParserForType");
       newBuilderForTypeMethod = protobufMessageLiteClass.getDeclaredMethod("newBuilderForType");
+      // TODO: If this is false then the class will fail to load? Can refactor it out?
+      hasParser = true;
     } catch (NoSuchMethodException e) {
       // If the method is not found, we are in trouble. Abort.
       throw new RuntimeException(e);
     }
 
-    try {
-      protobufMessageLiteClass.getDeclaredMethod("getParserForType");
-      hasParser = true;
-    } catch (Throwable var2) {
-    }
-
     HAS_PARSER = hasParser;
   }
 }
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
index f4046e4af7b..c05d0be3e56 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
@@ -1102,10 +1102,10 @@ public class KeyValue implements ExtendedCell, Cloneable {
    */
   @Override
   public KeyValue clone() throws CloneNotSupportedException {
-    super.clone();
-    byte [] b = new byte[this.length];
-    System.arraycopy(this.bytes, this.offset, b, 0, this.length);
-    KeyValue ret = new KeyValue(b, 0, b.length);
+    KeyValue ret = (KeyValue) super.clone();
+    ret.bytes = Arrays.copyOf(this.bytes, this.bytes.length);
+    ret.offset = 0;
+    ret.length = ret.bytes.length;
     // Important to clone the memstoreTS as well - otherwise memstore's
     // update-in-place methods (eg increment) will end up creating
     // new entries
@@ -1720,8 +1720,8 @@ public class KeyValue implements ExtendedCell, Cloneable {
     }
 
     @Override
-    protected Object clone() throws CloneNotSupportedException {
-      return new MetaComparator();
+    protected MetaComparator clone() throws CloneNotSupportedException {
+      return (MetaComparator) super.clone();
     }
 
     /**
@@ -2248,9 +2248,8 @@ public class KeyValue implements ExtendedCell, Cloneable {
     }
 
     @Override
-    protected Object clone() throws CloneNotSupportedException {
-      super.clone();
-      return new KVComparator();
+    protected KVComparator clone() throws CloneNotSupportedException {
+      return (KVComparator) super.clone();
     }
   }
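Note for reviewers on the two files above: the old code called super.clone() and discarded the result (error-prone's ReturnValueIgnored), then built a fresh instance by hand, and the comparator overrides returned a raw Object. The fix uses the instance super.clone() returns and narrows the return type covariantly. A minimal sketch of the pattern, against a hypothetical Box class rather than HBase types:

  // Hypothetical example; Box is not an HBase class.
  public class Box implements Cloneable {
    private byte[] payload;

    public Box(byte[] payload) {
      this.payload = payload;
    }

    @Override
    public Box clone() throws CloneNotSupportedException {
      // Keep the instance super.clone() returns instead of discarding it.
      Box copy = (Box) super.clone();
      // Object.clone() makes a shallow copy, so deep-copy mutable state.
      copy.payload = java.util.Arrays.copyOf(payload, payload.length);
      return copy;
    }
  }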
diff --git a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java
index a7181f962cd..5571e1b14cb 100644
--- a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java
+++ b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java
@@ -53,11 +53,11 @@ import org.slf4j.LoggerFactory;
  * {@link ColumnInterpreter} is used to interpret column value. This class is
  * parameterized with the following (these are the types with which the {@link ColumnInterpreter}
  * is parameterized, and for more description on these, refer to {@link ColumnInterpreter}):
- * @param T Cell value data type
- * @param S Promoted data type
- * @param P PB message that is used to transport initializer specific bytes
- * @param Q PB message that is used to transport Cell (&lt;T&gt;) instance
- * @param R PB message that is used to transport Promoted (&lt;S&gt;) instance
+ * @param <T> Cell value data type
+ * @param <S> Promoted data type
+ * @param <P> PB message that is used to transport initializer specific bytes
+ * @param <Q> PB message that is used to transport Cell (&lt;T&gt;) instance
+ * @param <R> PB message that is used to transport Promoted (&lt;S&gt;) instance
  */
 @InterfaceAudience.Private
 public class AggregateImplementation
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java
index ca7c9a39f32..a3c3f11c5aa 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java
@@ -95,7 +95,7 @@ import org.slf4j.LoggerFactory;
 /**
  * Writes HFiles. Passed Cells must arrive in order.
  * Writes current time as the sequence id for the file. Sets the major compacted
- * attribute on created @{link {@link HFile}s. Calling write(null,null) will forcibly roll
+ * attribute on created {@link HFile}s. Calling write(null,null) will forcibly roll
  * all HFiles being written.
  * <p>
 * Using this class as part of a MapReduce job is best done
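The AggregateImplementation hunk addresses error-prone's InvalidParam check: a class-level type parameter must be documented as @param <T> with angle brackets, otherwise the tag names a parameter that does not exist. A generic illustration, not HBase code:

  /**
   * Maps an input value to a result.
   *
   * @param <T> input value type, angle brackets mark a type parameter
   * @param <R> result type
   */
  interface Mapper<T, R> {
    R map(T input);
  }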
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
index 6961bfdaf1a..78565c12966 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
@@ -260,6 +260,7 @@ public class RegionCoprocessorHost
    * @param rsServices interface to available region server functionality
    * @param conf the configuration
    */
+  @SuppressWarnings("ReturnValueIgnored") // Checking method exists as CPU optimization
   public RegionCoprocessorHost(final HRegion region,
       final RegionServerServices rsServices, final Configuration conf) {
     super(rsServices);
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenProvider.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenProvider.java
index 92bd0dbe309..28fef37f5b0 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenProvider.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/security/token/TokenProvider.java
@@ -129,7 +129,7 @@ public class TokenProvider implements AuthenticationProtos.AuthenticationService
         Token<AuthenticationTokenIdentifier> token =
             secretManager.generateToken(currentUser.getName());
-        response.setToken(ClientTokenUtil.toToken(token)).build();
+        response.setToken(ClientTokenUtil.toToken(token));
       } catch (IOException ioe) {
         CoprocessorRpcUtils.setControllerException(controller, ioe);
       }
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java
index 9f2cc011464..b5e1178cca8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java
@@ -61,7 +61,7 @@ public class TestPutDeleteEtcCellIteration {
     for (CellScanner cellScanner = p.cellScanner(); cellScanner.advance();) {
       Cell cell = cellScanner.current();
       byte [] bytes = Bytes.toBytes(index++);
-      cell.equals(new KeyValue(ROW, bytes, bytes, TIMESTAMP, bytes));
+      assertEquals(new KeyValue(ROW, bytes, bytes, TIMESTAMP, bytes), cell);
     }
     assertEquals(COUNT, index);
   }
@@ -74,15 +74,13 @@ public class TestPutDeleteEtcCellIteration {
       p.addColumn(bytes, bytes, TIMESTAMP, bytes);
     }
     int index = 0;
-    int trigger = 3;
     for (CellScanner cellScanner = p.cellScanner(); cellScanner.advance();) {
       Cell cell = cellScanner.current();
       byte [] bytes = Bytes.toBytes(index++);
       // When we hit the trigger, try inserting a new KV; should trigger exception
-      if (trigger == 3) p.addColumn(bytes, bytes, TIMESTAMP, bytes);
-      cell.equals(new KeyValue(ROW, bytes, bytes, TIMESTAMP, bytes));
+      p.addColumn(bytes, bytes, TIMESTAMP, bytes);
+      assertEquals(new KeyValue(ROW, bytes, bytes, TIMESTAMP, bytes), cell);
     }
-    assertEquals(COUNT, index);
   }
 
   @Test
@@ -96,7 +94,7 @@ public class TestPutDeleteEtcCellIteration {
     for (CellScanner cellScanner = d.cellScanner(); cellScanner.advance();) {
       Cell cell = cellScanner.current();
       byte [] bytes = Bytes.toBytes(index++);
-      cell.equals(new KeyValue(ROW, bytes, bytes, TIMESTAMP, KeyValue.Type.DeleteColumn));
+      assertEquals(new KeyValue(ROW, bytes, bytes, TIMESTAMP, KeyValue.Type.Delete), cell);
     }
     assertEquals(COUNT, index);
   }
@@ -151,7 +149,7 @@ public class TestPutDeleteEtcCellIteration {
     for (CellScanner cellScanner = r.cellScanner(); cellScanner.advance();) {
       Cell cell = cellScanner.current();
       byte [] bytes = Bytes.toBytes(index++);
-      cell.equals(new KeyValue(ROW, bytes, bytes, TIMESTAMP, bytes));
+      assertEquals(new KeyValue(ROW, bytes, bytes, TIMESTAMP, bytes), cell);
     }
     assertEquals(COUNT, index);
   }
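The test hunks above are the heart of the ReturnValueIgnored cleanup: cell.equals(expected) computed a boolean and dropped it, so those loops never asserted anything. Replacing the bare equals() call with assertEquals makes the comparison actually enforceable. A contrived, self-contained JUnit 4 example, not HBase code:

  import static org.junit.Assert.assertEquals;

  import org.junit.Test;

  public class LostAssertionExample {
    @Test
    public void comparisonMustBeAsserted() {
      String expected = "a";
      String actual = computeValue();
      // Bug pattern: the boolean result is silently discarded, so a
      // mismatch can never fail the test (error-prone: ReturnValueIgnored).
      actual.equals(expected);
      // Fix: let JUnit do the comparison and fail with a useful message.
      assertEquals(expected, actual);
    }

    private String computeValue() {
      return "a";
    }
  }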
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/codec/CodecPerformance.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/codec/CodecPerformance.java
index 73f5ca0959f..e801b5b4bec 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/codec/CodecPerformance.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/codec/CodecPerformance.java
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hbase.codec;
 
-import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertArrayEquals;
 import static org.junit.Assert.assertTrue;
 
 import java.io.ByteArrayInputStream;
@@ -30,10 +30,6 @@ import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellScanner;
 import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.codec.CellCodec;
-import org.apache.hadoop.hbase.codec.Codec;
-import org.apache.hadoop.hbase.codec.KeyValueCodec;
-import org.apache.hadoop.hbase.codec.MessageCodec;
 import org.apache.hadoop.hbase.io.CellOutputStream;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
@@ -96,10 +92,7 @@ public class CodecPerformance {
   }
 
   static void verifyCells(final Cell [] input, final Cell [] output) {
-    assertEquals(input.length, output.length);
-    for (int i = 0; i < input.length; i ++) {
-      input[i].equals(output[i]);
-    }
+    assertArrayEquals(input, output);
   }
 
   static void doCodec(final Codec codec, final Cell [] cells, final int cycles, final int count,
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftHBaseServiceHandler.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftHBaseServiceHandler.java
index c942977a8e5..369b2beed41 100644
--- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftHBaseServiceHandler.java
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftHBaseServiceHandler.java
@@ -154,7 +154,6 @@ public class ThriftHBaseServiceHandler extends HBaseServiceHandler implements Hbase.Iface {
    * id->scanner hash-map.
    *
    * @param id the ID of the scanner to remove
-   * @return a Scanner, or null if ID was invalid.
    */
   private synchronized void removeScanner(int id) {
     scannerMap.invalidate(id);
diff --git a/pom.xml b/pom.xml
index daaf9dc9712..d3bf57a4a2e 100755
--- a/pom.xml
+++ b/pom.xml
@@ -1547,7 +1547,7 @@
       -->
     <checkstyle.version>8.28</checkstyle.version>
     <exec.maven.version>1.6.0</exec.maven.version>
-    <error-prone.version>2.4.0</error-prone.version>
+    <error-prone.version>2.10.0</error-prone.version>
     <jamon.plugin.version>2.4.2</jamon.plugin.version>
     <lifecycle.mapping.version>1.0.0</lifecycle.mapping.version>
     <maven.antrun.version>1.8</maven.antrun.version>
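A closing note on the verifyCells() rewrite above: the old loop compared cells with equals() and ignored every result, so only the length check did anything. assertArrayEquals performs the length check plus an element-by-element equals(), and reports the first differing index on failure. A self-contained JUnit 4 sketch, not HBase code:

  import static org.junit.Assert.assertArrayEquals;

  import org.junit.Test;

  public class VerifyArraysExample {
    @Test
    public void arraysCompareElementWise() {
      String[] input = {"a", "b", "c"};
      String[] output = {"a", "b", "c"};
      // Checks lengths, then equals() per element; any mismatch fails the
      // test with the offending index, unlike a loop that drops results.
      assertArrayEquals(input, output);
    }
  }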