diff --git a/hbase-asyncfs/src/main/java/org/apache/hadoop/hbase/io/asyncfs/ProtobufDecoder.java b/hbase-asyncfs/src/main/java/org/apache/hadoop/hbase/io/asyncfs/ProtobufDecoder.java
index d6e68f30542..3be9a2e49c1 100644
--- a/hbase-asyncfs/src/main/java/org/apache/hadoop/hbase/io/asyncfs/ProtobufDecoder.java
+++ b/hbase-asyncfs/src/main/java/org/apache/hadoop/hbase/io/asyncfs/ProtobufDecoder.java
@@ -132,17 +132,13 @@ public class ProtobufDecoder extends MessageToMessageDecoder<ByteBuf> {
     try {
       getParserForTypeMethod = protobufMessageLiteClass.getDeclaredMethod("getParserForType");
       newBuilderForTypeMethod = protobufMessageLiteClass.getDeclaredMethod("newBuilderForType");
+      // TODO: If this is false then the class will fail to load? Can refactor it out?
+      hasParser = true;
     } catch (NoSuchMethodException e) {
       // If the method is not found, we are in trouble. Abort.
       throw new RuntimeException(e);
     }
 
-    try {
-      protobufMessageLiteClass.getDeclaredMethod("getParserForType");
-      hasParser = true;
-    } catch (Throwable var2) {
-    }
-
     HAS_PARSER = hasParser;
   }
 }
diff --git a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
index 6c72baa2db1..205d4a7cd4b 100644
--- a/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
+++ b/hbase-common/src/main/java/org/apache/hadoop/hbase/KeyValue.java
@@ -1102,10 +1102,10 @@ public class KeyValue implements ExtendedCell, Cloneable {
    */
   @Override
   public KeyValue clone() throws CloneNotSupportedException {
-    super.clone();
-    byte [] b = new byte[this.length];
-    System.arraycopy(this.bytes, this.offset, b, 0, this.length);
-    KeyValue ret = new KeyValue(b, 0, b.length);
+    KeyValue ret = (KeyValue) super.clone();
+    ret.bytes = Arrays.copyOf(this.bytes, this.bytes.length);
+    ret.offset = 0;
+    ret.length = ret.bytes.length;
     // Important to clone the memstoreTS as well - otherwise memstore's
     // update-in-place methods (eg increment) will end up creating
     // new entries
@@ -1720,8 +1720,8 @@ public class KeyValue implements ExtendedCell, Cloneable {
   }
 
   @Override
-  protected Object clone() throws CloneNotSupportedException {
-    return new MetaComparator();
+  protected MetaComparator clone() throws CloneNotSupportedException {
+    return (MetaComparator) super.clone();
   }
 
   /**
@@ -2248,9 +2248,8 @@ public class KeyValue implements ExtendedCell, Cloneable {
   }
 
   @Override
-  protected Object clone() throws CloneNotSupportedException {
-    super.clone();
-    return new KVComparator();
+  protected KVComparator clone() throws CloneNotSupportedException {
+    return (KVComparator) super.clone();
  }
 }
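For context on the KeyValue changes above: the old clone() called super.clone() but discarded the result and built a replacement via a constructor, which both ignores Cloneable's contract and silently breaks subclasses. The patch moves to the standard idiom: cast the result of super.clone(), then deep-copy mutable state. A minimal self-contained sketch of that idiom (class and field names are illustrative, not HBase code):

```java
import java.util.Arrays;

public class ByteHolder implements Cloneable {
  private byte[] bytes;
  private int offset;
  private int length;

  public ByteHolder(byte[] bytes, int offset, int length) {
    this.bytes = bytes;
    this.offset = offset;
    this.length = length;
  }

  @Override
  public ByteHolder clone() throws CloneNotSupportedException {
    // super.clone() preserves the runtime class, so a subclass's clone()
    // is still an instance of the subclass; "new ByteHolder(...)" would not be.
    ByteHolder ret = (ByteHolder) super.clone();
    // Object.clone() is shallow: without this copy, both instances would
    // share (and mutate) the same backing array.
    ret.bytes = Arrays.copyOf(this.bytes, this.bytes.length);
    ret.offset = 0;
    ret.length = ret.bytes.length;
    return ret;
  }
}
```

The covariant return type (ByteHolder rather than Object) is the same trick the MetaComparator and KVComparator overrides use; it has been legal since Java 5 and spares callers a cast.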
diff --git a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java
index cb0e3c06f90..73eef310426 100644
--- a/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java
+++ b/hbase-endpoint/src/main/java/org/apache/hadoop/hbase/coprocessor/AggregateImplementation.java
@@ -53,11 +53,11 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.AggregateProtos.Aggrega
  * {@link ColumnInterpreter} is used to interpret column value. This class is
  * parameterized with the following (these are the types with which the {@link ColumnInterpreter}
  * is parameterized, and for more description on these, refer to {@link ColumnInterpreter}):
- * @param T Cell value data type
- * @param S Promoted data type
- * @param P PB message that is used to transport initializer specific bytes
- * @param Q PB message that is used to transport Cell (<T>) instance
- * @param R PB message that is used to transport Promoted (<S>) instance
+ * @param <T> Cell value data type
+ * @param <S> Promoted data type
+ * @param <P> PB message that is used to transport initializer specific bytes
+ * @param <Q> PB message that is used to transport Cell (<T>) instance
+ * @param <R> PB message that is used to transport Promoted (<S>) instance
  */
 @InterfaceAudience.Private
 public class AggregateImplementation
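The javadoc fix above hinges on one rule: generic type parameters are documented as `@param <T>`, with angle brackets, while a bare `@param T` makes the javadoc tool look for a method or constructor parameter literally named "T" and emit a warning. A tiny sketch (hypothetical class, only to show the accepted form):

```java
/**
 * A minimal generic container, here only to illustrate javadoc syntax.
 * @param <T> the type of the stored value
 */
public class Box<T> {
  private final T value;

  public Box(T value) {
    this.value = value;
  }

  public T get() {
    return value;
  }
}
```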
diff --git a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java
index 43bb594ec19..d68ee88fe4a 100644
--- a/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java
+++ b/hbase-mapreduce/src/main/java/org/apache/hadoop/hbase/mapreduce/HFileOutputFormat2.java
@@ -95,7 +95,7 @@ import org.slf4j.LoggerFactory;
 /**
  * Writes HFiles. Passed Cells must arrive in order.
  * Writes current time as the sequence id for the file. Sets the major compacted
- * attribute on created @{link {@link HFile}s. Calling write(null,null) will forcibly roll
+ * attribute on created {@link HFile}s. Calling write(null,null) will forcibly roll
  * all HFiles being written.
  * <p>
 * Using this class as part of a MapReduce job is best done
diff --git a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
index a916d0d2715..7b79e704af0 100644
--- a/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
+++ b/hbase-server/src/main/java/org/apache/hadoop/hbase/regionserver/RegionCoprocessorHost.java
@@ -256,6 +256,7 @@ public class RegionCoprocessorHost
    * @param rsServices interface to available region server functionality
    * @param conf the configuration
    */
+  @SuppressWarnings("ReturnValueIgnored") // Checking method exists as CPU optimization
   public RegionCoprocessorHost(final HRegion region, final RegionServerServices rsServices,
       final Configuration conf) {
     super(rsServices);
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java
index 9f2cc011464..b5e1178cca8 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/client/TestPutDeleteEtcCellIteration.java
@@ -61,7 +61,7 @@ public class TestPutDeleteEtcCellIteration {
     for (CellScanner cellScanner = p.cellScanner(); cellScanner.advance();) {
       Cell cell = cellScanner.current();
       byte [] bytes = Bytes.toBytes(index++);
-      cell.equals(new KeyValue(ROW, bytes, bytes, TIMESTAMP, bytes));
+      assertEquals(new KeyValue(ROW, bytes, bytes, TIMESTAMP, bytes), cell);
     }
     assertEquals(COUNT, index);
   }
@@ -74,15 +74,13 @@ public class TestPutDeleteEtcCellIteration {
       p.addColumn(bytes, bytes, TIMESTAMP, bytes);
     }
     int index = 0;
-    int trigger = 3;
     for (CellScanner cellScanner = p.cellScanner(); cellScanner.advance();) {
       Cell cell = cellScanner.current();
       byte [] bytes = Bytes.toBytes(index++);
       // When we hit the trigger, try inserting a new KV; should trigger exception
-      if (trigger == 3) p.addColumn(bytes, bytes, TIMESTAMP, bytes);
-      cell.equals(new KeyValue(ROW, bytes, bytes, TIMESTAMP, bytes));
+      p.addColumn(bytes, bytes, TIMESTAMP, bytes);
+      assertEquals(new KeyValue(ROW, bytes, bytes, TIMESTAMP, bytes), cell);
     }
-    assertEquals(COUNT, index);
   }
 
   @Test
@@ -96,7 +94,7 @@ public class TestPutDeleteEtcCellIteration {
     for (CellScanner cellScanner = d.cellScanner(); cellScanner.advance();) {
       Cell cell = cellScanner.current();
       byte [] bytes = Bytes.toBytes(index++);
-      cell.equals(new KeyValue(ROW, bytes, bytes, TIMESTAMP, KeyValue.Type.DeleteColumn));
+      assertEquals(new KeyValue(ROW, bytes, bytes, TIMESTAMP, KeyValue.Type.Delete), cell);
     }
     assertEquals(COUNT, index);
   }
@@ -151,7 +149,7 @@ public class TestPutDeleteEtcCellIteration {
     for (CellScanner cellScanner = r.cellScanner(); cellScanner.advance();) {
       Cell cell = cellScanner.current();
       byte [] bytes = Bytes.toBytes(index++);
-      cell.equals(new KeyValue(ROW, bytes, bytes, TIMESTAMP, bytes));
+      assertEquals(new KeyValue(ROW, bytes, bytes, TIMESTAMP, bytes), cell);
     }
     assertEquals(COUNT, index);
   }
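The recurring bug these test changes fix: cell.equals(expected) computes a boolean and throws it away, so the loop asserted nothing and the test passed even on mismatches (error-prone reports this as ReturnValueIgnored). A minimal JUnit 4 sketch of the difference:

```java
import static org.junit.Assert.assertEquals;

import org.junit.Test;

public class IgnoredEqualsExample {
  @Test
  public void comparisonMustBeAsserted() {
    String expected = "a";
    String actual = "a";
    expected.equals(actual);        // no-op: the boolean result is discarded
    assertEquals(expected, actual); // real check: fails the test on mismatch
  }
}
```

The second hunk above also tightens the concurrent-modification test: once the comparison became a real assertion, the one-shot trigger variable was dropped in favor of mutating the Put on every iteration, and the trailing count assertion was removed because the loop is expected to end in an exception rather than run to completion.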
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/codec/CodecPerformance.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/codec/CodecPerformance.java
index 73f5ca0959f..e801b5b4bec 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/codec/CodecPerformance.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/codec/CodecPerformance.java
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hbase.codec;
 
-import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertArrayEquals;
 import static org.junit.Assert.assertTrue;
 
 import java.io.ByteArrayInputStream;
@@ -30,10 +30,6 @@ import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellScanner;
 import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.codec.CellCodec;
-import org.apache.hadoop.hbase.codec.Codec;
-import org.apache.hadoop.hbase.codec.KeyValueCodec;
-import org.apache.hadoop.hbase.codec.MessageCodec;
 import org.apache.hadoop.hbase.io.CellOutputStream;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
@@ -96,10 +92,7 @@ public class CodecPerformance {
   }
 
   static void verifyCells(final Cell [] input, final Cell [] output) {
-    assertEquals(input.length, output.length);
-    for (int i = 0; i < input.length; i ++) {
-      input[i].equals(output[i]);
-    }
+    assertArrayEquals(input, output);
   }
 
   static void doCodec(final Codec codec, final Cell [] cells, final int cycles, final int count,
diff --git a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMergesSplitsAddToTracker.java b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMergesSplitsAddToTracker.java
index 2a5e2f4c558..306b771341b 100644
--- a/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMergesSplitsAddToTracker.java
+++ b/hbase-server/src/test/java/org/apache/hadoop/hbase/regionserver/TestMergesSplitsAddToTracker.java
@@ -18,15 +18,18 @@
 package org.apache.hadoop.hbase.regionserver;
 
 import static org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerFactory.TRACKER_IMPL;
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.everyItem;
+import static org.hamcrest.Matchers.hasItem;
+import static org.hamcrest.Matchers.hasProperty;
+import static org.hamcrest.Matchers.not;
 import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
-
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.UUID;
 import java.util.concurrent.TimeUnit;
-import org.apache.commons.lang3.mutable.MutableBoolean;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
@@ -222,21 +225,12 @@ public class TestMergesSplitsAddToTracker {
     return new Pair<>(fileInfo, copyName);
   }
 
-  private void validateDaughterRegionsFiles(HRegion region, String orignalFileName,
+  private void validateDaughterRegionsFiles(HRegion region, String originalFileName,
       String untrackedFile) throws IOException {
     //verify there's no link for the untracked, copied file in first region
     List<StoreFileInfo> infos = region.getRegionFileSystem().getStoreFiles("info");
-    final MutableBoolean foundLink = new MutableBoolean(false);
-    infos.stream().forEach(i -> {
-      i.getActiveFileName().contains(orignalFileName);
-      if(i.getActiveFileName().contains(untrackedFile)){
-        fail();
-      }
-      if(i.getActiveFileName().contains(orignalFileName)){
-        foundLink.setTrue();
-      }
-    });
-    assertTrue(foundLink.booleanValue());
+    assertThat(infos, everyItem(hasProperty("activeFileName", not(containsString(untrackedFile)))));
+    assertThat(infos, hasItem(hasProperty("activeFileName", containsString(originalFileName))));
   }
 
   private void verifyFilesAreTracked(Path regionDir, FileSystem fs) throws Exception {
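The Hamcrest rewrite above replaces a hand-rolled loop (which contained an ignored contains() call of its own, plus the orignalFileName typo) with two declarative assertions. hasProperty matches against JavaBean getters, so "activeFileName" resolves to getActiveFileName() via reflection. A self-contained sketch of the same idiom, with an invented bean class:

```java
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.containsString;
import static org.hamcrest.Matchers.everyItem;
import static org.hamcrest.Matchers.hasItem;
import static org.hamcrest.Matchers.hasProperty;
import static org.hamcrest.Matchers.not;

import java.util.Arrays;
import java.util.List;

public class MatcherExample {
  public static class FileRef {
    private final String activeFileName;

    public FileRef(String activeFileName) {
      this.activeFileName = activeFileName;
    }

    public String getActiveFileName() {
      return activeFileName;
    }
  }

  public static void main(String[] args) {
    List<FileRef> infos = Arrays.asList(new FileRef("hfile-1"), new FileRef("hfile-2"));
    // Every element must avoid the untracked name...
    assertThat(infos, everyItem(hasProperty("activeFileName", not(containsString("untracked")))));
    // ...and at least one element must carry the original name.
    assertThat(infos, hasItem(hasProperty("activeFileName", containsString("hfile-1"))));
  }
}
```

One caveat with hasProperty: the property name is a string checked only at runtime, so a later rename of getActiveFileName() would not be caught at compile time.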
diff --git a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftHBaseServiceHandler.java b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftHBaseServiceHandler.java
index 37cf8d69266..b91ad0983d4 100644
--- a/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftHBaseServiceHandler.java
+++ b/hbase-thrift/src/main/java/org/apache/hadoop/hbase/thrift/ThriftHBaseServiceHandler.java
@@ -155,7 +155,6 @@ public class ThriftHBaseServiceHandler extends HBaseServiceHandler implements Hb
    * id->scanner hash-map.
    *
    * @param id the ID of the scanner to remove
-   * @return a Scanner, or null if ID was invalid.
    */
   private synchronized void removeScanner(int id) {
     scannerMap.invalidate(id);
diff --git a/pom.xml b/pom.xml
index cbe7482bfd1..44d76e28073 100755
--- a/pom.xml
+++ b/pom.xml
@@ -1816,7 +1816,7 @@
     -->
     <checkstyle.version>8.28</checkstyle.version>
     <exec.maven.version>1.6.0</exec.maven.version>
-    <error-prone.version>2.4.0</error-prone.version>
+    <error-prone.version>2.10.0</error-prone.version>
     <jamon.plugin.version>2.4.2</jamon.plugin.version>
     <lifecycle.mapping.version>1.0.0</lifecycle.mapping.version>
     <maven.antrun.version>1.8</maven.antrun.version>
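Finally, the pom.xml hunk bumps error-prone from 2.4.0 to 2.10.0; the ReturnValueIgnored findings fixed throughout this patch line up with the stricter analysis in the newer release. Where a return value is deliberately unused, as in the RegionCoprocessorHost constructor that probes for a method's existence purely as an optimization, the finding is suppressed at the member. A small sketch of that suppression pattern (illustrative names, not the HBase code):

```java
public class ProbeExample {
  @SuppressWarnings("ReturnValueIgnored") // probing for existence, not using the Method
  static boolean hasMethod(Class<?> clazz, String name) {
    try {
      // The Method object is intentionally discarded; only the
      // throws-or-returns behavior of the lookup matters here.
      clazz.getDeclaredMethod(name);
      return true;
    } catch (NoSuchMethodException e) {
      return false;
    }
  }
}
```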