HBASE-26622 Update error-prone to 2.10
Author: Mike Drob <mdrob@apache.org>
Co-authored-by: Nick Dimiduk <ndimiduk@apache.org>
Signed-off-by: Andrew Purtell <apurtell@apache.org>
parent 87f8d9ac4e
commit 06f06cbd86
@@ -132,17 +132,13 @@ public class ProtobufDecoder extends MessageToMessageDecoder<ByteBuf> {
     try {
       getParserForTypeMethod = protobufMessageLiteClass.getDeclaredMethod("getParserForType");
       newBuilderForTypeMethod = protobufMessageLiteClass.getDeclaredMethod("newBuilderForType");
+      // TODO: If this is false then the class will fail to load? Can refactor it out?
+      hasParser = true;
     } catch (NoSuchMethodException e) {
       // If the method is not found, we are in trouble. Abort.
       throw new RuntimeException(e);
     }
-
-    try {
-      protobufMessageLiteClass.getDeclaredMethod("getParserForType");
-      hasParser = true;
-    } catch (Throwable var2) {
-    }
 
     HAS_PARSER = hasParser;
   }
 }
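
The deleted second try block called getDeclaredMethod purely for its throw-if-absent side effect and discarded the returned Method, the pattern error-prone's ReturnValueIgnored check flags (the same check is suppressed explicitly in RegionCoprocessorHost further down). A minimal sketch of the before and after shapes, using a hypothetical helper class rather than the HBase source:

    import java.lang.reflect.Method;

    class ParserProbe {
      // Old shape, flagged by ReturnValueIgnored: the Method is discarded.
      static boolean probeDiscardingResult(Class<?> clazz) {
        try {
          clazz.getDeclaredMethod("getParserForType");
          return true;
        } catch (NoSuchMethodException e) {
          return false;
        }
      }

      // New shape: the lookup result is assigned, so the use is explicit.
      static boolean probeKeepingResult(Class<?> clazz) {
        try {
          Method parser = clazz.getDeclaredMethod("getParserForType");
          return parser != null; // always true when no exception was thrown
        } catch (NoSuchMethodException e) {
          return false;
        }
      }
    }

The commit resolves it by folding hasParser = true into the first try block, which already stores both Method objects, and deleting the redundant probe outright.
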
@@ -1102,10 +1102,10 @@ public class KeyValue implements ExtendedCell, Cloneable {
    */
   @Override
   public KeyValue clone() throws CloneNotSupportedException {
-    super.clone();
-    byte [] b = new byte[this.length];
-    System.arraycopy(this.bytes, this.offset, b, 0, this.length);
-    KeyValue ret = new KeyValue(b, 0, b.length);
+    KeyValue ret = (KeyValue) super.clone();
+    ret.bytes = Arrays.copyOf(this.bytes, this.bytes.length);
+    ret.offset = 0;
+    ret.length = ret.bytes.length;
     // Important to clone the memstoreTS as well - otherwise memstore's
     // update-in-place methods (eg increment) will end up creating
     // new entries
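
The old body dropped the result of super.clone() (ReturnValueIgnored again) and produced the copy through a constructor, which would hand a plain KeyValue back to any subclass. The replacement is the standard Cloneable idiom: take the shallow copy from super.clone(), then deep-copy mutable state. A minimal sketch with a hypothetical class, not HBase code:

    import java.util.Arrays;

    class ByteHolder implements Cloneable {
      byte[] bytes = new byte[8];

      @Override
      public ByteHolder clone() throws CloneNotSupportedException {
        // super.clone() preserves the runtime class for subclasses.
        ByteHolder copy = (ByteHolder) super.clone();
        // Mutable state must then be deep-copied by hand.
        copy.bytes = Arrays.copyOf(bytes, bytes.length);
        return copy;
      }
    }
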
@@ -1720,8 +1720,8 @@ public class KeyValue implements ExtendedCell, Cloneable {
     }
 
     @Override
-    protected Object clone() throws CloneNotSupportedException {
-      return new MetaComparator();
+    protected MetaComparator clone() throws CloneNotSupportedException {
+      return (MetaComparator) super.clone();
     }
 
     /**
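
The same idiom, plus a covariant return type: clone() now declares MetaComparator instead of Object and obtains the instance from super.clone() rather than a bare constructor call, so a subclass would clone to its own runtime type. The KVComparator hunk below receives the identical fix.
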
@@ -2248,9 +2248,8 @@ public class KeyValue implements ExtendedCell, Cloneable {
     }
 
     @Override
-    protected Object clone() throws CloneNotSupportedException {
-      super.clone();
-      return new KVComparator();
+    protected KVComparator clone() throws CloneNotSupportedException {
+      return (KVComparator) super.clone();
     }
 
   }
@@ -53,11 +53,11 @@ import org.apache.hadoop.hbase.shaded.protobuf.generated.AggregateProtos.Aggrega
  * {@link ColumnInterpreter} is used to interpret column value. This class is
  * parameterized with the following (these are the types with which the {@link ColumnInterpreter}
  * is parameterized, and for more description on these, refer to {@link ColumnInterpreter}):
- * @param T Cell value data type
- * @param S Promoted data type
- * @param P PB message that is used to transport initializer specific bytes
- * @param Q PB message that is used to transport Cell (<T>) instance
- * @param R PB message that is used to transport Promoted (<S>) instance
+ * @param <T> Cell value data type
+ * @param <S> Promoted data type
+ * @param <P> PB message that is used to transport initializer specific bytes
+ * @param <Q> PB message that is used to transport Cell (<T>) instance
+ * @param <R> PB message that is used to transport Promoted (<S>) instance
  */
 @InterfaceAudience.Private
 public class AggregateImplementation<T, S, P extends Message, Q extends Message, R extends Message>
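
Javadoc for a class-level type parameter needs the angle-bracket form @param <T>; without the brackets the tag is read as documenting a nonexistent ordinary parameter named T, which the stricter javadoc validation in error-prone 2.10 (presumably its InvalidParam check) rejects. A minimal illustration:

    /**
     * A single-element container.
     *
     * @param <T> the element type; angle brackets mark a type parameter
     */
    interface Box<T> {
      T get();
    }
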
@@ -95,7 +95,7 @@ import org.slf4j.LoggerFactory;
 /**
  * Writes HFiles. Passed Cells must arrive in order.
  * Writes current time as the sequence id for the file. Sets the major compacted
- * attribute on created @{link {@link HFile}s. Calling write(null,null) will forcibly roll
+ * attribute on created {@link HFile}s. Calling write(null,null) will forcibly roll
  * all HFiles being written.
  * <p>
  * Using this class as part of a MapReduce job is best done
@@ -256,6 +256,7 @@ public class RegionCoprocessorHost
    * @param rsServices interface to available region server functionality
    * @param conf the configuration
    */
+  @SuppressWarnings("ReturnValueIgnored") // Checking method exists as CPU optimization
   public RegionCoprocessorHost(final HRegion region,
       final RegionServerServices rsServices, final Configuration conf) {
     super(rsServices);
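
Here the ignored return value is deliberate: the constructor evidently probes for declared methods only to learn whether they exist, so the warning is suppressed at the narrowest possible scope, with a trailing comment recording why. A sketch of that convention, using a hypothetical probe rather than the actual constructor body:

    // Suppression is scoped to one method, and the comment explains it.
    @SuppressWarnings("ReturnValueIgnored") // only probing for existence
    static boolean hasHook(Class<?> coprocessor, String name) {
      try {
        coprocessor.getDeclaredMethod(name); // result deliberately unused
        return true;
      } catch (NoSuchMethodException e) {
        return false;
      }
    }
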
@@ -61,7 +61,7 @@ public class TestPutDeleteEtcCellIteration {
     for (CellScanner cellScanner = p.cellScanner(); cellScanner.advance();) {
       Cell cell = cellScanner.current();
       byte [] bytes = Bytes.toBytes(index++);
-      cell.equals(new KeyValue(ROW, bytes, bytes, TIMESTAMP, bytes));
+      assertEquals(new KeyValue(ROW, bytes, bytes, TIMESTAMP, bytes), cell);
     }
     assertEquals(COUNT, index);
   }
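
The old cell.equals(...) computed a boolean and threw it away, so the loop asserted nothing and a broken iteration would still pass. assertEquals(expected, actual) turns the comparison into a real assertion with a useful failure message. The same dead-assertion fix repeats in the three sibling test methods below.
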
@@ -74,15 +74,13 @@ public class TestPutDeleteEtcCellIteration {
       p.addColumn(bytes, bytes, TIMESTAMP, bytes);
     }
     int index = 0;
-    int trigger = 3;
     for (CellScanner cellScanner = p.cellScanner(); cellScanner.advance();) {
       Cell cell = cellScanner.current();
       byte [] bytes = Bytes.toBytes(index++);
-      // When we hit the trigger, try inserting a new KV; should trigger exception
-      if (trigger == 3) p.addColumn(bytes, bytes, TIMESTAMP, bytes);
-      cell.equals(new KeyValue(ROW, bytes, bytes, TIMESTAMP, bytes));
+      p.addColumn(bytes, bytes, TIMESTAMP, bytes);
+      assertEquals(new KeyValue(ROW, bytes, bytes, TIMESTAMP, bytes), cell);
     }
     assertEquals(COUNT, index);
   }
 
   @Test
@@ -96,7 +94,7 @@ public class TestPutDeleteEtcCellIteration {
     for (CellScanner cellScanner = d.cellScanner(); cellScanner.advance();) {
       Cell cell = cellScanner.current();
       byte [] bytes = Bytes.toBytes(index++);
-      cell.equals(new KeyValue(ROW, bytes, bytes, TIMESTAMP, KeyValue.Type.DeleteColumn));
+      assertEquals(new KeyValue(ROW, bytes, bytes, TIMESTAMP, KeyValue.Type.Delete), cell);
     }
     assertEquals(COUNT, index);
   }
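
Note the expected value also changed, from KeyValue.Type.DeleteColumn to KeyValue.Type.Delete, presumably the type the Delete under test actually emits: the discarded equals call had been masking a wrong expectation along with the missing check.
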
@@ -151,7 +149,7 @@ public class TestPutDeleteEtcCellIteration {
     for (CellScanner cellScanner = r.cellScanner(); cellScanner.advance();) {
       Cell cell = cellScanner.current();
       byte [] bytes = Bytes.toBytes(index++);
-      cell.equals(new KeyValue(ROW, bytes, bytes, TIMESTAMP, bytes));
+      assertEquals(new KeyValue(ROW, bytes, bytes, TIMESTAMP, bytes), cell);
     }
     assertEquals(COUNT, index);
   }
@@ -17,7 +17,7 @@
  */
 package org.apache.hadoop.hbase.codec;
 
-import static org.junit.Assert.assertEquals;
+import static org.junit.Assert.assertArrayEquals;
 import static org.junit.Assert.assertTrue;
 
 import java.io.ByteArrayInputStream;
@@ -30,10 +30,6 @@ import org.slf4j.LoggerFactory;
 import org.apache.hadoop.hbase.Cell;
 import org.apache.hadoop.hbase.CellScanner;
 import org.apache.hadoop.hbase.KeyValue;
-import org.apache.hadoop.hbase.codec.CellCodec;
-import org.apache.hadoop.hbase.codec.Codec;
-import org.apache.hadoop.hbase.codec.KeyValueCodec;
-import org.apache.hadoop.hbase.codec.MessageCodec;
 import org.apache.hadoop.hbase.io.CellOutputStream;
 import org.apache.hadoop.hbase.util.Bytes;
 import org.apache.hadoop.hbase.util.EnvironmentEdgeManager;
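
The four dropped imports name classes from org.apache.hadoop.hbase.codec, this file's own package, so they were redundant.
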
@@ -96,10 +92,7 @@ public class CodecPerformance {
   }
 
   static void verifyCells(final Cell [] input, final Cell [] output) {
-    assertEquals(input.length, output.length);
-    for (int i = 0; i < input.length; i ++) {
-      input[i].equals(output[i]);
-    }
+    assertArrayEquals(input, output);
   }
 
   static void doCodec(final Codec codec, final Cell [] cells, final int cycles, final int count,
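
The removed loop repeated the discarded-equals mistake, so only the length assertion was doing any work. JUnit's assertArrayEquals covers both: it checks the lengths, compares elements pairwise via equals, and reports the index of the first mismatch on failure, which is why the assertEquals static import above gives way to assertArrayEquals.
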
@@ -18,15 +18,18 @@
 package org.apache.hadoop.hbase.regionserver;
 
 import static org.apache.hadoop.hbase.regionserver.storefiletracker.StoreFileTrackerFactory.TRACKER_IMPL;
-import static org.junit.Assert.assertTrue;
-import static org.junit.Assert.fail;
+import static org.hamcrest.MatcherAssert.assertThat;
+import static org.hamcrest.Matchers.containsString;
+import static org.hamcrest.Matchers.everyItem;
+import static org.hamcrest.Matchers.hasItem;
+import static org.hamcrest.Matchers.hasProperty;
+import static org.hamcrest.Matchers.not;
 
 import java.io.IOException;
 import java.util.ArrayList;
 import java.util.List;
 import java.util.UUID;
 import java.util.concurrent.TimeUnit;
-import org.apache.commons.lang3.mutable.MutableBoolean;
 import org.apache.hadoop.fs.FileStatus;
 import org.apache.hadoop.fs.FileSystem;
 import org.apache.hadoop.fs.FileUtil;
@@ -222,21 +225,12 @@ public class TestMergesSplitsAddToTracker {
     return new Pair<>(fileInfo, copyName);
   }
 
-  private void validateDaughterRegionsFiles(HRegion region, String orignalFileName,
+  private void validateDaughterRegionsFiles(HRegion region, String originalFileName,
       String untrackedFile) throws IOException {
     //verify there's no link for the untracked, copied file in first region
     List<StoreFileInfo> infos = region.getRegionFileSystem().getStoreFiles("info");
-    final MutableBoolean foundLink = new MutableBoolean(false);
-    infos.stream().forEach(i -> {
-      i.getActiveFileName().contains(orignalFileName);
-      if(i.getActiveFileName().contains(untrackedFile)){
-        fail();
-      }
-      if(i.getActiveFileName().contains(orignalFileName)){
-        foundLink.setTrue();
-      }
-    });
-    assertTrue(foundLink.booleanValue());
+    assertThat(infos, everyItem(hasProperty("activeFileName", not(containsString(untrackedFile)))));
+    assertThat(infos, hasItem(hasProperty("activeFileName", containsString(originalFileName))));
   }
 
   private void verifyFilesAreTracked(Path regionDir, FileSystem fs) throws Exception {
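
The hand-rolled forEach with a MutableBoolean flag, a bare fail(), and a no-op contains(...) call on the first line of the lambda is replaced by two declarative hamcrest assertions. hasProperty resolves the named JavaBean property reflectively, so "activeFileName" is read through each StoreFileInfo's getActiveFileName() getter, and a failing assertThat prints the mismatching items. A self-contained sketch with a hypothetical FileInfo class:

    import static org.hamcrest.MatcherAssert.assertThat;
    import static org.hamcrest.Matchers.containsString;
    import static org.hamcrest.Matchers.everyItem;
    import static org.hamcrest.Matchers.hasItem;
    import static org.hamcrest.Matchers.hasProperty;
    import static org.hamcrest.Matchers.not;

    import java.util.List;

    class FileInfo {
      private final String activeFileName;

      FileInfo(String activeFileName) {
        this.activeFileName = activeFileName;
      }

      // Bean getter that hasProperty("activeFileName", ...) finds reflectively.
      public String getActiveFileName() {
        return activeFileName;
      }
    }

    class MatcherSketch {
      static void check(List<FileInfo> infos, String original, String untracked) {
        // No store file may point at the untracked file...
        assertThat(infos,
          everyItem(hasProperty("activeFileName", not(containsString(untracked)))));
        // ...and at least one must point at the original file.
        assertThat(infos, hasItem(hasProperty("activeFileName", containsString(original))));
      }
    }
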
@@ -155,7 +155,6 @@ public class ThriftHBaseServiceHandler extends HBaseServiceHandler implements Hb
    * id->scanner hash-map.
    *
    * @param id the ID of the scanner to remove
-   * @return a Scanner, or null if ID was invalid.
    */
   private synchronized void removeScanner(int id) {
     scannerMap.invalidate(id);
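
removeScanner returns void, so the @return tag was stale and is dropped, presumably to satisfy the stricter javadoc validation in error-prone 2.10.
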
pom.xml
@@ -1816,7 +1816,7 @@
     -->
     <checkstyle.version>8.28</checkstyle.version>
     <exec.maven.version>1.6.0</exec.maven.version>
-    <error-prone.version>2.4.0</error-prone.version>
+    <error-prone.version>2.10.0</error-prone.version>
     <jamon.plugin.version>2.4.2</jamon.plugin.version>
     <lifecycle.mapping.version>1.0.0</lifecycle.mapping.version>
     <maven.antrun.version>1.8</maven.antrun.version>
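
Bumping the property is the whole upgrade because the version feeds the compiler setup through Maven property substitution. For illustration only (the real plugin configuration lives elsewhere in this pom and may differ in detail), error-prone's documented Maven wiring looks like this:

    <plugin>
      <groupId>org.apache.maven.plugins</groupId>
      <artifactId>maven-compiler-plugin</artifactId>
      <configuration>
        <compilerArgs>
          <arg>-XDcompilePolicy=simple</arg>
          <arg>-Xplugin:ErrorProne</arg>
        </compilerArgs>
        <annotationProcessorPaths>
          <path>
            <groupId>com.google.errorprone</groupId>
            <artifactId>error_prone_core</artifactId>
            <version>${error-prone.version}</version>
          </path>
        </annotationProcessorPaths>
      </configuration>
    </plugin>
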