Update errorprone, mockito, jacoco, checkerframework. (#17414)

* Update errorprone, mockito, jacoco, checkerframework.

This patch updates various build and test dependencies to see whether they
make unit tests on JDK 21 behave more reliably.

* Update licenses, tests.

* Remove assertEquals.

* Repair two tests.

* Update some more tests.
Gian Merlino 2024-10-28 11:34:03 -07:00 committed by GitHub
parent 73675d0671
commit 446a8f466f
14 changed files with 61 additions and 82 deletions


@@ -19,6 +19,7 @@
 package org.apache.druid.query.aggregation.datasketches.hll;

+import nl.jqno.equalsverifier.EqualsVerifier;
 import org.apache.datasketches.hll.HllSketch;
 import org.apache.datasketches.hll.TgtHllType;
 import org.apache.druid.java.util.common.StringEncoding;
@@ -120,11 +121,9 @@ public class HllSketchAggregatorFactoryTest
   }

   @Test
-  public void testEqualsSameObject()
+  public void testEquals()
   {
-    //noinspection EqualsWithItself
-    Assert.assertEquals(target, target);
-    Assert.assertArrayEquals(target.getCacheKey(), target.getCacheKey());
+    EqualsVerifier.forClass(HllSketchAggregatorFactory.class).usingGetClass().verify();
   }

   @Test

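The change above, repeated in several test files in this patch, replaces a hand-rolled reflexivity assertion with EqualsVerifier, which checks the full equals/hashCode contract (reflexivity, symmetry, transitivity, consistency with hashCode, null handling) in a single call. A minimal, self-contained sketch of the idiom, using a hypothetical Point class rather than any class from this repository:

import nl.jqno.equalsverifier.EqualsVerifier;
import org.junit.Test;

import java.util.Objects;

public class PointEqualsTest
{
  // Hypothetical value class, present only to illustrate the idiom.
  static class Point
  {
    private final int x;
    private final int y;

    Point(int x, int y)
    {
      this.x = x;
      this.y = y;
    }

    @Override
    public boolean equals(Object o)
    {
      if (this == o) {
        return true;
      }
      if (o == null || getClass() != o.getClass()) {
        return false;
      }
      Point that = (Point) o;
      return x == that.x && y == that.y;
    }

    @Override
    public int hashCode()
    {
      return Objects.hash(x, y);
    }
  }

  @Test
  public void testEquals()
  {
    // usingGetClass() matches equals() implementations that compare getClass()
    // rather than using instanceof, as several equals() methods in this patch do.
    EqualsVerifier.forClass(Point.class).usingGetClass().verify();
  }
}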

@@ -22,6 +22,7 @@ package org.apache.druid.data.input.kafkainput;
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.collect.ImmutableList;
+import nl.jqno.equalsverifier.EqualsVerifier;
 import org.apache.druid.data.input.kafka.KafkaRecordEntity;
 import org.apache.druid.java.util.common.DateTimes;
 import org.apache.druid.java.util.common.Pair;
@@ -72,15 +73,11 @@ public class KafkaStringHeaderFormatTest
       }
   );
   private KafkaRecordEntity inputEntity;
-  private long timestamp = DateTimes.of("2021-06-24T00:00:00.000Z").getMillis();
+  private final long timestamp = DateTimes.of("2021-06-24T00:00:00.000Z").getMillis();

   @Test
   public void testSerde() throws JsonProcessingException
   {
-    Assert.assertEquals(
-        KAFKAHEADERNOENCODE,
-        KAFKAHEADERNOENCODE
-    );
     Assert.assertEquals(
         KAFKAHEADERNOENCODE,
         MAPPER.readValue(MAPPER.writeValueAsString(KAFKAHEADERNOENCODE), KafkaStringHeaderFormat.class)
@@ -92,6 +89,12 @@ public class KafkaStringHeaderFormatTest
     );
   }

+  @Test
+  public void testEquals()
+  {
+    EqualsVerifier.forClass(KafkaStringHeaderFormat.class).usingGetClass().verify();
+  }
+
   @Test
   public void testDefaultHeaderFormat()
   {


@@ -140,6 +140,11 @@
       <artifactId>hamcrest-core</artifactId>
       <scope>test</scope>
     </dependency>
+    <dependency>
+      <groupId>nl.jqno.equalsverifier</groupId>
+      <artifactId>equalsverifier</artifactId>
+      <scope>test</scope>
+    </dependency>
     <!-- explicitly declare mockito-core dependency to make anaylize-dependencies happy when running with Java 8 -->
     <dependency>
       <groupId>org.mockito</groupId>


@@ -70,19 +70,22 @@ public class DataSegmentAndIndexZipFilePath
   @Override
   public boolean equals(Object o)
   {
-    if (o instanceof DataSegmentAndIndexZipFilePath) {
-      DataSegmentAndIndexZipFilePath that = (DataSegmentAndIndexZipFilePath) o;
-      return segment.equals(((DataSegmentAndIndexZipFilePath) o).getSegment())
-             && tmpIndexZipFilePath.equals(that.getTmpIndexZipFilePath())
-             && finalIndexZipFilePath.equals(that.getFinalIndexZipFilePath());
-    }
-    return false;
-  }
+    if (this == o) {
+      return true;
+    }
+    if (o == null || getClass() != o.getClass()) {
+      return false;
+    }
+    DataSegmentAndIndexZipFilePath that = (DataSegmentAndIndexZipFilePath) o;
+    return Objects.equals(segment, that.segment)
+           && Objects.equals(tmpIndexZipFilePath, that.tmpIndexZipFilePath)
+           && Objects.equals(finalIndexZipFilePath, that.finalIndexZipFilePath);
+  }

   @Override
   public int hashCode()
   {
-    return Objects.hash(segment.getId(), tmpIndexZipFilePath);
+    return Objects.hash(segment, tmpIndexZipFilePath, finalIndexZipFilePath);
   }

   @Override

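The rewrite above switches the field comparisons to the null-safe java.util.Objects helpers and makes hashCode cover the same three fields that equals compares (the old version hashed only segment.getId() and tmpIndexZipFilePath, which EqualsVerifier's significant-fields check would likely flag). A tiny stand-alone sketch of those two helpers, with made-up values rather than anything from this patch:

import java.util.Objects;

public class ObjectsHelpersSketch
{
  public static void main(String[] args)
  {
    String tmpPath = null;
    String finalPath = "segments/index.zip";

    // Objects.equals tolerates null on either side; tmpPath.equals(finalPath) would throw NPE here.
    System.out.println(Objects.equals(tmpPath, finalPath)); // false
    System.out.println(Objects.equals(null, null));         // true

    // Objects.hash is null-safe as well; passing the same fields that equals compares
    // keeps equal objects hashing to the same value.
    System.out.println(Objects.hash(tmpPath, finalPath));
  }
}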

@@ -21,6 +21,7 @@ package org.apache.druid.indexer;
 import com.fasterxml.jackson.databind.InjectableValues;
 import com.fasterxml.jackson.databind.ObjectMapper;
+import nl.jqno.equalsverifier.EqualsVerifier;
 import org.apache.druid.jackson.DefaultObjectMapper;
 import org.apache.druid.timeline.DataSegment;
 import org.apache.druid.timeline.SegmentId;
@@ -149,17 +150,9 @@ public class DataSegmentAndIndexZipFilePathTest
   }

   @Test
-  public void test_equals_sameObject_equal()
+  public void test_equals()
   {
-    String tmpPath = "tmpPath";
-    String finalPath = "finalPath";
-    target = new DataSegmentAndIndexZipFilePath(
-        SEGMENT,
-        tmpPath,
-        finalPath
-    );
-    Assert.assertEquals(target, target);
+    EqualsVerifier.forClass(DataSegmentAndIndexZipFilePath.class).usingGetClass().verify();
   }

   @Test


@@ -331,7 +331,7 @@ name: Error Prone Annotations
 license_category: binary
 module: java-core
 license_name: Apache License version 2.0
-version: 2.20.0
+version: 2.35.1
 libraries:
   - com.google.errorprone: error_prone_annotations
@@ -3285,7 +3285,7 @@ name: Checker Qual
 license_category: binary
 module: java-core
 license_name: MIT License
-version: 2.5.7
+version: 3.48.1
 copyright: the Checker Framework developers
 license_file_path: licenses/bin/checker-qual.MIT
 libraries:


@@ -93,7 +93,7 @@
     <datasketches.memory.version>2.2.0</datasketches.memory.version>
     <derby.version>10.14.2.0</derby.version>
     <dropwizard.metrics.version>4.2.22</dropwizard.metrics.version>
-    <errorprone.version>2.20.0</errorprone.version>
+    <errorprone.version>2.35.1</errorprone.version>
     <fastutil.version>8.5.4</fastutil.version>
     <guava.version>32.0.1-jre</guava.version>
     <guice.version>4.1.0</guice.version>
@@ -114,19 +114,19 @@
     <jna.version>5.13.0</jna.version>
     <jna-platform.version>5.13.0</jna-platform.version>
     <hadoop.compile.version>3.3.6</hadoop.compile.version>
-    <mockito.version>5.5.0</mockito.version>
+    <mockito.version>5.14.2</mockito.version>
     <!-- mockito-inline artifact was removed in mockito 5.3 (mockito 5.x is required for Java >17),
          however it is required in some cases when running against mockito 4.x (mockito 4.x is required for Java <11.
          We use the following property to pick the proper artifact based on Java version (see pre-java-11 profile) -->
     <mockito.inline.artifact>core</mockito.inline.artifact>
     <aws.sdk.version>1.12.638</aws.sdk.version>
     <caffeine.version>2.8.0</caffeine.version>
-    <jacoco.version>0.8.7</jacoco.version>
+    <jacoco.version>0.8.12</jacoco.version>
     <hibernate-validator.version>6.2.5.Final</hibernate-validator.version>
     <httpclient.version>4.5.13</httpclient.version>
     <!-- When upgrading ZK, edit docs and integration tests as well (integration-tests/docker-base/setup.sh) -->
     <zookeeper.version>3.8.4</zookeeper.version>
-    <checkerframework.version>2.5.7</checkerframework.version>
+    <checkerframework.version>3.48.1</checkerframework.version>
     <com.google.apis.client.version>2.2.0</com.google.apis.client.version>
     <com.google.http.client.apis.version>1.42.3</com.google.http.client.apis.version>
     <com.google.apis.compute.version>v1-rev20230606-2.0.0</com.google.apis.compute.version>

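The mockito.inline.artifact property in the comment above exists because the inline mock maker used to ship as a separate mockito-inline artifact; in the Mockito 5.x line being bumped here it is built into mockito-core, so static and final mocking needs no extra dependency on modern JDK builds, while the pre-java-11 profile still swaps in mockito-inline for Mockito 4.x. A hedged sketch of that capability against a hypothetical Clock utility, not code from this repository:

import org.mockito.MockedStatic;
import org.mockito.Mockito;

public class StaticMockingSketch
{
  // Hypothetical utility class, used only to illustrate the API.
  static class Clock
  {
    static long nowMillis()
    {
      return System.currentTimeMillis();
    }
  }

  public static void main(String[] args)
  {
    // With mockito-core 5.x the inline mock maker is the default, so static methods
    // can be stubbed inside a try-with-resources scope without the mockito-inline artifact.
    try (MockedStatic<Clock> mocked = Mockito.mockStatic(Clock.class)) {
      mocked.when(Clock::nowMillis).thenReturn(42L);
      System.out.println(Clock.nowMillis()); // 42 while the static mock is active
    }
    System.out.println(Clock.nowMillis() > 0); // original behavior restored afterwards
  }
}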

@@ -115,13 +115,13 @@ public class ScanOperatorFactory implements OperatorFactory
     if (this == o) {
       return true;
     }
-    if (!(o instanceof ScanOperatorFactory)) {
+    if (o == null || getClass() != o.getClass()) {
       return false;
     }
     ScanOperatorFactory that = (ScanOperatorFactory) o;
-    return Objects.equals(offsetLimit, that.offsetLimit)
-           && Objects.equals(timeRange, that.timeRange)
+    return Objects.equals(timeRange, that.timeRange)
            && Objects.equals(filter, that.filter)
+           && Objects.equals(offsetLimit, that.offsetLimit)
            && Objects.equals(projectedColumns, that.projectedColumns)
            && Objects.equals(virtualColumns, that.virtualColumns)
            && Objects.equals(ordering, that.ordering);


@@ -48,7 +48,6 @@ public class AllocationMetricCollectorTest
     }

     long delta = collector.calculateDelta();
-    Assert.assertNotNull(delta);
     Assert.assertTrue(delta > 0);
     log.info("First delta: %s", delta);


@@ -19,7 +19,8 @@
 package org.apache.druid.java.util.metrics;

-import org.junit.Assert;
+import org.hamcrest.MatcherAssert;
+import org.hamcrest.Matchers;
 import org.junit.Test;

 public class JvmPidDiscovererTest
@@ -27,6 +28,9 @@ public class JvmPidDiscovererTest
   @Test
   public void getPid()
   {
-    Assert.assertNotNull(JvmPidDiscoverer.instance().getPid());
+    MatcherAssert.assertThat(
+        JvmPidDiscoverer.instance().getPid(),
+        Matchers.greaterThan(0L)
+    );
   }
 }

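The Hamcrest form above replaces a bare non-null check with an assertion that the PID is actually positive, and a failed matcher reports the expected condition together with the actual value. A stand-alone sketch of the same idiom, with an illustrative value rather than anything from this test:

import org.hamcrest.MatcherAssert;
import org.hamcrest.Matchers;
import org.junit.Test;

public class GreaterThanMatcherTest
{
  @Test
  public void valueIsPositive()
  {
    long value = ProcessHandle.current().pid(); // any positive long will do; the current PID works on JDK 9+

    // On failure, MatcherAssert reports both the matcher description and the actual value,
    // which is more informative than Assert.assertTrue(value > 0).
    MatcherAssert.assertThat(value, Matchers.greaterThan(0L));
  }
}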

@@ -23,6 +23,7 @@ import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.InjectableValues;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.collect.ImmutableSet;
+import nl.jqno.equalsverifier.EqualsVerifier;
 import org.apache.druid.common.config.NullHandling;
 import org.apache.druid.jackson.DefaultObjectMapper;
 import org.apache.druid.java.util.common.Intervals;
@@ -58,24 +59,7 @@ public class ScanOperatorFactoryTest
   @Test
   public void testEquals()
   {
-    final Builder bob = new Builder();
-    bob.timeRange = Intervals.utc(0, 6);
-    bob.filter = DimFilters.dimEquals("abc", "b");
-    bob.offsetLimit = OffsetLimit.limit(48);
-    bob.projectedColumns = Arrays.asList("a", "b");
-    bob.virtualColumns = VirtualColumns.EMPTY;
-    bob.ordering = Collections.singletonList(ColumnWithDirection.ascending("a"));
-    ScanOperatorFactory factory = bob.build();
-    Assert.assertEquals(factory, factory);
-    Assert.assertNotEquals(factory, new Object());
-    Assert.assertNotEquals(factory, bob.copy().setTimeRange(null).build());
-    Assert.assertNotEquals(factory, bob.copy().setFilter(null).build());
-    Assert.assertNotEquals(factory, bob.copy().setOffsetLimit(null).build());
-    Assert.assertNotEquals(factory, bob.copy().setProjectedColumns(null).build());
-    Assert.assertNotEquals(factory, bob.copy().setVirtualColumns(null).build());
-    Assert.assertNotEquals(factory, bob.copy().setOrdering(null).build());
+    EqualsVerifier.forClass(ScanOperatorFactory.class).usingGetClass().verify();
   }

   @Test


@@ -22,6 +22,7 @@ package org.apache.druid.segment;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import com.google.common.collect.ImmutableList;
 import com.google.common.primitives.Longs;
+import nl.jqno.equalsverifier.EqualsVerifier;
 import org.apache.druid.query.dimension.DefaultDimensionSpec;
 import org.apache.druid.query.dimension.DimensionSpec;
 import org.apache.druid.query.dimension.ExtractionDimensionSpec;
@@ -395,26 +396,16 @@ public class VirtualColumnsTest extends InitializedNullHandlingTest
   @Test
   public void testEqualsAndHashCode()
   {
-    final VirtualColumns virtualColumns = VirtualColumns.create(
-        ImmutableList.of(
-            new ExpressionVirtualColumn("expr", "x + y", ColumnType.FLOAT, TestExprMacroTable.INSTANCE)
-        )
-    );
-    final VirtualColumns virtualColumns2 = VirtualColumns.create(
-        ImmutableList.of(
-            new ExpressionVirtualColumn("expr", "x + y", ColumnType.FLOAT, TestExprMacroTable.INSTANCE)
-        )
-    );
-    Assert.assertEquals(virtualColumns, virtualColumns);
-    Assert.assertEquals(virtualColumns, virtualColumns2);
-    Assert.assertNotEquals(VirtualColumns.EMPTY, virtualColumns);
-    Assert.assertNotEquals(VirtualColumns.EMPTY, null);
-    Assert.assertEquals(virtualColumns.hashCode(), virtualColumns.hashCode());
-    Assert.assertEquals(virtualColumns.hashCode(), virtualColumns2.hashCode());
-    Assert.assertNotEquals(VirtualColumns.EMPTY.hashCode(), virtualColumns.hashCode());
+    EqualsVerifier.forClass(VirtualColumns.class)
+                  .usingGetClass()
+                  .withIgnoredFields(
+                      "virtualColumnNames",
+                      "equivalence",
+                      "withDotSupport",
+                      "withoutDotSupport",
+                      "hasNoDotColumns"
+                  )
+                  .verify();
   }

   @Test

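The VirtualColumns verification above differs from the earlier ones: the listed fields are not compared by the class's equals method, and withIgnoredFields tells EqualsVerifier that the omission is intentional rather than a missing comparison. A generic sketch of that option on a hypothetical class with one non-identity field, not code from this repository:

import nl.jqno.equalsverifier.EqualsVerifier;

import java.util.Objects;

public class IgnoredFieldsSketch
{
  // Hypothetical class: "name" is the identity, "description" is deliberately
  // excluded from both equals and hashCode.
  static class Registered
  {
    private final String name;
    private final String description;

    Registered(String name, String description)
    {
      this.name = name;
      this.description = description;
    }

    @Override
    public boolean equals(Object o)
    {
      if (this == o) {
        return true;
      }
      if (o == null || getClass() != o.getClass()) {
        return false;
      }
      return Objects.equals(name, ((Registered) o).name);
    }

    @Override
    public int hashCode()
    {
      return Objects.hashCode(name);
    }
  }

  public static void main(String[] args)
  {
    // Without withIgnoredFields, EqualsVerifier reports "description" as a field
    // that equals ignores; listing it documents that this is by design.
    EqualsVerifier.forClass(Registered.class)
                  .usingGetClass()
                  .withIgnoredFields("description")
                  .verify();
    System.out.println("equals/hashCode contract verified");
  }
}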

@@ -42,7 +42,6 @@ public class TableMetadataTest
   public void testId()
   {
     TableId id1 = new TableId("schema", "table");
-    assertEquals(id1, id1);
     assertEquals("schema", id1.schema());
     assertEquals("table", id1.name());
     assertEquals("\"schema\".\"table\"", id1.sqlName());


@@ -1389,7 +1389,6 @@ public class DruidAvaticaHandlerTest extends CalciteTestBase
         ),
         rows
     );
-    Assert.assertEquals(rows, rows);
   }
 }