mirror of https://github.com/apache/druid.git
Allow compilation as Java8 source and target (#3328)
* Allow compilation as Java8 source and target for everything except API
* Remove conditions in tests which assume that we may run with Java 7
* Update easymock to 3.4
* Make Animal Sniffer check Java 1.8 usage; remove the redundant druid-caffeine-cache configuration
* Use try-with-resources in LargeColumnSupportedComplexColumnSerializerTest.testSanity()
* Remove the java7 special case for druid-api
Parent commit: e5c0dab12c
This commit: 805d85afda
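For context on the removed java.version guards in the test hunks below: those tests detect leaks by dropping the only reference to a resource handler, prodding the garbage collector, and polling a leak counter, which only works if System.gc() actually triggers a collection. A minimal, self-contained sketch of that pattern follows (illustrative names only, not the actual Druid classes); with Java 7 out of the picture, the guard is no longer needed.

import java.lang.ref.WeakReference;
import java.util.concurrent.atomic.AtomicLong;

public class LeakDetectionSketch
{
  private static final AtomicLong leakedHandlers = new AtomicLong();

  // Create a handler and immediately drop the only strong reference to it,
  // mirroring calls like makeReleasingHandler(released) / createDanglingClient().
  private static WeakReference<Object> createDanglingHandler()
  {
    return new WeakReference<>(new Object());
  }

  public static void main(String[] args) throws InterruptedException
  {
    WeakReference<Object> handler = createDanglingHandler();
    // Poll until the JVM has collected the unreachable handler; the removed guards existed
    // because System.gc() was observed not to trigger collection reliably on Java 7.
    for (int i = 0; i < 6000 && handler.get() != null; i++) {
      System.gc();
      Thread.sleep(10);
    }
    if (handler.get() == null) {
      leakedHandlers.incrementAndGet(); // the real classes do this bookkeeping internally
    }
    System.out.println("leaked handlers detected: " + leakedHandlers.get());
  }
}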
@@ -91,12 +91,6 @@ public class ReferenceCountingResourceHolderTest
   @Test(timeout = 60_000)
   public void testResourceHandlerClearedByJVM() throws InterruptedException
   {
-    if (System.getProperty("java.version").startsWith("1.7")) {
-      // This test is unreliable on Java 7, probably GC is not triggered by System.gc(). It is not a problem because
-      // this test should ever pass on any version of Java to prove that ReferenceCountingResourceHolder doesn't
-      // introduce leaks itself and actually cleans the leaked resources.
-      return;
-    }
     long initialLeakedResources = ReferenceCountingResourceHolder.leakedResources();
     final AtomicBoolean released = new AtomicBoolean(false);
     makeReleasingHandler(released); // Don't store the handler in a variable and don't close it, the object leaked
@@ -106,12 +100,6 @@ public class ReferenceCountingResourceHolderTest
   @Test(timeout = 60_000)
   public void testResourceHandlerWithReleaserClearedByJVM() throws InterruptedException
   {
-    if (System.getProperty("java.version").startsWith("1.7")) {
-      // This test is unreliable on Java 7, probably GC is not triggered by System.gc(). It is not a problem because
-      // this test should ever pass on any version of Java to prove that ReferenceCountingResourceHolder doesn't
-      // introduce leaks itself and actually cleans the leaked resources.
-      return;
-    }
     long initialLeakedResources = ReferenceCountingResourceHolder.leakedResources();
     final AtomicBoolean released = new AtomicBoolean(false);
     // createDanglingReleaser() need to be a separate method because otherwise JVM preserves a ref to Holder on stack
@@ -73,12 +73,6 @@ public class StupidPoolTest
   @Test(timeout = 60_000)
   public void testResourceHandlerClearedByJVM() throws InterruptedException
   {
-    if (System.getProperty("java.version").startsWith("1.7")) {
-      // This test is unreliable on Java 7, probably GC is not triggered by System.gc(). It is not a problem because
-      // this test should ever pass on any version of Java to prove that StupidPool doesn't introduce leaks itself and
-      // actually cleans the leaked objects.
-      return;
-    }
     String leakedString = createDanglingObjectHandler();
     // Wait until dangling object string is returned to the pool
     for (int i = 0; i < 6000 && poolOfString.leakedObjectsCount() == 0; i++) {
@@ -65,38 +65,4 @@
       <scope>test</scope>
     </dependency>
   </dependencies>
-  <build>
-    <plugins>
-      <plugin>
-        <groupId>org.apache.maven.plugins</groupId>
-        <artifactId>maven-compiler-plugin</artifactId>
-        <configuration>
-          <source>1.8</source>
-          <target>1.8</target>
-        </configuration>
-      </plugin>
-      <plugin>
-        <groupId>org.codehaus.mojo</groupId>
-        <artifactId>animal-sniffer-maven-plugin</artifactId>
-        <executions>
-          <!-- Override Animal Sniffer configuration for the JDK8 target. -->
-          <execution>
-            <id>check-java-api</id>
-            <phase>test</phase>
-            <goals>
-              <goal>check</goal>
-            </goals>
-            <configuration>
-              <signature>
-                <groupId>org.codehaus.mojo.signature
-                </groupId>
-                <artifactId>java18</artifactId>
-                <version>1.0</version>
-              </signature>
-            </configuration>
-          </execution>
-        </executions>
-      </plugin>
-    </plugins>
-  </build>
 </project>
pom.xml (12 changed lines)
@@ -647,7 +647,7 @@
     <dependency>
       <groupId>org.easymock</groupId>
       <artifactId>easymock</artifactId>
-      <version>3.3</version>
+      <version>3.4</version>
       <scope>test</scope>
     </dependency>
     <dependency>
@@ -811,7 +811,7 @@
             <signature>
               <groupId>org.codehaus.mojo.signature
               </groupId>
-              <artifactId>java17</artifactId>
+              <artifactId>java18</artifactId>
               <version>1.0</version>
             </signature>
             <ignores>
@@ -977,6 +977,14 @@
           <aggregate>true</aggregate>
         </configuration>
       </plugin>
+      <plugin>
+        <groupId>org.apache.maven.plugins</groupId>
+        <artifactId>maven-compiler-plugin</artifactId>
+        <configuration>
+          <source>1.8</source>
+          <target>1.8</target>
+        </configuration>
+      </plugin>
     </plugins>
   </pluginManagement>
 </build>
@@ -78,7 +78,7 @@ public class TimeBoundaryQueryTest
     );


-    Assert.assertEquals(1, serdeQuery.getContextValue("priority"));
+    Assert.assertEquals(new Integer(1), serdeQuery.getContextValue("priority"));
     Assert.assertEquals(true, serdeQuery.getContextValue("useCache"));
     Assert.assertEquals(true, serdeQuery.getContextValue("populateCache"));
     Assert.assertEquals(true, serdeQuery.getContextValue("finalize"));
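On the priority assertion above: the commit does not spell out the exact failure, but boxing the expected value is consistent with Java 8's stronger type inference steering assertEquals(1, ...) toward a primitive overload instead of assertEquals(Object, Object) when the actual argument comes from a generically typed getter. The following is a hedged sketch of the shape of the problem in plain Java, with made-up names rather than the Druid or JUnit classes:

import java.util.HashMap;
import java.util.Map;

public class ContextValueOverloadSketch
{
  private static final Map<String, Object> context = new HashMap<>();

  // Assumed shape of Query.getContextValue: a generic method whose return type is inferred at the
  // call site, while the stored value is whatever the JSON deserializer actually produced.
  @SuppressWarnings("unchecked")
  private static <ContextType> ContextType getContextValue(String key)
  {
    return (ContextType) context.get(key);
  }

  public static void main(String[] args)
  {
    context.put("priority", 1); // an Integer after a JSON round trip

    // Boxing the expected value keeps the comparison on an Object-vs-Object footing, which is what
    // Assert.assertEquals(new Integer(1), ...) does in the test; a bare int expected value would let
    // the compiler pick a primitive-flavoured overload and infer a numeric return type here.
    Object actual = getContextValue("priority");
    System.out.println(Integer.valueOf(1).equals(actual)); // true
  }
}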
@@ -56,35 +56,33 @@ public class LargeColumnSupportedComplexColumnSerializerTest
         2500 * Longs.BYTES
     };

-    for (int k = 0; k < columnSizes.length; k++) {
-      for (int j = 0; j < cases.length; j++) {
-        IOPeon peon = new TmpFileIOPeon();
+    for (int columnSize : columnSizes) {
+      for (int aCase : cases) {
         File tmpFile = FileUtils.getTempDirectory();
-        final FileSmoosher v9Smoosher = new FileSmoosher(tmpFile);
-
-        LargeColumnSupportedComplexColumnSerializer serializer = LargeColumnSupportedComplexColumnSerializer
-            .createWithColumnSize(peon, "test", serde.getObjectStrategy(), columnSizes[k]);
         HyperLogLogCollector baseCollector = HyperLogLogCollector.makeLatestCollector();
+        try (IOPeon peon = new TmpFileIOPeon();
+             FileSmoosher v9Smoosher = new FileSmoosher(tmpFile)) {

-        serializer.open();
-        for (int i = 0; i < cases[j]; i++) {
-          HyperLogLogCollector collector = HyperLogLogCollector.makeLatestCollector();
-          byte[] hashBytes = fn.hashLong(i).asBytes();
-          collector.add(hashBytes);
-          baseCollector.fold(collector);
-          serializer.serialize(collector);
+          LargeColumnSupportedComplexColumnSerializer serializer = LargeColumnSupportedComplexColumnSerializer
+              .createWithColumnSize(peon, "test", serde.getObjectStrategy(), columnSize);
+
+          serializer.open();
+          for (int i = 0; i < aCase; i++) {
+            HyperLogLogCollector collector = HyperLogLogCollector.makeLatestCollector();
+            byte[] hashBytes = fn.hashLong(i).asBytes();
+            collector.add(hashBytes);
+            baseCollector.fold(collector);
+            serializer.serialize(collector);
+          }
+          serializer.close();
+
+          try (final SmooshedWriter channel = v9Smoosher.addWithSmooshedWriter(
+              "test",
+              serializer.getSerializedSize()
+          )) {
+            serializer.writeToChannel(channel, v9Smoosher);
+          }
         }
-        serializer.close();
-
-        final SmooshedWriter channel = v9Smoosher.addWithSmooshedWriter(
-            "test",
-            serializer.getSerializedSize()
-        );
-        serializer.writeToChannel(channel, v9Smoosher);
-
-        channel.close();
-        peon.close();
-        v9Smoosher.close();

         SmooshedFileMapper mapper = Smoosh.map(tmpFile);
         final ColumnBuilder builder = new ColumnBuilder()
@@ -97,7 +95,7 @@ public class LargeColumnSupportedComplexColumnSerializerTest
       ComplexColumn complexColumn = column.getComplexColumn();
       HyperLogLogCollector collector = HyperLogLogCollector.makeLatestCollector();

-      for (int i = 0; i < cases[j]; i++) {
+      for (int i = 0; i < aCase; i++) {
         collector.fold((HyperLogLogCollector) complexColumn.getRowValue(i));
       }
       Assert.assertEquals(baseCollector.estimateCardinality(), collector.estimateCardinality(), 0.0);
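The rewrite above replaces manual close() calls with try-with-resources, so the IOPeon and the FileSmoosher are closed even if serialization or an assertion throws. A generic sketch of the semantics the test now leans on (stand-in classes, not the Druid ones): resources declared in the try header are closed automatically in reverse declaration order.

import java.io.Closeable;
import java.io.IOException;

public class TryWithResourcesSketch
{
  // Stand-ins for TmpFileIOPeon and FileSmoosher, which both implement Closeable.
  static class Peon implements Closeable
  {
    @Override public void close() { System.out.println("peon closed"); }
  }

  static class Smoosher implements Closeable
  {
    @Override public void close() throws IOException { System.out.println("smoosher closed"); }
  }

  public static void main(String[] args) throws IOException
  {
    // Both resources are closed automatically, smoosher first and then peon (reverse order),
    // even if the body throws; the old version of the test called close() by hand and would
    // skip the closes entirely if anything before them failed.
    try (Peon peon = new Peon();
         Smoosher smoosher = new Smoosher()) {
      System.out.println("writing column");
    }
  }
}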
@@ -395,7 +395,7 @@ public class CachingClusteredClientTest
       }
       return task instanceof Callable ?
              delegate.submit((Callable) task) :
-             delegate.submit((Runnable) task);
+             (ListenableFuture<T>) delegate.submit((Runnable) task);
     }

     @Override
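The added cast reflects how Guava types the two submit overloads: ListeningExecutorService.submit(Callable<T>) returns ListenableFuture<T>, while submit(Runnable) returns ListenableFuture<?>. Why the uncast form was accepted by the Java 7 compiler but not the Java 8 one is not spelled out in the commit, so treat the following as a sketch of the pattern rather than the test's real code (the wrapper class and its executor are made up):

import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.ListeningExecutorService;
import com.google.common.util.concurrent.MoreExecutors;

import java.util.concurrent.Callable;
import java.util.concurrent.Executors;

public class ForwardingSubmitSketch
{
  private final ListeningExecutorService delegate =
      MoreExecutors.listeningDecorator(Executors.newSingleThreadExecutor());

  // The Callable branch already carries the right type parameter; the Runnable branch yields
  // ListenableFuture<?>, so returning it as ListenableFuture<T> needs an explicit, unchecked cast.
  @SuppressWarnings("unchecked")
  <T> ListenableFuture<T> submit(Object task)
  {
    return task instanceof Callable
           ? delegate.submit((Callable<T>) task)
           : (ListenableFuture<T>) delegate.submit((Runnable) task);
  }
}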
@@ -57,12 +57,6 @@ public class MemcacheClientPoolTest
   @Test
   public void testClientLeakDetected() throws InterruptedException
   {
-    if (System.getProperty("java.version").startsWith("1.7")) {
-      // This test is unreliable on Java 7, probably GC is not triggered by System.gc(). It is not a problem because
-      // this test should ever pass on any version of Java to prove that MemcacheClientPool doesn't introduce leaks
-      // itself.
-      return;
-    }
     long initialLeakedClients = MemcacheClientPool.leakedClients();
     createDanglingClient();
     // Wait until Closer runs