diff --git a/codestyle/checkstyle-suppressions.xml b/codestyle/checkstyle-suppressions.xml
index 0c8300aedc6..15604f37a47 100644
--- a/codestyle/checkstyle-suppressions.xml
+++ b/codestyle/checkstyle-suppressions.xml
@@ -29,9 +29,6 @@
-
-
-
diff --git a/core/src/test/java/org/apache/druid/collections/BlockingPoolTest.java b/core/src/test/java/org/apache/druid/collections/BlockingPoolTest.java
index 19a21f2c089..19d6a496301 100644
--- a/core/src/test/java/org/apache/druid/collections/BlockingPoolTest.java
+++ b/core/src/test/java/org/apache/druid/collections/BlockingPoolTest.java
@@ -22,6 +22,7 @@ package org.apache.druid.collections;
 import com.google.common.base.Suppliers;
 import org.apache.druid.java.util.common.concurrent.Execs;
 import org.junit.After;
+import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Rule;
 import org.junit.Test;
@@ -34,11 +35,6 @@ import java.util.concurrent.ExecutionException;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.Future;
-import static org.junit.Assert.assertEquals;
-import static org.junit.Assert.assertNotNull;
-import static org.junit.Assert.assertNull;
-import static org.junit.Assert.assertTrue;
-
 public class BlockingPoolTest
 {
   private ExecutorService service;
@@ -85,10 +81,10 @@ public class BlockingPoolTest
   public void testTake()
   {
     final ReferenceCountingResourceHolder<Integer> holder = pool.take(100);
-    assertNotNull(holder);
-    assertEquals(9, pool.getPoolSize());
+    Assert.assertNotNull(holder);
+    Assert.assertEquals(9, pool.getPoolSize());
     holder.close();
-    assertEquals(10, pool.getPoolSize());
+    Assert.assertEquals(10, pool.getPoolSize());
   }
   @Test(timeout = 60_000L)
@@ -96,7 +92,7 @@
   {
     final List<ReferenceCountingResourceHolder<Integer>> batchHolder = pool.takeBatch(10, 100L);
     final ReferenceCountingResourceHolder<Integer> holder = pool.take(100);
-    assertNull(holder);
+    Assert.assertNull(holder);
     batchHolder.forEach(ReferenceCountingResourceHolder::close);
   }
@@ -104,20 +100,20 @@
   public void testTakeBatch()
   {
     final List<ReferenceCountingResourceHolder<Integer>> holder = pool.takeBatch(6, 100L);
-    assertNotNull(holder);
-    assertEquals(6, holder.size());
-    assertEquals(4, pool.getPoolSize());
+    Assert.assertNotNull(holder);
+    Assert.assertEquals(6, holder.size());
+    Assert.assertEquals(4, pool.getPoolSize());
     holder.forEach(ReferenceCountingResourceHolder::close);
-    assertEquals(10, pool.getPoolSize());
+    Assert.assertEquals(10, pool.getPoolSize());
   }
   @Test(timeout = 60_000L)
   public void testWaitAndTakeBatch() throws InterruptedException, ExecutionException
   {
     List<ReferenceCountingResourceHolder<Integer>> batchHolder = pool.takeBatch(10, 10);
-    assertNotNull(batchHolder);
-    assertEquals(10, batchHolder.size());
-    assertEquals(0, pool.getPoolSize());
+    Assert.assertNotNull(batchHolder);
+    Assert.assertEquals(10, batchHolder.size());
+    Assert.assertEquals(0, pool.getPoolSize());
     final Future<List<ReferenceCountingResourceHolder<Integer>>> future = service.submit(
         () -> pool.takeBatch(8, 100)
     );
@@ -126,19 +122,19 @@ public class BlockingPoolTest
     batchHolder.forEach(ReferenceCountingResourceHolder::close);
     batchHolder = future.get();
-    assertNotNull(batchHolder);
-    assertEquals(8, batchHolder.size());
-    assertEquals(2, pool.getPoolSize());
+    Assert.assertNotNull(batchHolder);
+    Assert.assertEquals(8, batchHolder.size());
+    Assert.assertEquals(2, pool.getPoolSize());
     batchHolder.forEach(ReferenceCountingResourceHolder::close);
-    assertEquals(10, pool.getPoolSize());
+    Assert.assertEquals(10, pool.getPoolSize());
   }
   @Test(timeout = 60_000L)
   public void testTakeBatchTooManyObjects()
   {
     final List<ReferenceCountingResourceHolder<Integer>> holder = pool.takeBatch(100, 100L);
-    assertTrue(holder.isEmpty());
+    Assert.assertTrue(holder.isEmpty());
   }
   @Test(timeout = 60_000L)
@@ -148,39 +144,29 @@ public class BlockingPoolTest
     final int limit2 = pool.maxSize() - limit1 + 1;
     final Future<List<ReferenceCountingResourceHolder<Integer>>> f1 = service.submit(
-        new Callable<List<ReferenceCountingResourceHolder<Integer>>>()
-        {
-          @Override
-          public List<ReferenceCountingResourceHolder<Integer>> call()
-          {
-            List<ReferenceCountingResourceHolder<Integer>> result = new ArrayList<>();
-            for (int i = 0; i < limit1; i++) {
-              result.add(pool.take(10));
-            }
-            return result;
+        () -> {
+          List<ReferenceCountingResourceHolder<Integer>> result = new ArrayList<>();
+          for (int i = 0; i < limit1; i++) {
+            result.add(pool.take(10));
           }
+          return result;
         }
     );
     final Future<List<ReferenceCountingResourceHolder<Integer>>> f2 = service.submit(
-        new Callable<List<ReferenceCountingResourceHolder<Integer>>>()
-        {
-          @Override
-          public List<ReferenceCountingResourceHolder<Integer>> call()
-          {
-            List<ReferenceCountingResourceHolder<Integer>> result = new ArrayList<>();
-            for (int i = 0; i < limit2; i++) {
-              result.add(pool.take(10));
-            }
-            return result;
+        () -> {
+          List<ReferenceCountingResourceHolder<Integer>> result = new ArrayList<>();
+          for (int i = 0; i < limit2; i++) {
+            result.add(pool.take(10));
          }
+          return result;
         }
     );
     final List<ReferenceCountingResourceHolder<Integer>> r1 = f1.get();
     final List<ReferenceCountingResourceHolder<Integer>> r2 = f2.get();
-    assertEquals(0, pool.getPoolSize());
-    assertTrue(r1.contains(null) || r2.contains(null));
+    Assert.assertEquals(0, pool.getPoolSize());
+    Assert.assertTrue(r1.contains(null) || r2.contains(null));
     int nonNullCount = 0;
     for (ReferenceCountingResourceHolder<Integer> holder : r1) {
@@ -194,29 +180,19 @@ public class BlockingPoolTest
         nonNullCount++;
       }
     }
-    assertEquals(pool.maxSize(), nonNullCount);
+    Assert.assertEquals(pool.maxSize(), nonNullCount);
-    final Future future1 = service.submit(new Runnable()
-    {
-      @Override
-      public void run()
-      {
-        for (ReferenceCountingResourceHolder<Integer> holder : r1) {
-          if (holder != null) {
-            holder.close();
-          }
+    final Future future1 = service.submit(() -> {
+      for (ReferenceCountingResourceHolder<Integer> holder : r1) {
+        if (holder != null) {
+          holder.close();
         }
       }
     });
-    final Future future2 = service.submit(new Runnable()
-    {
-      @Override
-      public void run()
-      {
-        for (ReferenceCountingResourceHolder<Integer> holder : r2) {
-          if (holder != null) {
-            holder.close();
-          }
+    final Future future2 = service.submit(() -> {
+      for (ReferenceCountingResourceHolder<Integer> holder : r2) {
+        if (holder != null) {
+          holder.close();
        }
      }
    });
@@ -224,7 +200,7 @@ public class BlockingPoolTest
     future1.get();
     future2.get();
-    assertEquals(pool.maxSize(), pool.getPoolSize());
+    Assert.assertEquals(pool.maxSize(), pool.getPoolSize());
   }
   @Test(timeout = 60_000L)
@@ -243,18 +219,18 @@ public class BlockingPoolTest
     final List<ReferenceCountingResourceHolder<Integer>> r2 = f2.get();
     if (r1 != null) {
-      assertTrue(r2.isEmpty());
-      assertEquals(pool.maxSize() - batch1, pool.getPoolSize());
-      assertEquals(batch1, r1.size());
+      Assert.assertTrue(r2.isEmpty());
+      Assert.assertEquals(pool.maxSize() - batch1, pool.getPoolSize());
+      Assert.assertEquals(batch1, r1.size());
       r1.forEach(ReferenceCountingResourceHolder::close);
     } else {
-      assertNotNull(r2);
-      assertEquals(pool.maxSize() - batch2, pool.getPoolSize());
-      assertEquals(batch2, r2.size());
+      Assert.assertNotNull(r2);
+      Assert.assertEquals(pool.maxSize() - batch2, pool.getPoolSize());
+      Assert.assertEquals(batch2, r2.size());
       r2.forEach(ReferenceCountingResourceHolder::close);
     }
-    assertEquals(pool.maxSize(), pool.getPoolSize());
+    Assert.assertEquals(pool.maxSize(), pool.getPoolSize());
   }
   @Test(timeout = 60_000L)
@@ -272,35 +248,22 @@ public class BlockingPoolTest
     final List<ReferenceCountingResourceHolder<Integer>> r1 = f1.get();
     final List<ReferenceCountingResourceHolder<Integer>> r2 = f2.get();
-    assertNotNull(r1);
-    assertNotNull(r2);
-    assertEquals(batch1, r1.size());
-    assertEquals(batch2, r2.size());
-    assertEquals(0, pool.getPoolSize());
+    Assert.assertNotNull(r1);
+    Assert.assertNotNull(r2);
+    Assert.assertEquals(batch1, r1.size());
+
Assert.assertEquals(batch2, r2.size()); + Assert.assertEquals(0, pool.getPoolSize()); - final Future future1 = service.submit(new Runnable() - { - @Override - public void run() - { - r1.forEach(ReferenceCountingResourceHolder::close); - } - }); - final Future future2 = service.submit(new Runnable() - { - @Override - public void run() - { - r2.forEach(ReferenceCountingResourceHolder::close); - } - }); + final Future future1 = service.submit(() -> r1.forEach(ReferenceCountingResourceHolder::close)); + final Future future2 = service.submit(() -> r2.forEach(ReferenceCountingResourceHolder::close)); future1.get(); future2.get(); - assertEquals(pool.maxSize(), pool.getPoolSize()); + Assert.assertEquals(pool.maxSize(), pool.getPoolSize()); } + @SuppressWarnings("CatchMayIgnoreException") @Test(timeout = 60_000L) public void testConcurrentTakeBatchClose() throws ExecutionException, InterruptedException { @@ -309,28 +272,23 @@ public class BlockingPoolTest final Callable>> c2 = () -> pool.takeBatch(10, 100); final Future>> f2 = service.submit(c2); - final Future f1 = service.submit(new Runnable() - { - @Override - public void run() - { - try { - Thread.sleep(50); - } - catch (InterruptedException e) { - // ignore - } - r1.forEach(ReferenceCountingResourceHolder::close); + final Future f1 = service.submit(() -> { + try { + Thread.sleep(50); } + catch (InterruptedException e) { + // ignore + } + r1.forEach(ReferenceCountingResourceHolder::close); }); final List> r2 = f2.get(); f1.get(); - assertNotNull(r2); - assertEquals(10, r2.size()); - assertEquals(0, pool.getPoolSize()); + Assert.assertNotNull(r2); + Assert.assertEquals(10, r2.size()); + Assert.assertEquals(0, pool.getPoolSize()); r2.forEach(ReferenceCountingResourceHolder::close); - assertEquals(pool.maxSize(), pool.getPoolSize()); + Assert.assertEquals(pool.maxSize(), pool.getPoolSize()); } } diff --git a/core/src/test/java/org/apache/druid/java/util/emitter/core/ParametrizedUriEmitterTest.java b/core/src/test/java/org/apache/druid/java/util/emitter/core/ParametrizedUriEmitterTest.java index 40f79dd167b..db6660fb8f9 100644 --- a/core/src/test/java/org/apache/druid/java/util/emitter/core/ParametrizedUriEmitterTest.java +++ b/core/src/test/java/org/apache/druid/java/util/emitter/core/ParametrizedUriEmitterTest.java @@ -40,9 +40,6 @@ import java.util.List; import java.util.Map; import java.util.Properties; -import static org.apache.druid.java.util.emitter.core.EmitterTest.okResponse; -import static org.junit.Assert.assertEquals; - public class ParametrizedUriEmitterTest { private static final ObjectMapper jsonMapper = new ObjectMapper(); @@ -71,7 +68,7 @@ public class ParametrizedUriEmitterTest props.setProperty("org.apache.druid.java.util.emitter.recipientBaseUrlPattern", uriPattern); lifecycle = new Lifecycle(); Emitter emitter = Emitters.create(props, httpClient, lifecycle); - assertEquals(ParametrizedUriEmitter.class, emitter.getClass()); + Assert.assertEquals(ParametrizedUriEmitter.class, emitter.getClass()); lifecycle.start(); return emitter; } @@ -107,7 +104,7 @@ public class ParametrizedUriEmitterTest StandardCharsets.UTF_8.decode(request.getByteBufferData().slice()).toString() ); - return GoHandlers.immediateFuture(okResponse()); + return GoHandlers.immediateFuture(EmitterTest.okResponse()); } }.times(1) ); @@ -140,7 +137,7 @@ public class ParametrizedUriEmitterTest request.getUrl(), StandardCharsets.UTF_8.decode(request.getByteBufferData().slice()).toString() ); - return GoHandlers.immediateFuture(okResponse()); + return 
GoHandlers.immediateFuture(EmitterTest.okResponse()); } }.times(2) ); @@ -152,7 +149,8 @@ public class ParametrizedUriEmitterTest Assert.assertTrue(httpClient.succeeded()); Map expected = ImmutableMap.of( "http://example.com/test1", StringUtils.format("[%s]\n", jsonMapper.writeValueAsString(events.get(0))), - "http://example.com/test2", StringUtils.format("[%s]\n", jsonMapper.writeValueAsString(events.get(1)))); + "http://example.com/test2", StringUtils.format("[%s]\n", jsonMapper.writeValueAsString(events.get(1))) + ); Assert.assertEquals(expected, results); } @@ -181,7 +179,7 @@ public class ParametrizedUriEmitterTest StandardCharsets.UTF_8.decode(request.getByteBufferData().slice()).toString() ); - return GoHandlers.immediateFuture(okResponse()); + return GoHandlers.immediateFuture(EmitterTest.okResponse()); } }.times(1) ); @@ -209,7 +207,9 @@ public class ParametrizedUriEmitterTest Assert.assertEquals( e.getMessage(), StringUtils.format( - "ParametrizedUriExtractor with pattern http://example.com/{keyNotSetInEvents} requires keyNotSetInEvents to be set in event, but found %s", event.toMap()) + "ParametrizedUriExtractor with pattern http://example.com/{keyNotSetInEvents} requires keyNotSetInEvents to be set in event, but found %s", + event.toMap() + ) ); } } diff --git a/core/src/test/java/org/apache/druid/timeline/partition/IntegerPartitionChunkTest.java b/core/src/test/java/org/apache/druid/timeline/partition/IntegerPartitionChunkTest.java index 71806bb9c80..36b3915f9de 100644 --- a/core/src/test/java/org/apache/druid/timeline/partition/IntegerPartitionChunkTest.java +++ b/core/src/test/java/org/apache/druid/timeline/partition/IntegerPartitionChunkTest.java @@ -22,61 +22,83 @@ package org.apache.druid.timeline.partition; import org.junit.Assert; import org.junit.Test; -import static org.apache.druid.timeline.partition.IntegerPartitionChunk.make; - -/** - */ public class IntegerPartitionChunkTest { @Test public void testAbuts() { - IntegerPartitionChunk lhs = make(null, 10, 0, 1); + IntegerPartitionChunk lhs = IntegerPartitionChunk.make(null, 10, 0, 1); - Assert.assertTrue(lhs.abuts(make(10, null, 1, 2))); - Assert.assertFalse(lhs.abuts(make(11, null, 2, 3))); - Assert.assertFalse(lhs.abuts(make(null, null, 3, 4))); + Assert.assertTrue(lhs.abuts(IntegerPartitionChunk.make(10, null, 1, 2))); + Assert.assertFalse(lhs.abuts(IntegerPartitionChunk.make(11, null, 2, 3))); + Assert.assertFalse(lhs.abuts(IntegerPartitionChunk.make(null, null, 3, 4))); - Assert.assertFalse(make(null, null, 0, 1).abuts(make(null, null, 1, 2))); + Assert.assertFalse(IntegerPartitionChunk.make(null, null, 0, 1) + .abuts(IntegerPartitionChunk.make(null, null, 1, 2))); } @Test public void testIsStart() { - Assert.assertTrue(make(null, 10, 0, 1).isStart()); - Assert.assertFalse(make(10, null, 0, 1).isStart()); - Assert.assertFalse(make(10, 11, 0, 1).isStart()); - Assert.assertTrue(make(null, null, 0, 1).isStart()); + Assert.assertTrue(IntegerPartitionChunk.make(null, 10, 0, 1).isStart()); + Assert.assertFalse(IntegerPartitionChunk.make(10, null, 0, 1).isStart()); + Assert.assertFalse(IntegerPartitionChunk.make(10, 11, 0, 1).isStart()); + Assert.assertTrue(IntegerPartitionChunk.make(null, null, 0, 1).isStart()); } @Test public void testIsEnd() { - Assert.assertFalse(make(null, 10, 0, 1).isEnd()); - Assert.assertTrue(make(10, null, 0, 1).isEnd()); - Assert.assertFalse(make(10, 11, 0, 1).isEnd()); - Assert.assertTrue(make(null, null, 0, 1).isEnd()); + Assert.assertFalse(IntegerPartitionChunk.make(null, 10, 0, 
1).isEnd()); + Assert.assertTrue(IntegerPartitionChunk.make(10, null, 0, 1).isEnd()); + Assert.assertFalse(IntegerPartitionChunk.make(10, 11, 0, 1).isEnd()); + Assert.assertTrue(IntegerPartitionChunk.make(null, null, 0, 1).isEnd()); } @Test public void testCompareTo() { - Assert.assertEquals(0, make(null, null, 0, 1).compareTo(make(null, null, 0, 1))); - Assert.assertEquals(0, make(10, null, 0, 1).compareTo(make(10, null, 0, 2))); - Assert.assertEquals(0, make(null, 10, 0, 1).compareTo(make(null, 10, 0, 2))); - Assert.assertEquals(0, make(10, 11, 0, 1).compareTo(make(10, 11, 0, 2))); - Assert.assertEquals(-1, make(null, 10, 0, 1).compareTo(make(10, null, 1, 2))); - Assert.assertEquals(-1, make(11, 20, 0, 1).compareTo(make(20, 33, 1, 1))); - Assert.assertEquals(1, make(20, 33, 1, 1).compareTo(make(11, 20, 0, 1))); - Assert.assertEquals(1, make(10, null, 1, 1).compareTo(make(null, 10, 0, 1))); + //noinspection EqualsWithItself (the intention of this first test is specifically to call compareTo with itself) + Assert.assertEquals( + 0, + IntegerPartitionChunk.make(null, null, 0, 1).compareTo(IntegerPartitionChunk.make(null, null, 0, 1)) + ); + Assert.assertEquals( + 0, + IntegerPartitionChunk.make(10, null, 0, 1).compareTo(IntegerPartitionChunk.make(10, null, 0, 2)) + ); + Assert.assertEquals( + 0, + IntegerPartitionChunk.make(null, 10, 0, 1).compareTo(IntegerPartitionChunk.make(null, 10, 0, 2)) + ); + Assert.assertEquals( + 0, + IntegerPartitionChunk.make(10, 11, 0, 1).compareTo(IntegerPartitionChunk.make(10, 11, 0, 2)) + ); + Assert.assertEquals( + -1, + IntegerPartitionChunk.make(null, 10, 0, 1).compareTo(IntegerPartitionChunk.make(10, null, 1, 2)) + ); + Assert.assertEquals( + -1, + IntegerPartitionChunk.make(11, 20, 0, 1).compareTo(IntegerPartitionChunk.make(20, 33, 1, 1)) + ); + Assert.assertEquals( + 1, + IntegerPartitionChunk.make(20, 33, 1, 1).compareTo(IntegerPartitionChunk.make(11, 20, 0, 1)) + ); + Assert.assertEquals( + 1, + IntegerPartitionChunk.make(10, null, 1, 1).compareTo(IntegerPartitionChunk.make(null, 10, 0, 1)) + ); } @Test public void testEquals() { - Assert.assertEquals(make(null, null, 0, 1), make(null, null, 0, 1)); - Assert.assertEquals(make(null, 10, 0, 1), make(null, 10, 0, 1)); - Assert.assertEquals(make(10, null, 0, 1), make(10, null, 0, 1)); - Assert.assertEquals(make(10, 11, 0, 1), make(10, 11, 0, 1)); + Assert.assertEquals(IntegerPartitionChunk.make(null, null, 0, 1), IntegerPartitionChunk.make(null, null, 0, 1)); + Assert.assertEquals(IntegerPartitionChunk.make(null, 10, 0, 1), IntegerPartitionChunk.make(null, 10, 0, 1)); + Assert.assertEquals(IntegerPartitionChunk.make(10, null, 0, 1), IntegerPartitionChunk.make(10, null, 0, 1)); + Assert.assertEquals(IntegerPartitionChunk.make(10, 11, 0, 1), IntegerPartitionChunk.make(10, 11, 0, 1)); } } diff --git a/core/src/test/java/org/apache/druid/timeline/partition/StringPartitionChunkTest.java b/core/src/test/java/org/apache/druid/timeline/partition/StringPartitionChunkTest.java index c178f4c4e32..e3f98462903 100644 --- a/core/src/test/java/org/apache/druid/timeline/partition/StringPartitionChunkTest.java +++ b/core/src/test/java/org/apache/druid/timeline/partition/StringPartitionChunkTest.java @@ -22,61 +22,89 @@ package org.apache.druid.timeline.partition; import org.junit.Assert; import org.junit.Test; -import static org.apache.druid.timeline.partition.StringPartitionChunk.make; - -/** - */ public class StringPartitionChunkTest { @Test public void testAbuts() { - StringPartitionChunk lhs = make(null, "10", 0, 1); 
+ StringPartitionChunk lhs = StringPartitionChunk.make(null, "10", 0, 1); - Assert.assertTrue(lhs.abuts(make("10", null, 1, 2))); - Assert.assertFalse(lhs.abuts(make("11", null, 2, 3))); - Assert.assertFalse(lhs.abuts(make(null, null, 3, 4))); + Assert.assertTrue(lhs.abuts(StringPartitionChunk.make("10", null, 1, 2))); + Assert.assertFalse(lhs.abuts(StringPartitionChunk.make("11", null, 2, 3))); + Assert.assertFalse(lhs.abuts(StringPartitionChunk.make(null, null, 3, 4))); - Assert.assertFalse(make(null, null, 0, 1).abuts(make(null, null, 1, 2))); + Assert.assertFalse(StringPartitionChunk.make(null, null, 0, 1).abuts(StringPartitionChunk.make(null, null, 1, 2))); } @Test public void testIsStart() { - Assert.assertTrue(make(null, "10", 0, 1).isStart()); - Assert.assertFalse(make("10", null, 0, 1).isStart()); - Assert.assertFalse(make("10", "11", 0, 1).isStart()); - Assert.assertTrue(make(null, null, 0, 1).isStart()); + Assert.assertTrue(StringPartitionChunk.make(null, "10", 0, 1).isStart()); + Assert.assertFalse(StringPartitionChunk.make("10", null, 0, 1).isStart()); + Assert.assertFalse(StringPartitionChunk.make("10", "11", 0, 1).isStart()); + Assert.assertTrue(StringPartitionChunk.make(null, null, 0, 1).isStart()); } @Test public void testIsEnd() { - Assert.assertFalse(make(null, "10", 0, 1).isEnd()); - Assert.assertTrue(make("10", null, 0, 1).isEnd()); - Assert.assertFalse(make("10", "11", 0, 1).isEnd()); - Assert.assertTrue(make(null, null, 0, 1).isEnd()); + Assert.assertFalse(StringPartitionChunk.make(null, "10", 0, 1).isEnd()); + Assert.assertTrue(StringPartitionChunk.make("10", null, 0, 1).isEnd()); + Assert.assertFalse(StringPartitionChunk.make("10", "11", 0, 1).isEnd()); + Assert.assertTrue(StringPartitionChunk.make(null, null, 0, 1).isEnd()); } @Test public void testCompareTo() { - Assert.assertEquals(0, make(null, null, 0, 1).compareTo(make(null, null, 0, 2))); - Assert.assertEquals(0, make("10", null, 0, 1).compareTo(make("10", null, 0, 2))); - Assert.assertEquals(0, make(null, "10", 1, 1).compareTo(make(null, "10", 1, 2))); - Assert.assertEquals(0, make("10", "11", 1, 1).compareTo(make("10", "11", 1, 2))); - Assert.assertEquals(-1, make(null, "10", 0, 1).compareTo(make("10", null, 1, 2))); - Assert.assertEquals(-1, make("11", "20", 0, 1).compareTo(make("20", "33", 1, 1))); - Assert.assertEquals(1, make("20", "33", 1, 1).compareTo(make("11", "20", 0, 1))); - Assert.assertEquals(1, make("10", null, 1, 1).compareTo(make(null, "10", 0, 1))); + Assert.assertEquals( + 0, + StringPartitionChunk.make(null, null, 0, 1) + .compareTo(StringPartitionChunk.make(null, null, 0, 2)) + ); + Assert.assertEquals( + 0, + StringPartitionChunk.make("10", null, 0, 1) + .compareTo(StringPartitionChunk.make("10", null, 0, 2)) + ); + Assert.assertEquals( + 0, + StringPartitionChunk.make(null, "10", 1, 1) + .compareTo(StringPartitionChunk.make(null, "10", 1, 2)) + ); + Assert.assertEquals( + 0, + StringPartitionChunk.make("10", "11", 1, 1) + .compareTo(StringPartitionChunk.make("10", "11", 1, 2)) + ); + Assert.assertEquals( + -1, + StringPartitionChunk.make(null, "10", 0, 1) + .compareTo(StringPartitionChunk.make("10", null, 1, 2)) + ); + Assert.assertEquals( + -1, + StringPartitionChunk.make("11", "20", 0, 1) + .compareTo(StringPartitionChunk.make("20", "33", 1, 1)) + ); + Assert.assertEquals( + 1, + StringPartitionChunk.make("20", "33", 1, 1) + .compareTo(StringPartitionChunk.make("11", "20", 0, 1)) + ); + Assert.assertEquals( + 1, + StringPartitionChunk.make("10", null, 1, 1) + 
.compareTo(StringPartitionChunk.make(null, "10", 0, 1)) + ); } @Test public void testEquals() { - Assert.assertEquals(make(null, null, 0, 1), make(null, null, 0, 1)); - Assert.assertEquals(make(null, "10", 0, 1), make(null, "10", 0, 1)); - Assert.assertEquals(make("10", null, 0, 1), make("10", null, 0, 1)); - Assert.assertEquals(make("10", "11", 0, 1), make("10", "11", 0, 1)); + Assert.assertEquals(StringPartitionChunk.make(null, null, 0, 1), StringPartitionChunk.make(null, null, 0, 1)); + Assert.assertEquals(StringPartitionChunk.make(null, "10", 0, 1), StringPartitionChunk.make(null, "10", 0, 1)); + Assert.assertEquals(StringPartitionChunk.make("10", null, 0, 1), StringPartitionChunk.make("10", null, 0, 1)); + Assert.assertEquals(StringPartitionChunk.make("10", "11", 0, 1), StringPartitionChunk.make("10", "11", 0, 1)); } } diff --git a/extensions-contrib/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureByteSourceTest.java b/extensions-contrib/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureByteSourceTest.java index 539d567944b..2947a7dedfa 100644 --- a/extensions-contrib/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureByteSourceTest.java +++ b/extensions-contrib/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureByteSourceTest.java @@ -20,6 +20,7 @@ package org.apache.druid.storage.azure; import com.microsoft.azure.storage.StorageException; +import org.easymock.EasyMock; import org.easymock.EasyMockSupport; import org.junit.Test; @@ -27,8 +28,6 @@ import java.io.IOException; import java.io.InputStream; import java.net.URISyntaxException; -import static org.easymock.EasyMock.expect; - public class AzureByteSourceTest extends EasyMockSupport { @@ -40,7 +39,7 @@ public class AzureByteSourceTest extends EasyMockSupport AzureStorage azureStorage = createMock(AzureStorage.class); InputStream stream = createMock(InputStream.class); - expect(azureStorage.getBlobInputStream(containerName, blobPath)).andReturn(stream); + EasyMock.expect(azureStorage.getBlobInputStream(containerName, blobPath)).andReturn(stream); replayAll(); @@ -58,7 +57,7 @@ public class AzureByteSourceTest extends EasyMockSupport final String blobPath = "/path/to/file"; AzureStorage azureStorage = createMock(AzureStorage.class); - expect(azureStorage.getBlobInputStream(containerName, blobPath)).andThrow( + EasyMock.expect(azureStorage.getBlobInputStream(containerName, blobPath)).andThrow( new StorageException( "", "", diff --git a/extensions-contrib/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureDataSegmentKillerTest.java b/extensions-contrib/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureDataSegmentKillerTest.java index 9657836d57e..827d5538396 100644 --- a/extensions-contrib/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureDataSegmentKillerTest.java +++ b/extensions-contrib/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureDataSegmentKillerTest.java @@ -25,6 +25,7 @@ import org.apache.druid.java.util.common.Intervals; import org.apache.druid.segment.loading.SegmentLoadingException; import org.apache.druid.timeline.DataSegment; import org.apache.druid.timeline.partition.NoneShardSpec; +import org.easymock.EasyMock; import org.easymock.EasyMockSupport; import org.junit.Before; import org.junit.Test; @@ -34,19 +35,16 @@ import java.nio.file.Paths; import java.util.ArrayList; import java.util.List; -import static org.easymock.EasyMock.expect; - public class AzureDataSegmentKillerTest 
extends EasyMockSupport { + private static final String CONTAINER_NAME = "container"; + private static final String BLOB_PATH = "test/2015-04-12T00:00:00.000Z_2015-04-13T00:00:00.000Z/1/0/index.zip"; - private static final String containerName = "container"; - private static final String blobPath = "test/2015-04-12T00:00:00.000Z_2015-04-13T00:00:00.000Z/1/0/index.zip"; - - private static final DataSegment dataSegment = new DataSegment( + private static final DataSegment DATA_SEGMENT = new DataSegment( "test", Intervals.of("2015-04-12/2015-04-13"), "1", - ImmutableMap.of("containerName", containerName, "blobPath", blobPath), + ImmutableMap.of("containerName", CONTAINER_NAME, "blobPath", BLOB_PATH), null, null, NoneShardSpec.instance(), @@ -67,15 +65,15 @@ public class AzureDataSegmentKillerTest extends EasyMockSupport { List deletedFiles = new ArrayList<>(); - final String dirPath = Paths.get(blobPath).getParent().toString(); + final String dirPath = Paths.get(BLOB_PATH).getParent().toString(); - expect(azureStorage.emptyCloudBlobDirectory(containerName, dirPath)).andReturn(deletedFiles); + EasyMock.expect(azureStorage.emptyCloudBlobDirectory(CONTAINER_NAME, dirPath)).andReturn(deletedFiles); replayAll(); AzureDataSegmentKiller killer = new AzureDataSegmentKiller(azureStorage); - killer.kill(dataSegment); + killer.kill(DATA_SEGMENT); verifyAll(); } @@ -84,9 +82,9 @@ public class AzureDataSegmentKillerTest extends EasyMockSupport public void killWithErrorTest() throws SegmentLoadingException, URISyntaxException, StorageException { - String dirPath = Paths.get(blobPath).getParent().toString(); + String dirPath = Paths.get(BLOB_PATH).getParent().toString(); - expect(azureStorage.emptyCloudBlobDirectory(containerName, dirPath)).andThrow( + EasyMock.expect(azureStorage.emptyCloudBlobDirectory(CONTAINER_NAME, dirPath)).andThrow( new StorageException( "", "", @@ -100,7 +98,7 @@ public class AzureDataSegmentKillerTest extends EasyMockSupport AzureDataSegmentKiller killer = new AzureDataSegmentKiller(azureStorage); - killer.kill(dataSegment); + killer.kill(DATA_SEGMENT); verifyAll(); } diff --git a/extensions-contrib/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureDataSegmentPullerTest.java b/extensions-contrib/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureDataSegmentPullerTest.java index 5b52dca3c80..8884754a425 100644 --- a/extensions-contrib/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureDataSegmentPullerTest.java +++ b/extensions-contrib/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureDataSegmentPullerTest.java @@ -22,7 +22,9 @@ package org.apache.druid.storage.azure; import com.microsoft.azure.storage.StorageException; import org.apache.druid.java.util.common.FileUtils; import org.apache.druid.segment.loading.SegmentLoadingException; +import org.easymock.EasyMock; import org.easymock.EasyMockSupport; +import org.junit.Assert; import org.junit.Before; import org.junit.Test; @@ -33,11 +35,6 @@ import java.io.InputStream; import java.net.URISyntaxException; import java.nio.file.Files; -import static org.easymock.EasyMock.expect; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; - public class AzureDataSegmentPullerTest extends EasyMockSupport { @@ -61,7 +58,7 @@ public class AzureDataSegmentPullerTest extends EasyMockSupport try { final InputStream zipStream = new FileInputStream(pulledFile); - 
expect(azureStorage.getBlobInputStream(containerName, blobPath)).andReturn(zipStream); + EasyMock.expect(azureStorage.getBlobInputStream(containerName, blobPath)).andReturn(zipStream); replayAll(); @@ -70,9 +67,9 @@ public class AzureDataSegmentPullerTest extends EasyMockSupport FileUtils.FileCopyResult result = puller.getSegmentFiles(containerName, blobPath, toDir); File expected = new File(toDir, SEGMENT_FILE_NAME); - assertEquals(value.length(), result.size()); - assertTrue(expected.exists()); - assertEquals(value.length(), expected.length()); + Assert.assertEquals(value.length(), result.size()); + Assert.assertTrue(expected.exists()); + Assert.assertEquals(value.length(), expected.length()); verifyAll(); } @@ -89,7 +86,7 @@ public class AzureDataSegmentPullerTest extends EasyMockSupport final File outDir = Files.createTempDirectory("druid").toFile(); try { - expect(azureStorage.getBlobInputStream(containerName, blobPath)).andThrow( + EasyMock.expect(azureStorage.getBlobInputStream(containerName, blobPath)).andThrow( new URISyntaxException( "error", "error", @@ -103,13 +100,12 @@ public class AzureDataSegmentPullerTest extends EasyMockSupport puller.getSegmentFiles(containerName, blobPath, outDir); - assertFalse(outDir.exists()); + Assert.assertFalse(outDir.exists()); verifyAll(); } finally { org.apache.commons.io.FileUtils.deleteDirectory(outDir); } - } } diff --git a/extensions-contrib/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureDataSegmentPusherTest.java b/extensions-contrib/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureDataSegmentPusherTest.java index 8a75adfee49..caa01e41ae1 100644 --- a/extensions-contrib/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureDataSegmentPusherTest.java +++ b/extensions-contrib/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureDataSegmentPusherTest.java @@ -29,6 +29,7 @@ import org.apache.druid.java.util.common.MapUtils; import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.timeline.DataSegment; import org.apache.druid.timeline.partition.NoneShardSpec; +import org.easymock.EasyMock; import org.easymock.EasyMockSupport; import org.junit.Assert; import org.junit.Before; @@ -44,9 +45,6 @@ import java.util.HashMap; import java.util.Map; import java.util.regex.Pattern; -import static org.easymock.EasyMock.expectLastCall; -import static org.junit.Assert.assertEquals; - public class AzureDataSegmentPusherTest extends EasyMockSupport { @Rule @@ -134,7 +132,7 @@ public class AzureDataSegmentPusherTest extends EasyMockSupport final String storageDir = pusher.getStorageDir(dataSegment, false); final String azurePath = pusher.getAzurePath(dataSegment, false); - assertEquals( + Assert.assertEquals( StringUtils.format("%s/%s", storageDir, AzureStorageDruidModule.INDEX_ZIP_FILE_NAME), azurePath ); @@ -149,7 +147,7 @@ public class AzureDataSegmentPusherTest extends EasyMockSupport final String azurePath = pusher.getAzurePath(dataSegment, false); azureStorage.uploadBlob(compressedSegmentData, containerName, azurePath); - expectLastCall(); + EasyMock.expectLastCall(); replayAll(); @@ -161,11 +159,11 @@ public class AzureDataSegmentPusherTest extends EasyMockSupport azurePath ); - assertEquals(compressedSegmentData.length(), pushedDataSegment.getSize()); - assertEquals(binaryVersion, (int) pushedDataSegment.getBinaryVersion()); + Assert.assertEquals(compressedSegmentData.length(), pushedDataSegment.getSize()); + Assert.assertEquals(binaryVersion, (int) 
pushedDataSegment.getBinaryVersion()); Map loadSpec = pushedDataSegment.getLoadSpec(); - assertEquals(AzureStorageDruidModule.SCHEME, MapUtils.getString(loadSpec, "type")); - assertEquals(azurePath, MapUtils.getString(loadSpec, "blobPath")); + Assert.assertEquals(AzureStorageDruidModule.SCHEME, MapUtils.getString(loadSpec, "type")); + Assert.assertEquals(azurePath, MapUtils.getString(loadSpec, "blobPath")); verifyAll(); } diff --git a/extensions-contrib/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureTaskLogsTest.java b/extensions-contrib/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureTaskLogsTest.java index ce134e9c2ec..0255923c7e6 100644 --- a/extensions-contrib/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureTaskLogsTest.java +++ b/extensions-contrib/azure-extensions/src/test/java/org/apache/druid/storage/azure/AzureTaskLogsTest.java @@ -25,6 +25,7 @@ import com.google.common.io.Files; import org.apache.commons.io.FileUtils; import org.apache.commons.io.IOUtils; import org.apache.druid.java.util.common.StringUtils; +import org.easymock.EasyMock; import org.easymock.EasyMockSupport; import org.junit.Assert; import org.junit.Before; @@ -35,9 +36,6 @@ import java.io.File; import java.io.StringWriter; import java.nio.charset.StandardCharsets; -import static org.easymock.EasyMock.expect; -import static org.easymock.EasyMock.expectLastCall; - public class AzureTaskLogsTest extends EasyMockSupport { @@ -66,7 +64,7 @@ public class AzureTaskLogsTest extends EasyMockSupport final File logFile = new File(tmpDir, "log"); azureStorage.uploadBlob(logFile, container, prefix + "/" + taskid + "/log"); - expectLastCall(); + EasyMock.expectLastCall(); replayAll(); @@ -85,9 +83,9 @@ public class AzureTaskLogsTest extends EasyMockSupport final String testLog = "hello this is a log"; final String blobPath = prefix + "/" + taskid + "/log"; - expect(azureStorage.getBlobExists(container, blobPath)).andReturn(true); - expect(azureStorage.getBlobLength(container, blobPath)).andReturn((long) testLog.length()); - expect(azureStorage.getBlobInputStream(container, blobPath)).andReturn( + EasyMock.expect(azureStorage.getBlobExists(container, blobPath)).andReturn(true); + EasyMock.expect(azureStorage.getBlobLength(container, blobPath)).andReturn((long) testLog.length()); + EasyMock.expect(azureStorage.getBlobInputStream(container, blobPath)).andReturn( new ByteArrayInputStream(testLog.getBytes(StandardCharsets.UTF_8))); @@ -108,9 +106,9 @@ public class AzureTaskLogsTest extends EasyMockSupport final String testLog = "hello this is a log"; final String blobPath = prefix + "/" + taskid + "/log"; - expect(azureStorage.getBlobExists(container, blobPath)).andReturn(true); - expect(azureStorage.getBlobLength(container, blobPath)).andReturn((long) testLog.length()); - expect(azureStorage.getBlobInputStream(container, blobPath)).andReturn( + EasyMock.expect(azureStorage.getBlobExists(container, blobPath)).andReturn(true); + EasyMock.expect(azureStorage.getBlobLength(container, blobPath)).andReturn((long) testLog.length()); + EasyMock.expect(azureStorage.getBlobInputStream(container, blobPath)).andReturn( new ByteArrayInputStream(testLog.getBytes(StandardCharsets.UTF_8))); @@ -131,9 +129,9 @@ public class AzureTaskLogsTest extends EasyMockSupport final String testLog = "hello this is a log"; final String blobPath = prefix + "/" + taskid + "/log"; - expect(azureStorage.getBlobExists(container, blobPath)).andReturn(true); - expect(azureStorage.getBlobLength(container, 
blobPath)).andReturn((long) testLog.length()); - expect(azureStorage.getBlobInputStream(container, blobPath)).andReturn( + EasyMock.expect(azureStorage.getBlobExists(container, blobPath)).andReturn(true); + EasyMock.expect(azureStorage.getBlobLength(container, blobPath)).andReturn((long) testLog.length()); + EasyMock.expect(azureStorage.getBlobInputStream(container, blobPath)).andReturn( new ByteArrayInputStream(StringUtils.toUtf8(testLog))); diff --git a/extensions-contrib/cloudfiles-extensions/src/test/java/org/apache/druid/storage/cloudfiles/CloudFilesByteSourceTest.java b/extensions-contrib/cloudfiles-extensions/src/test/java/org/apache/druid/storage/cloudfiles/CloudFilesByteSourceTest.java index dd8b7800b33..6a07063c722 100644 --- a/extensions-contrib/cloudfiles-extensions/src/test/java/org/apache/druid/storage/cloudfiles/CloudFilesByteSourceTest.java +++ b/extensions-contrib/cloudfiles-extensions/src/test/java/org/apache/druid/storage/cloudfiles/CloudFilesByteSourceTest.java @@ -19,19 +19,17 @@ package org.apache.druid.storage.cloudfiles; +import org.easymock.EasyMock; import org.easymock.EasyMockSupport; import org.jclouds.io.Payload; +import org.junit.Assert; import org.junit.Test; import java.io.IOException; import java.io.InputStream; -import static org.easymock.EasyMock.expect; -import static org.junit.Assert.assertEquals; - public class CloudFilesByteSourceTest extends EasyMockSupport { - @Test public void openStreamTest() throws IOException { @@ -42,15 +40,15 @@ public class CloudFilesByteSourceTest extends EasyMockSupport Payload payload = createMock(Payload.class); InputStream stream = createMock(InputStream.class); - expect(objectApi.get(path, 0)).andReturn(cloudFilesObject); - expect(cloudFilesObject.getPayload()).andReturn(payload); - expect(payload.openStream()).andReturn(stream); + EasyMock.expect(objectApi.get(path, 0)).andReturn(cloudFilesObject); + EasyMock.expect(cloudFilesObject.getPayload()).andReturn(payload); + EasyMock.expect(payload.openStream()).andReturn(stream); payload.close(); replayAll(); CloudFilesByteSource byteSource = new CloudFilesByteSource(objectApi, path); - assertEquals(stream, byteSource.openStream()); + Assert.assertEquals(stream, byteSource.openStream()); byteSource.closeStream(); verifyAll(); @@ -66,9 +64,9 @@ public class CloudFilesByteSourceTest extends EasyMockSupport Payload payload = createMock(Payload.class); InputStream stream = createMock(InputStream.class); - expect(objectApi.get(path, 0)).andReturn(cloudFilesObject); - expect(cloudFilesObject.getPayload()).andReturn(payload); - expect(payload.openStream()).andThrow(new IOException()).andReturn(stream); + EasyMock.expect(objectApi.get(path, 0)).andReturn(cloudFilesObject); + EasyMock.expect(cloudFilesObject.getPayload()).andReturn(payload); + EasyMock.expect(payload.openStream()).andThrow(new IOException()).andReturn(stream); payload.close(); replayAll(); @@ -78,13 +76,12 @@ public class CloudFilesByteSourceTest extends EasyMockSupport byteSource.openStream(); } catch (Exception e) { - assertEquals("Recoverable exception", e.getMessage()); + Assert.assertEquals("Recoverable exception", e.getMessage()); } - assertEquals(stream, byteSource.openStream()); + Assert.assertEquals(stream, byteSource.openStream()); byteSource.closeStream(); verifyAll(); } - } diff --git a/extensions-contrib/cloudfiles-extensions/src/test/java/org/apache/druid/storage/cloudfiles/CloudFilesObjectApiProxyTest.java 
b/extensions-contrib/cloudfiles-extensions/src/test/java/org/apache/druid/storage/cloudfiles/CloudFilesObjectApiProxyTest.java index 9d7036b5972..eb3b61c2149 100644 --- a/extensions-contrib/cloudfiles-extensions/src/test/java/org/apache/druid/storage/cloudfiles/CloudFilesObjectApiProxyTest.java +++ b/extensions-contrib/cloudfiles-extensions/src/test/java/org/apache/druid/storage/cloudfiles/CloudFilesObjectApiProxyTest.java @@ -19,19 +19,17 @@ package org.apache.druid.storage.cloudfiles; +import org.easymock.EasyMock; import org.easymock.EasyMockSupport; import org.jclouds.io.Payload; import org.jclouds.openstack.swift.v1.domain.SwiftObject; import org.jclouds.openstack.swift.v1.features.ObjectApi; import org.jclouds.rackspace.cloudfiles.v1.CloudFilesApi; +import org.junit.Assert; import org.junit.Test; -import static org.easymock.EasyMock.expect; -import static org.junit.Assert.assertEquals; - public class CloudFilesObjectApiProxyTest extends EasyMockSupport { - @Test public void getTest() { @@ -44,21 +42,20 @@ public class CloudFilesObjectApiProxyTest extends EasyMockSupport SwiftObject swiftObject = createMock(SwiftObject.class); Payload payload = createMock(Payload.class); - expect(cloudFilesApi.getObjectApi(region, container)).andReturn(objectApi); - expect(objectApi.get(path)).andReturn(swiftObject); - expect(swiftObject.getPayload()).andReturn(payload); + EasyMock.expect(cloudFilesApi.getObjectApi(region, container)).andReturn(objectApi); + EasyMock.expect(objectApi.get(path)).andReturn(swiftObject); + EasyMock.expect(swiftObject.getPayload()).andReturn(payload); replayAll(); CloudFilesObjectApiProxy cfoApiProxy = new CloudFilesObjectApiProxy(cloudFilesApi, region, container); CloudFilesObject cloudFilesObject = cfoApiProxy.get(path, 0); - assertEquals(cloudFilesObject.getPayload(), payload); - assertEquals(cloudFilesObject.getRegion(), region); - assertEquals(cloudFilesObject.getContainer(), container); - assertEquals(cloudFilesObject.getPath(), path); + Assert.assertEquals(cloudFilesObject.getPayload(), payload); + Assert.assertEquals(cloudFilesObject.getRegion(), region); + Assert.assertEquals(cloudFilesObject.getContainer(), container); + Assert.assertEquals(cloudFilesObject.getPath(), path); verifyAll(); } - } diff --git a/extensions-contrib/influx-extensions/src/test/java/org/apache/druid/data/input/influx/InfluxParserTest.java b/extensions-contrib/influx-extensions/src/test/java/org/apache/druid/data/input/influx/InfluxParserTest.java index b14b8bd5909..49307f91e0a 100644 --- a/extensions-contrib/influx-extensions/src/test/java/org/apache/druid/data/input/influx/InfluxParserTest.java +++ b/extensions-contrib/influx-extensions/src/test/java/org/apache/druid/data/input/influx/InfluxParserTest.java @@ -26,6 +26,8 @@ import junitparams.Parameters; import org.apache.druid.java.util.common.Pair; import org.apache.druid.java.util.common.parsers.ParseException; import org.apache.druid.java.util.common.parsers.Parser; +import org.hamcrest.MatcherAssert; +import org.hamcrest.Matchers; import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; @@ -33,15 +35,14 @@ import org.junit.runner.RunWith; import java.util.HashMap; import java.util.Map; -import static org.hamcrest.MatcherAssert.assertThat; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.isA; - @RunWith(JUnitParamsRunner.class) public class InfluxParserTest { + @SuppressWarnings("unused") private String name; + @SuppressWarnings("unused") private String input; + 
@SuppressWarnings("unused") private Map expected; private static Object[] testCase(String name, String input, Parsed expected) @@ -49,7 +50,6 @@ public class InfluxParserTest return Lists.newArrayList(name, input, expected).toArray(); } - public Object[] testData() { return Lists.newArrayList( @@ -142,14 +142,20 @@ public class InfluxParserTest { Parser parser = new InfluxParser(null); Map parsed = parser.parseToMap(input); - assertThat("correct measurement name", parsed.get("measurement"), equalTo(expected.measurement)); - assertThat("correct timestamp", parsed.get(InfluxParser.TIMESTAMP_KEY), equalTo(expected.timestamp)); - expected.kv.forEach((k, v) -> { - assertThat("correct field " + k, parsed.get(k), equalTo(v)); - }); + MatcherAssert.assertThat( + "correct measurement name", + parsed.get("measurement"), + Matchers.equalTo(expected.measurement) + ); + MatcherAssert.assertThat( + "correct timestamp", + parsed.get(InfluxParser.TIMESTAMP_KEY), + Matchers.equalTo(expected.timestamp) + ); + expected.kv.forEach((k, v) -> MatcherAssert.assertThat("correct field " + k, parsed.get(k), Matchers.equalTo(v))); parsed.remove("measurement"); parsed.remove(InfluxParser.TIMESTAMP_KEY); - assertThat("No extra keys in parsed data", parsed.keySet(), equalTo(expected.kv.keySet())); + MatcherAssert.assertThat("No extra keys in parsed data", parsed.keySet(), Matchers.equalTo(expected.kv.keySet())); } @Test @@ -158,7 +164,7 @@ public class InfluxParserTest Parser parser = new InfluxParser(Sets.newHashSet("cpu")); String input = "cpu,host=foo.bar.baz,region=us-east,application=echo pct_idle=99.3,pct_user=88.8,m1_load=2 1465839830100400200"; Map parsed = parser.parseToMap(input); - assertThat(parsed.get("measurement"), equalTo("cpu")); + MatcherAssert.assertThat(parsed.get("measurement"), Matchers.equalTo("cpu")); } @Test @@ -170,7 +176,7 @@ public class InfluxParserTest parser.parseToMap(input); } catch (ParseException t) { - assertThat(t, isA(ParseException.class)); + MatcherAssert.assertThat(t, Matchers.isA(ParseException.class)); return; } @@ -192,10 +198,10 @@ public class InfluxParserTest { Parser parser = new InfluxParser(null); try { - Map res = parser.parseToMap(testCase.rhs); + parser.parseToMap(testCase.rhs); } catch (ParseException t) { - assertThat(t, isA(ParseException.class)); + MatcherAssert.assertThat(t, Matchers.isA(ParseException.class)); return; } @@ -206,9 +212,9 @@ public class InfluxParserTest { private String measurement; private Long timestamp; - private Map kv = new HashMap<>(); + private final Map kv = new HashMap<>(); - public static Parsed row(String measurement, Long timestamp) + static Parsed row(String measurement, Long timestamp) { Parsed e = new Parsed(); e.measurement = measurement; @@ -216,7 +222,7 @@ public class InfluxParserTest return e; } - public Parsed with(String k, Object v) + Parsed with(String k, Object v) { kv.put(k, v); return this; diff --git a/extensions-contrib/materialized-view-maintenance/src/test/java/org/apache/druid/indexing/materializedview/MaterializedViewSupervisorSpecTest.java b/extensions-contrib/materialized-view-maintenance/src/test/java/org/apache/druid/indexing/materializedview/MaterializedViewSupervisorSpecTest.java index 46728e2418c..e19cb784ac7 100644 --- a/extensions-contrib/materialized-view-maintenance/src/test/java/org/apache/druid/indexing/materializedview/MaterializedViewSupervisorSpecTest.java +++ 
b/extensions-contrib/materialized-view-maintenance/src/test/java/org/apache/druid/indexing/materializedview/MaterializedViewSupervisorSpecTest.java @@ -41,6 +41,7 @@ import org.apache.druid.segment.TestHelper; import org.apache.druid.segment.realtime.firehose.ChatHandlerProvider; import org.apache.druid.segment.realtime.firehose.NoopChatHandlerProvider; import org.apache.druid.server.security.AuthorizerMapper; +import org.easymock.EasyMock; import org.hamcrest.CoreMatchers; import org.junit.Assert; import org.junit.Before; @@ -50,15 +51,13 @@ import org.junit.rules.ExpectedException; import java.io.IOException; -import static org.easymock.EasyMock.createMock; - -public class MaterializedViewSupervisorSpecTest +public class MaterializedViewSupervisorSpecTest { @Rule public ExpectedException expectedException = ExpectedException.none(); - private ObjectMapper objectMapper = TestHelper.makeJsonMapper(); - + private final ObjectMapper objectMapper = TestHelper.makeJsonMapper(); + @Before public void setup() { @@ -73,53 +72,53 @@ public class MaterializedViewSupervisorSpecTest .addValue(SQLMetadataSegmentManager.class, null) .addValue(IndexerMetadataStorageCoordinator.class, null) .addValue(MaterializedViewTaskConfig.class, new MaterializedViewTaskConfig()) - .addValue(AuthorizerMapper.class, createMock(AuthorizerMapper.class)) + .addValue(AuthorizerMapper.class, EasyMock.createMock(AuthorizerMapper.class)) .addValue(ChatHandlerProvider.class, new NoopChatHandlerProvider()) .addValue(SupervisorStateManagerConfig.class, new SupervisorStateManagerConfig()) ); } @Test - public void testSupervisorSerialization() throws IOException + public void testSupervisorSerialization() throws IOException { String supervisorStr = "{\n" + - " \"type\" : \"derivativeDataSource\",\n" + - " \"baseDataSource\": \"wikiticker\",\n" + - " \"dimensionsSpec\":{\n" + - " \"dimensions\" : [\n" + - " \"isUnpatrolled\",\n" + - " \"metroCode\",\n" + - " \"namespace\",\n" + - " \"page\",\n" + - " \"regionIsoCode\",\n" + - " \"regionName\",\n" + - " \"user\"\n" + - " ]\n" + - " },\n" + - " \"metricsSpec\" : [\n" + - " {\n" + - " \"name\" : \"count\",\n" + - " \"type\" : \"count\"\n" + - " },\n" + - " {\n" + - " \"name\" : \"added\",\n" + - " \"type\" : \"longSum\",\n" + - " \"fieldName\" : \"added\"\n" + - " }\n" + - " ],\n" + - " \"tuningConfig\": {\n" + - " \"type\" : \"hadoop\"\n" + - " }\n" + - "}"; + " \"type\" : \"derivativeDataSource\",\n" + + " \"baseDataSource\": \"wikiticker\",\n" + + " \"dimensionsSpec\":{\n" + + " \"dimensions\" : [\n" + + " \"isUnpatrolled\",\n" + + " \"metroCode\",\n" + + " \"namespace\",\n" + + " \"page\",\n" + + " \"regionIsoCode\",\n" + + " \"regionName\",\n" + + " \"user\"\n" + + " ]\n" + + " },\n" + + " \"metricsSpec\" : [\n" + + " {\n" + + " \"name\" : \"count\",\n" + + " \"type\" : \"count\"\n" + + " },\n" + + " {\n" + + " \"name\" : \"added\",\n" + + " \"type\" : \"longSum\",\n" + + " \"fieldName\" : \"added\"\n" + + " }\n" + + " ],\n" + + " \"tuningConfig\": {\n" + + " \"type\" : \"hadoop\"\n" + + " }\n" + + "}"; MaterializedViewSupervisorSpec expected = new MaterializedViewSupervisorSpec( "wikiticker", new DimensionsSpec( Lists.newArrayList( new StringDimensionSchema("isUnpatrolled"), - new StringDimensionSchema("metroCode"), - new StringDimensionSchema("namespace"), - new StringDimensionSchema("page"), - new StringDimensionSchema("regionIsoCode"), + new StringDimensionSchema("metroCode"), + new StringDimensionSchema("namespace"), + new StringDimensionSchema("page"), + new 
StringDimensionSchema("regionIsoCode"), new StringDimensionSchema("regionName"), new StringDimensionSchema("user") ), @@ -144,7 +143,7 @@ public class MaterializedViewSupervisorSpecTest null, null, new MaterializedViewTaskConfig(), - createMock(AuthorizerMapper.class), + EasyMock.createMock(AuthorizerMapper.class), new NoopChatHandlerProvider(), new SupervisorStateManagerConfig() ); @@ -193,11 +192,17 @@ public class MaterializedViewSupervisorSpecTest Assert.assertFalse(spec.isSuspended()); String suspendedSerialized = objectMapper.writeValueAsString(spec.createSuspendedSpec()); - MaterializedViewSupervisorSpec suspendedSpec = objectMapper.readValue(suspendedSerialized, MaterializedViewSupervisorSpec.class); + MaterializedViewSupervisorSpec suspendedSpec = objectMapper.readValue( + suspendedSerialized, + MaterializedViewSupervisorSpec.class + ); Assert.assertTrue(suspendedSpec.isSuspended()); String runningSerialized = objectMapper.writeValueAsString(spec.createRunningSpec()); - MaterializedViewSupervisorSpec runningSpec = objectMapper.readValue(runningSerialized, MaterializedViewSupervisorSpec.class); + MaterializedViewSupervisorSpec runningSpec = objectMapper.readValue( + runningSerialized, + MaterializedViewSupervisorSpec.class + ); Assert.assertFalse(runningSpec.isSuspended()); } @@ -208,7 +213,8 @@ public class MaterializedViewSupervisorSpecTest expectedException.expectMessage( "baseDataSource cannot be null or empty. Please provide a baseDataSource." ); - MaterializedViewSupervisorSpec materializedViewSupervisorSpec = new MaterializedViewSupervisorSpec( + //noinspection ResultOfObjectAllocationIgnored (this method call will trigger the expected exception) + new MaterializedViewSupervisorSpec( "", new DimensionsSpec( Lists.newArrayList( @@ -241,7 +247,7 @@ public class MaterializedViewSupervisorSpecTest null, null, new MaterializedViewTaskConfig(), - createMock(AuthorizerMapper.class), + EasyMock.createMock(AuthorizerMapper.class), new NoopChatHandlerProvider(), new SupervisorStateManagerConfig() ); @@ -254,7 +260,8 @@ public class MaterializedViewSupervisorSpecTest expectedException.expectMessage( "baseDataSource cannot be null or empty. Please provide a baseDataSource." 
); - MaterializedViewSupervisorSpec materializedViewSupervisorSpec = new MaterializedViewSupervisorSpec( + //noinspection ResultOfObjectAllocationIgnored (this method call will trigger the expected exception) + new MaterializedViewSupervisorSpec( null, new DimensionsSpec( Lists.newArrayList( @@ -287,7 +294,7 @@ public class MaterializedViewSupervisorSpecTest null, null, new MaterializedViewTaskConfig(), - createMock(AuthorizerMapper.class), + EasyMock.createMock(AuthorizerMapper.class), new NoopChatHandlerProvider(), new SupervisorStateManagerConfig() ); diff --git a/extensions-contrib/materialized-view-maintenance/src/test/java/org/apache/druid/indexing/materializedview/MaterializedViewSupervisorTest.java b/extensions-contrib/materialized-view-maintenance/src/test/java/org/apache/druid/indexing/materializedview/MaterializedViewSupervisorTest.java index 3b9061de60c..0ac7f6678a1 100644 --- a/extensions-contrib/materialized-view-maintenance/src/test/java/org/apache/druid/indexing/materializedview/MaterializedViewSupervisorTest.java +++ b/extensions-contrib/materialized-view-maintenance/src/test/java/org/apache/druid/indexing/materializedview/MaterializedViewSupervisorTest.java @@ -68,9 +68,6 @@ import java.util.Map; import java.util.Set; import java.util.SortedMap; -import static org.easymock.EasyMock.createMock; -import static org.easymock.EasyMock.expect; - public class MaterializedViewSupervisorTest { @Rule @@ -78,7 +75,6 @@ public class MaterializedViewSupervisorTest @Rule public final ExpectedException expectedException = ExpectedException.none(); - private TestDerbyConnector derbyConnector; private TaskStorage taskStorage; private TaskMaster taskMaster; private IndexerMetadataStorageCoordinator indexerMetadataStorageCoordinator; @@ -86,28 +82,27 @@ public class MaterializedViewSupervisorTest private SQLMetadataSegmentManager sqlMetadataSegmentManager; private TaskQueue taskQueue; private MaterializedViewSupervisor supervisor; - private MaterializedViewSupervisorSpec spec; - private ObjectMapper objectMapper = TestHelper.makeJsonMapper(); - + private final ObjectMapper objectMapper = TestHelper.makeJsonMapper(); + @Before public void setUp() { - derbyConnector = derbyConnectorRule.getConnector(); + TestDerbyConnector derbyConnector = derbyConnectorRule.getConnector(); derbyConnector.createDataSourceTable(); derbyConnector.createSegmentTable(); - taskStorage = createMock(TaskStorage.class); - taskMaster = createMock(TaskMaster.class); + taskStorage = EasyMock.createMock(TaskStorage.class); + taskMaster = EasyMock.createMock(TaskMaster.class); indexerMetadataStorageCoordinator = new IndexerSQLMetadataStorageCoordinator( objectMapper, derbyConnectorRule.metadataTablesConfigSupplier().get(), derbyConnector ); - metadataSupervisorManager = createMock(MetadataSupervisorManager.class); - sqlMetadataSegmentManager = createMock(SQLMetadataSegmentManager.class); - taskQueue = createMock(TaskQueue.class); + metadataSupervisorManager = EasyMock.createMock(MetadataSupervisorManager.class); + sqlMetadataSegmentManager = EasyMock.createMock(SQLMetadataSegmentManager.class); + taskQueue = EasyMock.createMock(TaskQueue.class); taskQueue.start(); objectMapper.registerSubtypes(new NamedType(HashBasedNumberedShardSpec.class, "hashed")); - spec = new MaterializedViewSupervisorSpec( + MaterializedViewSupervisorSpec spec = new MaterializedViewSupervisorSpec( "base", new DimensionsSpec(Collections.singletonList(new StringDimensionSchema("dim")), null, null), new AggregatorFactory[]{new 
LongSumAggregatorFactory("m1", "m1")}, @@ -125,8 +120,8 @@ public class MaterializedViewSupervisorTest sqlMetadataSegmentManager, indexerMetadataStorageCoordinator, new MaterializedViewTaskConfig(), - createMock(AuthorizerMapper.class), - createMock(ChatHandlerProvider.class), + EasyMock.createMock(AuthorizerMapper.class), + EasyMock.createMock(ChatHandlerProvider.class), new SupervisorStateManagerConfig() ); supervisor = (MaterializedViewSupervisor) spec.createSupervisor(); @@ -160,9 +155,9 @@ public class MaterializedViewSupervisorTest ) ); indexerMetadataStorageCoordinator.announceHistoricalSegments(baseSegments); - expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); - expect(taskMaster.getTaskRunner()).andReturn(Optional.absent()).anyTimes(); - expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); + EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); + EasyMock.expect(taskMaster.getTaskRunner()).andReturn(Optional.absent()).anyTimes(); + EasyMock.expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); Pair, Map>> toBuildInterval = supervisor.checkSegments(); Map> expectedSegments = new HashMap<>(); expectedSegments.put( @@ -201,11 +196,15 @@ public class MaterializedViewSupervisorTest ) ); indexerMetadataStorageCoordinator.announceHistoricalSegments(baseSegments); - expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); - expect(taskMaster.getTaskRunner()).andReturn(Optional.absent()).anyTimes(); - expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); - expect(taskStorage.getStatus("test_task1")).andReturn(Optional.of(TaskStatus.failure("test_task1"))).anyTimes(); - expect(taskStorage.getStatus("test_task2")).andReturn(Optional.of(TaskStatus.running("test_task2"))).anyTimes(); + EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); + EasyMock.expect(taskMaster.getTaskRunner()).andReturn(Optional.absent()).anyTimes(); + EasyMock.expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); + EasyMock.expect(taskStorage.getStatus("test_task1")) + .andReturn(Optional.of(TaskStatus.failure("test_task1"))) + .anyTimes(); + EasyMock.expect(taskStorage.getStatus("test_task2")) + .andReturn(Optional.of(TaskStatus.running("test_task2"))) + .anyTimes(); EasyMock.replay(taskStorage); Pair, Map> runningTasksPair = supervisor.getRunningTasks(); @@ -283,21 +282,21 @@ public class MaterializedViewSupervisorTest sqlMetadataSegmentManager, indexerMetadataStorageCoordinator, new MaterializedViewTaskConfig(), - createMock(AuthorizerMapper.class), - createMock(ChatHandlerProvider.class), + EasyMock.createMock(AuthorizerMapper.class), + EasyMock.createMock(ChatHandlerProvider.class), new SupervisorStateManagerConfig() ); MaterializedViewSupervisor supervisor = (MaterializedViewSupervisor) suspended.createSupervisor(); // mock IndexerSQLMetadataStorageCoordinator to ensure that getDataSourceMetadata is not called // which will be true if truly suspended, since this is the first operation of the 'run' method otherwise - IndexerSQLMetadataStorageCoordinator mock = createMock(IndexerSQLMetadataStorageCoordinator.class); - expect(mock.getDataSourceMetadata(suspended.getDataSourceName())) - .andAnswer(() -> { - Assert.fail(); - return null; - }) - .anyTimes(); + IndexerSQLMetadataStorageCoordinator mock = EasyMock.createMock(IndexerSQLMetadataStorageCoordinator.class); + 
EasyMock.expect(mock.getDataSourceMetadata(suspended.getDataSourceName())) + .andAnswer(() -> { + Assert.fail(); + return null; + }) + .anyTimes(); EasyMock.replay(mock); supervisor.run(); diff --git a/extensions-contrib/materialized-view-selection/src/test/java/org/apache/druid/query/materializedview/DatasourceOptimizerTest.java b/extensions-contrib/materialized-view-selection/src/test/java/org/apache/druid/query/materializedview/DatasourceOptimizerTest.java index 87e3d0d93cc..e73f5558445 100644 --- a/extensions-contrib/materialized-view-selection/src/test/java/org/apache/druid/query/materializedview/DatasourceOptimizerTest.java +++ b/extensions-contrib/materialized-view-selection/src/test/java/org/apache/druid/query/materializedview/DatasourceOptimizerTest.java @@ -42,6 +42,7 @@ import org.apache.druid.java.util.http.client.HttpClient; import org.apache.druid.metadata.IndexerSQLMetadataStorageCoordinator; import org.apache.druid.metadata.TestDerbyConnector; import org.apache.druid.query.Query; +import org.apache.druid.query.QueryRunnerTestHelper; import org.apache.druid.query.QueryToolChestWarehouse; import org.apache.druid.query.QueryWatcher; import org.apache.druid.query.aggregation.LongSumAggregatorFactory; @@ -69,37 +70,33 @@ import java.util.Set; import java.util.concurrent.Executor; import java.util.concurrent.TimeUnit; -import static org.apache.druid.query.QueryRunnerTestHelper.allGran; - -public class DatasourceOptimizerTest extends CuratorTestBase +public class DatasourceOptimizerTest extends CuratorTestBase { @Rule public final TestDerbyConnector.DerbyConnectorRule derbyConnectorRule = new TestDerbyConnector.DerbyConnectorRule(); - private TestDerbyConnector derbyConnector; private DerivativeDataSourceManager derivativesManager; private DruidServer druidServer; private ObjectMapper jsonMapper; private ZkPathsConfig zkPathsConfig; private DataSourceOptimizer optimizer; - private MaterializedViewConfig viewConfig; private IndexerSQLMetadataStorageCoordinator metadataStorageCoordinator; private BatchServerInventoryView baseView; private BrokerServerView brokerServerView; - + @Before public void setUp() throws Exception { - derbyConnector = derbyConnectorRule.getConnector(); + TestDerbyConnector derbyConnector = derbyConnectorRule.getConnector(); derbyConnector.createDataSourceTable(); derbyConnector.createSegmentTable(); - viewConfig = new MaterializedViewConfig(); + MaterializedViewConfig viewConfig = new MaterializedViewConfig(); jsonMapper = TestHelper.makeJsonMapper(); jsonMapper.registerSubtypes(new NamedType(DerivativeDataSourceMetadata.class, "view")); metadataStorageCoordinator = EasyMock.createMock(IndexerSQLMetadataStorageCoordinator.class); derivativesManager = new DerivativeDataSourceManager( - viewConfig, - derbyConnectorRule.metadataTablesConfigSupplier(), - jsonMapper, + viewConfig, + derbyConnectorRule.metadataTablesConfigSupplier(), + jsonMapper, derbyConnector ); metadataStorageCoordinator = new IndexerSQLMetadataStorageCoordinator( @@ -107,14 +104,14 @@ public class DatasourceOptimizerTest extends CuratorTestBase derbyConnectorRule.metadataTablesConfigSupplier().get(), derbyConnector ); - + setupServerAndCurator(); curator.start(); curator.blockUntilConnected(); - + zkPathsConfig = new ZkPathsConfig(); setupViews(); - + druidServer = new DruidServer( "localhost:1234", "localhost:1234", @@ -127,14 +124,14 @@ public class DatasourceOptimizerTest extends CuratorTestBase setupZNodeForServer(druidServer, new ZkPathsConfig(), jsonMapper); optimizer = new 
DataSourceOptimizer(brokerServerView); } - + @After - public void tearDown() throws IOException + public void tearDown() throws IOException { baseView.stop(); tearDownServerAndCurator(); } - + @Test(timeout = 60_000L) public void testOptimize() throws InterruptedException { @@ -156,10 +153,10 @@ public class DatasourceOptimizerTest extends CuratorTestBase ), interval -> { final DataSegment segment = createDataSegment( - "base", - interval, + "base", + interval, "v1", - Lists.newArrayList("dim1", "dim2", "dim3", "dim4"), + Lists.newArrayList("dim1", "dim2", "dim3", "dim4"), 1024 * 1024 ); try { @@ -180,7 +177,13 @@ public class DatasourceOptimizerTest extends CuratorTestBase "2011-04-03/2011-04-04" ), interval -> { - final DataSegment segment = createDataSegment("derivative", interval, "v1", Lists.newArrayList("dim1", "dim2", "dim3"), 1024); + final DataSegment segment = createDataSegment( + "derivative", + interval, + "v1", + Lists.newArrayList("dim1", "dim2", "dim3"), + 1024 + ); try { metadataStorageCoordinator.announceHistoricalSegments(Sets.newHashSet(segment)); announceSegmentForServer(druidServer, segment, zkPathsConfig, jsonMapper); @@ -200,7 +203,7 @@ public class DatasourceOptimizerTest extends CuratorTestBase // build user query TopNQuery userQuery = new TopNQueryBuilder() .dataSource("base") - .granularity(allGran) + .granularity(QueryRunnerTestHelper.allGran) .dimension("dim1") .metric("cost") .threshold(4) @@ -209,11 +212,11 @@ public class DatasourceOptimizerTest extends CuratorTestBase Collections.singletonList(new LongSumAggregatorFactory("cost", "cost")) ) .build(); - + List expectedQueryAfterOptimizing = Lists.newArrayList( new TopNQueryBuilder() .dataSource("derivative") - .granularity(allGran) + .granularity(QueryRunnerTestHelper.allGran) .dimension("dim1") .metric("cost") .threshold(4) @@ -224,7 +227,7 @@ public class DatasourceOptimizerTest extends CuratorTestBase .build(), new TopNQueryBuilder() .dataSource("base") - .granularity(allGran) + .granularity(QueryRunnerTestHelper.allGran) .dimension("dim1") .metric("cost") .threshold(4) @@ -237,27 +240,27 @@ public class DatasourceOptimizerTest extends CuratorTestBase Assert.assertEquals(expectedQueryAfterOptimizing, optimizer.optimize(userQuery)); derivativesManager.stop(); } - + private DataSegment createDataSegment(String name, String intervalStr, String version, List dims, long size) { return DataSegment.builder() - .dataSource(name) - .interval(Intervals.of(intervalStr)) - .loadSpec( - ImmutableMap.of( - "type", - "local", - "path", - "somewhere" - ) - ) - .version(version) - .dimensions(dims) - .metrics(ImmutableList.of("cost")) - .shardSpec(NoneShardSpec.instance()) - .binaryVersion(9) - .size(size) - .build(); + .dataSource(name) + .interval(Intervals.of(intervalStr)) + .loadSpec( + ImmutableMap.of( + "type", + "local", + "path", + "somewhere" + ) + ) + .version(version) + .dimensions(dims) + .metrics(ImmutableList.of("cost")) + .shardSpec(NoneShardSpec.instance()) + .binaryVersion(9) + .size(size) + .build(); } private void setupViews() throws Exception @@ -274,22 +277,19 @@ public class DatasourceOptimizerTest extends CuratorTestBase @Override public CallbackAction segmentAdded(DruidServerMetadata server, DataSegment segment) { - CallbackAction res = callback.segmentAdded(server, segment); - return res; + return callback.segmentAdded(server, segment); } @Override public CallbackAction segmentRemoved(DruidServerMetadata server, DataSegment segment) { - CallbackAction res = callback.segmentRemoved(server, 
segment); - return res; + return callback.segmentRemoved(server, segment); } @Override public CallbackAction segmentViewInitialized() { - CallbackAction res = callback.segmentViewInitialized(); - return res; + return callback.segmentViewInitialized(); } } ); @@ -318,5 +318,4 @@ public class DatasourceOptimizerTest extends CuratorTestBase retVal.getFactory().setCodec(retVal); return retVal; } - } diff --git a/extensions-contrib/materialized-view-selection/src/test/java/org/apache/druid/query/materializedview/MaterializedViewQueryTest.java b/extensions-contrib/materialized-view-selection/src/test/java/org/apache/druid/query/materializedview/MaterializedViewQueryTest.java index e8f20267b72..6a46a341cf9 100644 --- a/extensions-contrib/materialized-view-selection/src/test/java/org/apache/druid/query/materializedview/MaterializedViewQueryTest.java +++ b/extensions-contrib/materialized-view-selection/src/test/java/org/apache/druid/query/materializedview/MaterializedViewQueryTest.java @@ -26,6 +26,7 @@ import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import org.apache.druid.math.expr.ExprMacroTable; import org.apache.druid.query.Query; +import org.apache.druid.query.QueryRunnerTestHelper; import org.apache.druid.query.TableDataSource; import org.apache.druid.query.aggregation.DoubleMaxAggregatorFactory; import org.apache.druid.query.aggregation.DoubleMinAggregatorFactory; @@ -41,21 +42,13 @@ import org.junit.Test; import java.io.IOException; import java.util.Collections; -import static org.apache.druid.query.QueryRunnerTestHelper.addRowsIndexConstant; -import static org.apache.druid.query.QueryRunnerTestHelper.allGran; -import static org.apache.druid.query.QueryRunnerTestHelper.commonDoubleAggregators; -import static org.apache.druid.query.QueryRunnerTestHelper.dataSource; -import static org.apache.druid.query.QueryRunnerTestHelper.fullOnIntervalSpec; -import static org.apache.druid.query.QueryRunnerTestHelper.indexMetric; -import static org.apache.druid.query.QueryRunnerTestHelper.marketDimension; - -public class MaterializedViewQueryTest +public class MaterializedViewQueryTest { private static final ObjectMapper jsonMapper = TestHelper.makeJsonMapper(); private DataSourceOptimizer optimizer; @Before - public void setUp() + public void setUp() { jsonMapper.registerSubtypes(new NamedType(MaterializedViewQuery.class, MaterializedViewQuery.TYPE)); optimizer = EasyMock.createMock(DataSourceOptimizer.class); @@ -65,21 +58,21 @@ public class MaterializedViewQueryTest .addValue(DataSourceOptimizer.class, optimizer) ); } - + @Test public void testQuerySerialization() throws IOException { TopNQuery topNQuery = new TopNQueryBuilder() - .dataSource(dataSource) - .granularity(allGran) - .dimension(marketDimension) - .metric(indexMetric) + .dataSource(QueryRunnerTestHelper.dataSource) + .granularity(QueryRunnerTestHelper.allGran) + .dimension(QueryRunnerTestHelper.marketDimension) + .metric(QueryRunnerTestHelper.indexMetric) .threshold(4) - .intervals(fullOnIntervalSpec) + .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) .aggregators( Lists.newArrayList( Iterables.concat( - commonDoubleAggregators, + QueryRunnerTestHelper.commonDoubleAggregators, Lists.newArrayList( new DoubleMaxAggregatorFactory("maxIndex", "index"), new DoubleMinAggregatorFactory("minIndex", "index") @@ -87,14 +80,14 @@ public class MaterializedViewQueryTest ) ) ) - .postAggregators(Collections.singletonList(addRowsIndexConstant)) + 
.postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); MaterializedViewQuery query = new MaterializedViewQuery(topNQuery, optimizer); String json = jsonMapper.writeValueAsString(query); Query serdeQuery = jsonMapper.readValue(json, Query.class); Assert.assertEquals(query, serdeQuery); - Assert.assertEquals(new TableDataSource(dataSource), query.getDataSource()); - Assert.assertEquals(allGran, query.getGranularity()); - Assert.assertEquals(fullOnIntervalSpec.getIntervals(), query.getIntervals()); + Assert.assertEquals(new TableDataSource(QueryRunnerTestHelper.dataSource), query.getDataSource()); + Assert.assertEquals(QueryRunnerTestHelper.allGran, query.getGranularity()); + Assert.assertEquals(QueryRunnerTestHelper.fullOnIntervalSpec.getIntervals(), query.getIntervals()); } } diff --git a/extensions-contrib/momentsketch/src/test/java/org/apache/druid/query/aggregation/momentsketch/MomentSketchWrapperTest.java b/extensions-contrib/momentsketch/src/test/java/org/apache/druid/query/aggregation/momentsketch/MomentSketchWrapperTest.java index 74eadd0241e..75b2abb7ad1 100644 --- a/extensions-contrib/momentsketch/src/test/java/org/apache/druid/query/aggregation/momentsketch/MomentSketchWrapperTest.java +++ b/extensions-contrib/momentsketch/src/test/java/org/apache/druid/query/aggregation/momentsketch/MomentSketchWrapperTest.java @@ -19,10 +19,9 @@ package org.apache.druid.query.aggregation.momentsketch; +import org.junit.Assert; import org.junit.Test; -import static org.junit.Assert.assertEquals; - public class MomentSketchWrapperTest { @Test @@ -34,7 +33,7 @@ public class MomentSketchWrapperTest byte[] bs = mw.toByteArray(); MomentSketchWrapper mw2 = MomentSketchWrapper.fromByteArray(bs); - assertEquals(10, mw2.getPowerSums()[1], 1e-10); + Assert.assertEquals(10, mw2.getPowerSums()[1], 1e-10); } @Test @@ -47,7 +46,7 @@ public class MomentSketchWrapperTest } double[] ps = {0.0, 0.5, 1.0}; double[] qs = mw.getQuantiles(ps); - assertEquals(0, qs[0], 1.0); - assertEquals(50, qs[1], 1.0); + Assert.assertEquals(0, qs[0], 1.0); + Assert.assertEquals(50, qs[1], 1.0); } } diff --git a/extensions-contrib/momentsketch/src/test/java/org/apache/druid/query/aggregation/momentsketch/aggregator/MomentsSketchAggregatorTest.java b/extensions-contrib/momentsketch/src/test/java/org/apache/druid/query/aggregation/momentsketch/aggregator/MomentsSketchAggregatorTest.java index 3927dd4ffe4..922075ffac6 100644 --- a/extensions-contrib/momentsketch/src/test/java/org/apache/druid/query/aggregation/momentsketch/aggregator/MomentsSketchAggregatorTest.java +++ b/extensions-contrib/momentsketch/src/test/java/org/apache/druid/query/aggregation/momentsketch/aggregator/MomentsSketchAggregatorTest.java @@ -31,6 +31,7 @@ import org.apache.druid.query.aggregation.momentsketch.MomentSketchModule; import org.apache.druid.query.aggregation.momentsketch.MomentSketchWrapper; import org.apache.druid.query.groupby.GroupByQueryConfig; import org.apache.druid.query.groupby.GroupByQueryRunnerTest; +import org.junit.Assert; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; @@ -42,8 +43,6 @@ import java.util.ArrayList; import java.util.Collection; import java.util.List; -import static org.junit.Assert.assertEquals; - @RunWith(Parameterized.class) public class MomentsSketchAggregatorTest { @@ -85,7 +84,7 @@ public class MomentsSketchAggregatorTest MomentSketchAggregatorFactory.class ); - assertEquals(factory, other); + Assert.assertEquals(factory, other); } @Test @@ 
-135,21 +134,21 @@ public class MomentsSketchAggregatorTest ) ); List results = seq.toList(); - assertEquals(1, results.size()); + Assert.assertEquals(1, results.size()); Row row = results.get(0); double[] quantilesArray = (double[]) row.getRaw("quantiles"); - assertEquals(0, quantilesArray[0], 0.05); - assertEquals(.5, quantilesArray[1], 0.05); - assertEquals(1.0, quantilesArray[2], 0.05); + Assert.assertEquals(0, quantilesArray[0], 0.05); + Assert.assertEquals(.5, quantilesArray[1], 0.05); + Assert.assertEquals(1.0, quantilesArray[2], 0.05); Double minValue = (Double) row.getRaw("min"); - assertEquals(0.0011, minValue, 0.0001); + Assert.assertEquals(0.0011, minValue, 0.0001); Double maxValue = (Double) row.getRaw("max"); - assertEquals(0.9969, maxValue, 0.0001); + Assert.assertEquals(0.9969, maxValue, 0.0001); MomentSketchWrapper sketchObject = (MomentSketchWrapper) row.getRaw("sketch"); - assertEquals(400.0, sketchObject.getPowerSums()[0], 1e-10); + Assert.assertEquals(400.0, sketchObject.getPowerSums()[0], 1e-10); } @Test @@ -193,12 +192,12 @@ public class MomentsSketchAggregatorTest ); List results = seq.toList(); - assertEquals(1, results.size()); + Assert.assertEquals(1, results.size()); Row row = results.get(0); MomentSketchWrapper sketchObject = (MomentSketchWrapper) row.getRaw("sketch"); // 9 total products since we pre-sum the values. - assertEquals(9.0, sketchObject.getPowerSums()[0], 1e-10); + Assert.assertEquals(9.0, sketchObject.getPowerSums()[0], 1e-10); } } diff --git a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/MovingAverageIterableTest.java b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/MovingAverageIterableTest.java index 3acc1f71a46..e6de033e671 100644 --- a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/MovingAverageIterableTest.java +++ b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/MovingAverageIterableTest.java @@ -33,8 +33,10 @@ import org.apache.druid.query.filter.SelectorDimFilter; import org.apache.druid.query.movingaverage.averagers.AveragerFactory; import org.apache.druid.query.movingaverage.averagers.ConstantAveragerFactory; import org.apache.druid.query.movingaverage.averagers.LongMeanAveragerFactory; +import org.hamcrest.CoreMatchers; import org.joda.time.DateTime; import org.joda.time.chrono.ISOChronology; +import org.junit.Assert; import org.junit.Test; import java.util.ArrayList; @@ -45,16 +47,6 @@ import java.util.Iterator; import java.util.List; import java.util.Map; -import static org.hamcrest.CoreMatchers.equalTo; -import static org.hamcrest.CoreMatchers.not; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertThat; -import static org.junit.Assert.assertTrue; - -/** - * - */ public class MovingAverageIterableTest { private static final DateTime JAN_1 = new DateTime(2017, 1, 1, 0, 0, 0, 0, ISOChronology.getInstanceUTC()); @@ -121,54 +113,54 @@ public class MovingAverageIterableTest Iterator iter = iterable.iterator(); - assertTrue(iter.hasNext()); + Assert.assertTrue(iter.hasNext()); Row r = iter.next(); - assertEquals(JAN_1, r.getTimestamp()); - assertEquals("m", r.getRaw(GENDER)); + Assert.assertEquals(JAN_1, r.getTimestamp()); + Assert.assertEquals("m", r.getRaw(GENDER)); - assertTrue(iter.hasNext()); + Assert.assertTrue(iter.hasNext()); r = iter.next(); - assertEquals(JAN_1, 
r.getTimestamp()); - assertEquals("f", r.getRaw(GENDER)); + Assert.assertEquals(JAN_1, r.getTimestamp()); + Assert.assertEquals("f", r.getRaw(GENDER)); - assertTrue(iter.hasNext()); + Assert.assertTrue(iter.hasNext()); r = iter.next(); - assertEquals(JAN_2, r.getTimestamp()); - assertEquals("m", r.getRaw(GENDER)); + Assert.assertEquals(JAN_2, r.getTimestamp()); + Assert.assertEquals("m", r.getRaw(GENDER)); - assertTrue(iter.hasNext()); + Assert.assertTrue(iter.hasNext()); r = iter.next(); - assertEquals(JAN_2, r.getTimestamp()); - assertEquals("f", r.getRaw(GENDER)); + Assert.assertEquals(JAN_2, r.getTimestamp()); + Assert.assertEquals("f", r.getRaw(GENDER)); - assertTrue(iter.hasNext()); + Assert.assertTrue(iter.hasNext()); r = iter.next(); Row r2 = r; - assertEquals(JAN_3, r.getTimestamp()); - assertEquals("US", r.getRaw(COUNTRY)); + Assert.assertEquals(JAN_3, r.getTimestamp()); + Assert.assertEquals("US", r.getRaw(COUNTRY)); - assertTrue(iter.hasNext()); + Assert.assertTrue(iter.hasNext()); r = iter.next(); - assertEquals(JAN_3, r.getTimestamp()); - assertEquals("US", r.getRaw(COUNTRY)); - assertThat(r.getRaw(AGE), not(equalTo(r2.getRaw(AGE)))); + Assert.assertEquals(JAN_3, r.getTimestamp()); + Assert.assertEquals("US", r.getRaw(COUNTRY)); + Assert.assertThat(r.getRaw(AGE), CoreMatchers.not(CoreMatchers.equalTo(r2.getRaw(AGE)))); - assertTrue(iter.hasNext()); + Assert.assertTrue(iter.hasNext()); r = iter.next(); - assertEquals(JAN_4, r.getTimestamp()); - assertEquals("f", r.getRaw(GENDER)); + Assert.assertEquals(JAN_4, r.getTimestamp()); + Assert.assertEquals("f", r.getRaw(GENDER)); - assertTrue(iter.hasNext()); + Assert.assertTrue(iter.hasNext()); r = iter.next(); - assertEquals(JAN_4, r.getTimestamp()); - assertEquals("u", r.getRaw(GENDER)); + Assert.assertEquals(JAN_4, r.getTimestamp()); + Assert.assertEquals("u", r.getRaw(GENDER)); - assertTrue(iter.hasNext()); + Assert.assertTrue(iter.hasNext()); r = iter.next(); - assertEquals(JAN_4, r.getTimestamp()); - assertEquals("m", r.getRaw(GENDER)); + Assert.assertEquals(JAN_4, r.getTimestamp()); + Assert.assertEquals("m", r.getRaw(GENDER)); - assertFalse(iter.hasNext()); + Assert.assertFalse(iter.hasNext()); } @Test @@ -207,40 +199,41 @@ public class MovingAverageIterableTest new RowBucket(JAN_3, Arrays.asList(row3, row4)) )); - Iterator iter = new MovingAverageIterable(seq, ds, Arrays.asList( - new ConstantAveragerFactory("costPageViews", 7, retval), - new LongMeanAveragerFactory("movingAvgPageViews", 7, 1, "pageViews") - ), - Collections.emptyList(), - Collections.singletonList(new LongSumAggregatorFactory("pageViews", - "pageViews" - )) + Iterator iter = new MovingAverageIterable( + seq, + ds, + Arrays.asList( + new ConstantAveragerFactory("costPageViews", 7, retval), + new LongMeanAveragerFactory("movingAvgPageViews", 7, 1, "pageViews") + ), + Collections.emptyList(), + Collections.singletonList(new LongSumAggregatorFactory("pageViews", "pageViews")) ).iterator(); - assertTrue(iter.hasNext()); + Assert.assertTrue(iter.hasNext()); Row caResult = iter.next(); - assertEquals(JAN_1, caResult.getTimestamp()); - assertEquals("m", (caResult.getDimension("gender")).get(0)); - assertEquals(retval, caResult.getMetric("costPageViews").floatValue(), 0.0f); - assertEquals(1.4285715f, caResult.getMetric("movingAvgPageViews").floatValue(), 0.0f); + Assert.assertEquals(JAN_1, caResult.getTimestamp()); + Assert.assertEquals("m", (caResult.getDimension("gender")).get(0)); + Assert.assertEquals(retval, caResult.getMetric("costPageViews").floatValue(), 
0.0f); + Assert.assertEquals(1.4285715f, caResult.getMetric("movingAvgPageViews").floatValue(), 0.0f); - assertTrue(iter.hasNext()); + Assert.assertTrue(iter.hasNext()); caResult = iter.next(); - assertEquals("m", (caResult.getDimension("gender")).get(0)); - assertEquals(4.285714f, caResult.getMetric("movingAvgPageViews").floatValue(), 0.0f); + Assert.assertEquals("m", (caResult.getDimension("gender")).get(0)); + Assert.assertEquals(4.285714f, caResult.getMetric("movingAvgPageViews").floatValue(), 0.0f); - assertTrue(iter.hasNext()); + Assert.assertTrue(iter.hasNext()); caResult = iter.next(); - assertEquals("m", (caResult.getDimension("gender")).get(0)); - assertEquals(8.571428f, caResult.getMetric("movingAvgPageViews").floatValue(), 0.0f); + Assert.assertEquals("m", (caResult.getDimension("gender")).get(0)); + Assert.assertEquals(8.571428f, caResult.getMetric("movingAvgPageViews").floatValue(), 0.0f); - assertTrue(iter.hasNext()); + Assert.assertTrue(iter.hasNext()); caResult = iter.next(); - assertEquals("f", (caResult.getDimension("gender")).get(0)); - assertEquals(5.714285850f, caResult.getMetric("movingAvgPageViews").floatValue(), 0.0f); + Assert.assertEquals("f", (caResult.getDimension("gender")).get(0)); + Assert.assertEquals(5.714285850f, caResult.getMetric("movingAvgPageViews").floatValue(), 0.0f); - assertFalse(iter.hasNext()); + Assert.assertFalse(iter.hasNext()); } @@ -276,45 +269,47 @@ public class MovingAverageIterableTest new RowBucket(JAN_2, Arrays.asList(jan2Row1, jan2Row2, jan2Row3)) )); - Iterator iter = new MovingAverageIterable(seq, ds, Collections.singletonList( - new LongMeanAveragerFactory("movingAvgPageViews", 2, 1, "pageViews")), - Collections.emptyList(), - Collections.singletonList(new LongSumAggregatorFactory("pageViews", - "pageViews" - )) + Iterator iter = new MovingAverageIterable( + seq, + ds, + Collections.singletonList( + new LongMeanAveragerFactory("movingAvgPageViews", 2, 1, "pageViews") + ), + Collections.emptyList(), + Collections.singletonList(new LongSumAggregatorFactory("pageViews", "pageViews")) ).iterator(); - assertTrue(iter.hasNext()); + Assert.assertTrue(iter.hasNext()); Row result = iter.next(); - assertEquals("m", (result.getDimension("gender")).get(0)); - assertEquals(JAN_1, (result.getTimestamp())); + Assert.assertEquals("m", (result.getDimension("gender")).get(0)); + Assert.assertEquals(JAN_1, (result.getTimestamp())); - assertTrue(iter.hasNext()); + Assert.assertTrue(iter.hasNext()); result = iter.next(); - assertEquals("f", (result.getDimension("gender")).get(0)); - assertEquals(JAN_1, (result.getTimestamp())); + Assert.assertEquals("f", (result.getDimension("gender")).get(0)); + Assert.assertEquals(JAN_1, (result.getTimestamp())); - assertTrue(iter.hasNext()); + Assert.assertTrue(iter.hasNext()); result = iter.next(); - assertEquals("u", (result.getDimension("gender")).get(0)); - assertEquals(JAN_1, (result.getTimestamp())); + Assert.assertEquals("u", (result.getDimension("gender")).get(0)); + Assert.assertEquals(JAN_1, (result.getTimestamp())); - assertTrue(iter.hasNext()); + Assert.assertTrue(iter.hasNext()); result = iter.next(); - assertEquals("m", (result.getDimension("gender")).get(0)); - assertEquals(JAN_2, (result.getTimestamp())); + Assert.assertEquals("m", (result.getDimension("gender")).get(0)); + Assert.assertEquals(JAN_2, (result.getTimestamp())); - assertTrue(iter.hasNext()); + Assert.assertTrue(iter.hasNext()); result = iter.next(); - assertEquals("f", (result.getDimension("gender")).get(0)); - assertEquals(JAN_2, 
(result.getTimestamp())); + Assert.assertEquals("f", (result.getDimension("gender")).get(0)); + Assert.assertEquals(JAN_2, (result.getTimestamp())); - assertTrue(iter.hasNext()); + Assert.assertTrue(iter.hasNext()); result = iter.next(); - assertEquals("u", (result.getDimension("gender")).get(0)); - assertEquals(JAN_2, (result.getTimestamp())); + Assert.assertEquals("u", (result.getDimension("gender")).get(0)); + Assert.assertEquals(JAN_2, (result.getTimestamp())); - assertFalse(iter.hasNext()); + Assert.assertFalse(iter.hasNext()); } @@ -348,35 +343,37 @@ public class MovingAverageIterableTest new RowBucket(JAN_2, Arrays.asList(jan2Row1, jan2Row2, jan2Row3)) )); - Iterator iter = new MovingAverageIterable(seq, ds, Collections.singletonList( - new LongMeanAveragerFactory("movingAvgPageViews", 2, 1, "pageViews")), - Collections.emptyList(), - Collections.singletonList(new LongSumAggregatorFactory("pageViews", - "pageViews" - )) + Iterator iter = new MovingAverageIterable( + seq, + ds, + Collections.singletonList( + new LongMeanAveragerFactory("movingAvgPageViews", 2, 1, "pageViews") + ), + Collections.emptyList(), + Collections.singletonList(new LongSumAggregatorFactory("pageViews", "pageViews")) ).iterator(); - assertTrue(iter.hasNext()); + Assert.assertTrue(iter.hasNext()); Row result = iter.next(); - assertEquals("m", (result.getDimension("gender")).get(0)); - assertEquals(JAN_1, (result.getTimestamp())); + Assert.assertEquals("m", (result.getDimension("gender")).get(0)); + Assert.assertEquals(JAN_1, (result.getTimestamp())); - assertTrue(iter.hasNext()); + Assert.assertTrue(iter.hasNext()); result = iter.next(); - assertEquals("m", (result.getDimension("gender")).get(0)); - assertEquals(JAN_2, (result.getTimestamp())); + Assert.assertEquals("m", (result.getDimension("gender")).get(0)); + Assert.assertEquals(JAN_2, (result.getTimestamp())); - assertTrue(iter.hasNext()); + Assert.assertTrue(iter.hasNext()); result = iter.next(); - assertEquals("f", (result.getDimension("gender")).get(0)); - assertEquals(JAN_2, (result.getTimestamp())); + Assert.assertEquals("f", (result.getDimension("gender")).get(0)); + Assert.assertEquals(JAN_2, (result.getTimestamp())); - assertTrue(iter.hasNext()); + Assert.assertTrue(iter.hasNext()); result = iter.next(); - assertEquals("u", (result.getDimension("gender")).get(0)); - assertEquals(JAN_2, (result.getTimestamp())); + Assert.assertEquals("u", (result.getDimension("gender")).get(0)); + Assert.assertEquals(JAN_2, (result.getTimestamp())); - assertFalse(iter.hasNext()); + Assert.assertFalse(iter.hasNext()); } // test injection when the data is missing at the end @@ -408,45 +405,47 @@ public class MovingAverageIterableTest new RowBucket(JAN_2, Collections.singletonList(jan2Row1)) )); - Iterator iter = new MovingAverageIterable(seq, ds, Collections.singletonList( - new LongMeanAveragerFactory("movingAvgPageViews", 2, 1, "pageViews")), - Collections.emptyList(), - Collections.singletonList(new LongSumAggregatorFactory("pageViews", - "pageViews" - )) + Iterator iter = new MovingAverageIterable( + seq, + ds, + Collections.singletonList( + new LongMeanAveragerFactory("movingAvgPageViews", 2, 1, "pageViews") + ), + Collections.emptyList(), + Collections.singletonList(new LongSumAggregatorFactory("pageViews", "pageViews")) ).iterator(); - assertTrue(iter.hasNext()); + Assert.assertTrue(iter.hasNext()); Row result = iter.next(); - assertEquals("m", (result.getDimension("gender")).get(0)); - assertEquals(JAN_1, (result.getTimestamp())); + Assert.assertEquals("m", 
(result.getDimension("gender")).get(0)); + Assert.assertEquals(JAN_1, (result.getTimestamp())); - assertTrue(iter.hasNext()); + Assert.assertTrue(iter.hasNext()); result = iter.next(); - assertEquals("f", (result.getDimension("gender")).get(0)); - assertEquals(JAN_1, (result.getTimestamp())); + Assert.assertEquals("f", (result.getDimension("gender")).get(0)); + Assert.assertEquals(JAN_1, (result.getTimestamp())); - assertTrue(iter.hasNext()); + Assert.assertTrue(iter.hasNext()); result = iter.next(); - assertEquals("u", (result.getDimension("gender")).get(0)); - assertEquals(JAN_1, (result.getTimestamp())); + Assert.assertEquals("u", (result.getDimension("gender")).get(0)); + Assert.assertEquals(JAN_1, (result.getTimestamp())); - assertTrue(iter.hasNext()); + Assert.assertTrue(iter.hasNext()); result = iter.next(); - assertEquals("m", (result.getDimension("gender")).get(0)); - assertEquals(JAN_2, (result.getTimestamp())); + Assert.assertEquals("m", (result.getDimension("gender")).get(0)); + Assert.assertEquals(JAN_2, (result.getTimestamp())); - assertTrue(iter.hasNext()); + Assert.assertTrue(iter.hasNext()); result = iter.next(); - assertEquals("u", (result.getDimension("gender")).get(0)); - assertEquals(JAN_2, (result.getTimestamp())); + Assert.assertEquals("u", (result.getDimension("gender")).get(0)); + Assert.assertEquals(JAN_2, (result.getTimestamp())); - assertTrue(iter.hasNext()); + Assert.assertTrue(iter.hasNext()); result = iter.next(); - assertEquals("f", (result.getDimension("gender")).get(0)); - assertEquals(JAN_2, (result.getTimestamp())); + Assert.assertEquals("f", (result.getDimension("gender")).get(0)); + Assert.assertEquals(JAN_2, (result.getTimestamp())); - assertFalse(iter.hasNext()); + Assert.assertFalse(iter.hasNext()); } // test injection when the data is missing in the middle @@ -457,7 +456,6 @@ public class MovingAverageIterableTest Map eventM = new HashMap<>(); Map eventF = new HashMap<>(); Map eventU = new HashMap<>(); - Map event4 = new HashMap<>(); eventM.put("gender", "m"); eventM.put("pageViews", 10L); @@ -485,79 +483,81 @@ public class MovingAverageIterableTest new RowBucket(JAN_4, Collections.singletonList(jan4Row1M)) )); - Iterator iter = new MovingAverageIterable(seq, ds, Collections.singletonList( - new LongMeanAveragerFactory("movingAvgPageViews", 3, 1, "pageViews")), - Collections.emptyList(), - Collections.singletonList(new LongSumAggregatorFactory("pageViews", - "pageViews" - )) + Iterator iter = new MovingAverageIterable( + seq, + ds, + Collections.singletonList( + new LongMeanAveragerFactory("movingAvgPageViews", 3, 1, "pageViews") + ), + Collections.emptyList(), + Collections.singletonList(new LongSumAggregatorFactory("pageViews", "pageViews")) ).iterator(); // Jan 1 - assertTrue(iter.hasNext()); + Assert.assertTrue(iter.hasNext()); Row result = iter.next(); - assertEquals("m", (result.getDimension("gender")).get(0)); - assertEquals(JAN_1, (result.getTimestamp())); + Assert.assertEquals("m", (result.getDimension("gender")).get(0)); + Assert.assertEquals(JAN_1, (result.getTimestamp())); - assertTrue(iter.hasNext()); + Assert.assertTrue(iter.hasNext()); result = iter.next(); - assertEquals("f", (result.getDimension("gender")).get(0)); - assertEquals(JAN_1, (result.getTimestamp())); + Assert.assertEquals("f", (result.getDimension("gender")).get(0)); + Assert.assertEquals(JAN_1, (result.getTimestamp())); - assertTrue(iter.hasNext()); + Assert.assertTrue(iter.hasNext()); result = iter.next(); - assertEquals("u", (result.getDimension("gender")).get(0)); - 
assertEquals(JAN_1, (result.getTimestamp())); + Assert.assertEquals("u", (result.getDimension("gender")).get(0)); + Assert.assertEquals(JAN_1, (result.getTimestamp())); // Jan 2 - assertTrue(iter.hasNext()); + Assert.assertTrue(iter.hasNext()); result = iter.next(); - assertEquals("m", (result.getDimension("gender")).get(0)); - assertEquals(JAN_2, (result.getTimestamp())); + Assert.assertEquals("m", (result.getDimension("gender")).get(0)); + Assert.assertEquals(JAN_2, (result.getTimestamp())); - assertTrue(iter.hasNext()); + Assert.assertTrue(iter.hasNext()); result = iter.next(); - assertEquals("u", (result.getDimension("gender")).get(0)); - assertEquals(JAN_2, (result.getTimestamp())); + Assert.assertEquals("u", (result.getDimension("gender")).get(0)); + Assert.assertEquals(JAN_2, (result.getTimestamp())); - assertTrue(iter.hasNext()); + Assert.assertTrue(iter.hasNext()); result = iter.next(); - assertEquals("f", (result.getDimension("gender")).get(0)); - assertEquals(JAN_2, (result.getTimestamp())); + Assert.assertEquals("f", (result.getDimension("gender")).get(0)); + Assert.assertEquals(JAN_2, (result.getTimestamp())); // Jan 3 - assertTrue(iter.hasNext()); + Assert.assertTrue(iter.hasNext()); result = iter.next(); - assertEquals("m", (result.getDimension("gender")).get(0)); - assertEquals(JAN_3, (result.getTimestamp())); + Assert.assertEquals("m", (result.getDimension("gender")).get(0)); + Assert.assertEquals(JAN_3, (result.getTimestamp())); - assertTrue(iter.hasNext()); + Assert.assertTrue(iter.hasNext()); result = iter.next(); - assertEquals("f", (result.getDimension("gender")).get(0)); - assertEquals(JAN_3, (result.getTimestamp())); + Assert.assertEquals("f", (result.getDimension("gender")).get(0)); + Assert.assertEquals(JAN_3, (result.getTimestamp())); - assertTrue(iter.hasNext()); + Assert.assertTrue(iter.hasNext()); result = iter.next(); - assertEquals("u", (result.getDimension("gender")).get(0)); - assertEquals(JAN_3, (result.getTimestamp())); + Assert.assertEquals("u", (result.getDimension("gender")).get(0)); + Assert.assertEquals(JAN_3, (result.getTimestamp())); // Jan 4 - assertTrue(iter.hasNext()); + Assert.assertTrue(iter.hasNext()); result = iter.next(); - assertEquals("m", (result.getDimension("gender")).get(0)); - assertEquals(JAN_4, (result.getTimestamp())); + Assert.assertEquals("m", (result.getDimension("gender")).get(0)); + Assert.assertEquals(JAN_4, (result.getTimestamp())); - assertTrue(iter.hasNext()); + Assert.assertTrue(iter.hasNext()); result = iter.next(); - assertEquals("u", (result.getDimension("gender")).get(0)); - assertEquals(JAN_4, (result.getTimestamp())); + Assert.assertEquals("u", (result.getDimension("gender")).get(0)); + Assert.assertEquals(JAN_4, (result.getTimestamp())); - assertTrue(iter.hasNext()); + Assert.assertTrue(iter.hasNext()); result = iter.next(); - assertEquals("f", (result.getDimension("gender")).get(0)); - assertEquals(JAN_4, (result.getTimestamp())); + Assert.assertEquals("f", (result.getDimension("gender")).get(0)); + Assert.assertEquals(JAN_4, (result.getTimestamp())); - assertFalse(iter.hasNext()); + Assert.assertFalse(iter.hasNext()); } @Test @@ -585,25 +585,27 @@ public class MovingAverageIterableTest new RowBucket(JAN_4, Collections.singletonList(row2)) )); - Iterator iter = new MovingAverageIterable(seq, ds, Collections.singletonList( - new LongMeanAveragerFactory("movingAvgPageViews", 4, 1, "pageViews")), - Collections.emptyList(), - Collections.singletonList(new LongSumAggregatorFactory("pageViews", - "pageViews" - )) + 
Iterator iter = new MovingAverageIterable( + seq, + ds, + Collections.singletonList( + new LongMeanAveragerFactory("movingAvgPageViews", 4, 1, "pageViews") + ), + Collections.emptyList(), + Collections.singletonList(new LongSumAggregatorFactory("pageViews", "pageViews")) ).iterator(); - assertTrue(iter.hasNext()); + Assert.assertTrue(iter.hasNext()); Row result = iter.next(); - assertEquals("m", (result.getDimension("gender")).get(0)); - assertEquals(2.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f); + Assert.assertEquals("m", (result.getDimension("gender")).get(0)); + Assert.assertEquals(2.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f); - assertTrue(iter.hasNext()); + Assert.assertTrue(iter.hasNext()); result = iter.next(); - assertEquals("m", (result.getDimension("gender")).get(0)); - assertEquals(7.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f); + Assert.assertEquals("m", (result.getDimension("gender")).get(0)); + Assert.assertEquals(7.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f); - assertFalse(iter.hasNext()); + Assert.assertFalse(iter.hasNext()); } @Test @@ -632,35 +634,37 @@ public class MovingAverageIterableTest new RowBucket(JAN_4, Collections.singletonList(row2)) )); - Iterator iter = new MovingAverageIterable(seq, ds, Collections.singletonList( - new LongMeanAveragerFactory("movingAvgPageViews", 4, 1, "pageViews")), - Collections.emptyList(), - Collections.singletonList(new LongSumAggregatorFactory("pageViews", - "pageViews" - )) + Iterator iter = new MovingAverageIterable( + seq, + ds, + Collections.singletonList( + new LongMeanAveragerFactory("movingAvgPageViews", 4, 1, "pageViews") + ), + Collections.emptyList(), + Collections.singletonList(new LongSumAggregatorFactory("pageViews", "pageViews")) ).iterator(); - assertTrue(iter.hasNext()); + Assert.assertTrue(iter.hasNext()); Row result = iter.next(); - assertEquals("m", (result.getDimension("gender")).get(0)); - assertEquals(2.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f); + Assert.assertEquals("m", (result.getDimension("gender")).get(0)); + Assert.assertEquals(2.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f); - assertTrue(iter.hasNext()); + Assert.assertTrue(iter.hasNext()); result = iter.next(); - assertEquals("m", (result.getDimension("gender")).get(0)); - assertEquals(2.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f); + Assert.assertEquals("m", (result.getDimension("gender")).get(0)); + Assert.assertEquals(2.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f); - assertTrue(iter.hasNext()); + Assert.assertTrue(iter.hasNext()); result = iter.next(); - assertEquals("m", (result.getDimension("gender")).get(0)); - assertEquals(2.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f); + Assert.assertEquals("m", (result.getDimension("gender")).get(0)); + Assert.assertEquals(2.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f); - assertTrue(iter.hasNext()); + Assert.assertTrue(iter.hasNext()); result = iter.next(); - assertEquals("m", (result.getDimension("gender")).get(0)); - assertEquals(7.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f); + Assert.assertEquals("m", (result.getDimension("gender")).get(0)); + Assert.assertEquals(7.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f); - assertFalse(iter.hasNext()); + Assert.assertFalse(iter.hasNext()); } @Test @@ -693,34 +697,35 @@ public class MovingAverageIterableTest DimFilter filter = new 
SelectorDimFilter("gender", "m", null); FilteredAggregatorFactory filteredAggregatorFactory = new FilteredAggregatorFactory(aggregatorFactory, filter); - Iterator iter = new MovingAverageIterable(seq, ds, Collections.singletonList( - averagerfactory), - Collections.emptyList(), - Collections.singletonList( - filteredAggregatorFactory) + Iterator iter = new MovingAverageIterable( + seq, + ds, + Collections.singletonList(averagerfactory), + Collections.emptyList(), + Collections.singletonList(filteredAggregatorFactory) ).iterator(); - assertTrue(iter.hasNext()); + Assert.assertTrue(iter.hasNext()); Row result = iter.next(); - assertEquals("m", (result.getDimension("gender")).get(0)); - assertEquals(2.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f); + Assert.assertEquals("m", (result.getDimension("gender")).get(0)); + Assert.assertEquals(2.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f); - assertTrue(iter.hasNext()); + Assert.assertTrue(iter.hasNext()); result = iter.next(); - assertEquals("m", (result.getDimension("gender")).get(0)); - assertEquals(2.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f); + Assert.assertEquals("m", (result.getDimension("gender")).get(0)); + Assert.assertEquals(2.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f); - assertTrue(iter.hasNext()); + Assert.assertTrue(iter.hasNext()); result = iter.next(); - assertEquals("m", (result.getDimension("gender")).get(0)); - assertEquals(2.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f); + Assert.assertEquals("m", (result.getDimension("gender")).get(0)); + Assert.assertEquals(2.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f); - assertTrue(iter.hasNext()); + Assert.assertTrue(iter.hasNext()); result = iter.next(); - assertEquals("m", (result.getDimension("gender")).get(0)); - assertEquals(7.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f); + Assert.assertEquals("m", (result.getDimension("gender")).get(0)); + Assert.assertEquals(7.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f); - assertFalse(iter.hasNext()); + Assert.assertFalse(iter.hasNext()); } @Test @@ -751,53 +756,53 @@ public class MovingAverageIterableTest new RowBucket(JAN_6, Collections.emptyList()) )); - Iterator iter = new MovingAverageIterable(seq, ds, Collections.singletonList( - new LongMeanAveragerFactory("movingAvgPageViews", 4, 1, "pageViews")), - Collections.emptyList(), - Collections.singletonList(new LongSumAggregatorFactory("pageViews", - "pageViews" - )) + Iterator iter = new MovingAverageIterable( + seq, + ds, + Collections.singletonList( + new LongMeanAveragerFactory("movingAvgPageViews", 4, 1, "pageViews") + ), + Collections.emptyList(), + Collections.singletonList(new LongSumAggregatorFactory("pageViews", "pageViews")) ).iterator(); - assertTrue(iter.hasNext()); + Assert.assertTrue(iter.hasNext()); Row result = iter.next(); - assertEquals(JAN_1, result.getTimestamp()); - assertEquals("m", (result.getDimension("gender")).get(0)); - assertEquals(2.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f); + Assert.assertEquals(JAN_1, result.getTimestamp()); + Assert.assertEquals("m", (result.getDimension("gender")).get(0)); + Assert.assertEquals(2.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f); - assertTrue(iter.hasNext()); + Assert.assertTrue(iter.hasNext()); result = iter.next(); - assertEquals(JAN_2, result.getTimestamp()); - assertEquals("m", (result.getDimension("gender")).get(0)); - assertEquals(7.5f, 
result.getMetric("movingAvgPageViews").floatValue(), 0.0f); + Assert.assertEquals(JAN_2, result.getTimestamp()); + Assert.assertEquals("m", (result.getDimension("gender")).get(0)); + Assert.assertEquals(7.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f); - assertTrue(iter.hasNext()); + Assert.assertTrue(iter.hasNext()); result = iter.next(); - assertEquals(JAN_3, result.getTimestamp()); - assertEquals("m", (result.getDimension("gender")).get(0)); - assertEquals(7.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f); + Assert.assertEquals(JAN_3, result.getTimestamp()); + Assert.assertEquals("m", (result.getDimension("gender")).get(0)); + Assert.assertEquals(7.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f); - assertTrue(iter.hasNext()); + Assert.assertTrue(iter.hasNext()); result = iter.next(); - assertEquals(JAN_4, result.getTimestamp()); - assertEquals("m", (result.getDimension("gender")).get(0)); - assertEquals(7.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f); + Assert.assertEquals(JAN_4, result.getTimestamp()); + Assert.assertEquals("m", (result.getDimension("gender")).get(0)); + Assert.assertEquals(7.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f); - assertTrue(iter.hasNext()); + Assert.assertTrue(iter.hasNext()); result = iter.next(); - assertEquals(JAN_5, result.getTimestamp()); - assertEquals("m", (result.getDimension("gender")).get(0)); - assertEquals(5.0f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f); + Assert.assertEquals(JAN_5, result.getTimestamp()); + Assert.assertEquals("m", (result.getDimension("gender")).get(0)); + Assert.assertEquals(5.0f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f); - assertTrue(iter.hasNext()); + Assert.assertTrue(iter.hasNext()); result = iter.next(); - assertEquals(JAN_6, result.getTimestamp()); - assertEquals("m", (result.getDimension("gender")).get(0)); - assertEquals(0.0f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f); - - assertFalse(iter.hasNext()); + Assert.assertEquals(JAN_6, result.getTimestamp()); + Assert.assertEquals("m", (result.getDimension("gender")).get(0)); + Assert.assertEquals(0.0f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f); + Assert.assertFalse(iter.hasNext()); } - } diff --git a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/MovingAverageQueryTest.java b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/MovingAverageQueryTest.java index e66e50d11e6..b5a8dbf57ff 100644 --- a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/MovingAverageQueryTest.java +++ b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/MovingAverageQueryTest.java @@ -19,9 +19,7 @@ package org.apache.druid.query.movingaverage; -import com.fasterxml.jackson.core.JsonParseException; import com.fasterxml.jackson.core.type.TypeReference; -import com.fasterxml.jackson.databind.JsonMappingException; import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.databind.node.ArrayNode; import com.fasterxml.jackson.dataformat.yaml.YAMLFactory; @@ -70,6 +68,7 @@ import org.apache.druid.server.initialization.ServerConfig; import org.apache.druid.timeline.TimelineLookup; import org.hamcrest.core.IsInstanceOf; import org.joda.time.Interval; +import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; @@ 
-86,10 +85,6 @@ import java.util.List; import java.util.Map; import java.util.concurrent.Executor; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertThat; - /** * Base class for implementing MovingAverageQuery tests */ @@ -97,7 +92,6 @@ import static org.junit.Assert.assertThat; public class MovingAverageQueryTest { private final ObjectMapper jsonMapper; - private final Injector injector; private final QueryToolChestWarehouse warehouse; private final RetryQueryRunnerConfig retryConfig; private final ServerConfig serverConfig; @@ -106,7 +100,6 @@ public class MovingAverageQueryTest private final List> timeseriesResults = new ArrayList<>(); private final TestConfig config; - private final String yamlFile; @Parameters(name = "{0}") public static Iterable data() throws IOException @@ -116,7 +109,7 @@ public class MovingAverageQueryTest List tests = new ArrayList<>(); for (String line = testReader.readLine(); line != null; line = testReader.readLine()) { - tests.add(new String[] {line}); + tests.add(new String[]{line}); } return tests; @@ -124,7 +117,6 @@ public class MovingAverageQueryTest public MovingAverageQueryTest(String yamlFile) throws IOException { - this.yamlFile = yamlFile; List modules = getRequiredModules(); modules.add( @@ -137,19 +129,13 @@ public class MovingAverageQueryTest @Override public QueryRunner getQueryRunnerForIntervals(Query query, Iterable intervals) { - return new QueryRunner() - { - @Override - @SuppressWarnings("unchecked") - public Sequence run(QueryPlus queryPlus, Map responseContext) - { - if (query instanceof GroupByQuery) { - return (Sequence) Sequences.simple(groupByResults); - } else if (query instanceof TimeseriesQuery) { - return (Sequence) Sequences.simple(timeseriesResults); - } - throw new UnsupportedOperationException("unexpected query type " + query.getType()); + return (queryPlus, responseContext) -> { + if (query instanceof GroupByQuery) { + return (Sequence) Sequences.simple(groupByResults); + } else if (query instanceof TimeseriesQuery) { + return (Sequence) Sequences.simple(timeseriesResults); } + throw new UnsupportedOperationException("unexpected query type " + query.getType()); }; } @@ -165,7 +151,7 @@ public class MovingAverageQueryTest System.setProperty("druid.generic.useDefaultValueForNull", "true"); System.setProperty("druid.processing.buffer.sizeBytes", "655360"); Injector baseInjector = GuiceInjectors.makeStartupInjector(); - injector = Initialization.makeInjectorWithModules(baseInjector, modules); + Injector injector = Initialization.makeInjectorWithModules(baseInjector, modules); jsonMapper = injector.getInstance(ObjectMapper.class); warehouse = injector.getInstance(QueryToolChestWarehouse.class); @@ -182,7 +168,7 @@ public class MovingAverageQueryTest * * @return The JSON query */ - protected String getQueryString() + private String getQueryString() { return config.query.toString(); } @@ -192,7 +178,7 @@ public class MovingAverageQueryTest * * @return The JSON result */ - protected String getExpectedResultString() + private String getExpectedResultString() { return config.expectedOutput.toString(); } @@ -204,7 +190,7 @@ public class MovingAverageQueryTest * * @return The JSON result from the groupby query */ - protected String getGroupByResultJson() + private String getGroupByResultJson() { ArrayNode node = config.intermediateResults.get("groupBy"); return node == null ? 
null : node.toString(); @@ -217,7 +203,7 @@ public class MovingAverageQueryTest * * @return The JSON result from the timeseries query */ - protected String getTimeseriesResultJson() + private String getTimeseriesResultJson() { ArrayNode node = config.intermediateResults.get("timeseries"); return node == null ? null : node.toString(); @@ -228,12 +214,12 @@ public class MovingAverageQueryTest * * @return The Query type */ - protected Class getExpectedQueryType() + private Class getExpectedQueryType() { return MovingAverageQuery.class; } - protected TypeReference getExpectedResultType() + private TypeReference getExpectedResultType() { return new TypeReference>() { @@ -242,10 +228,8 @@ public class MovingAverageQueryTest /** * Returns a list of any additional Druid Modules necessary to run the test. - * - * @return List of Druid Modules */ - protected List getRequiredModules() + private List getRequiredModules() { List list = new ArrayList<>(); @@ -258,12 +242,8 @@ public class MovingAverageQueryTest /** * Set up any needed mocks to stub out backend query behavior. - * - * @throws IOException - * @throws JsonMappingException - * @throws JsonParseException */ - protected void defineMocks() throws IOException + private void defineMocks() throws IOException { groupByResults.clear(); timeseriesResults.clear(); @@ -286,45 +266,39 @@ public class MovingAverageQueryTest /** * converts Int to Long, Float to Double in the actual and expected result - * - * @param result */ - protected List consistentTypeCasting(List result) + private List consistentTypeCasting(List result) { List newResult = new ArrayList<>(); for (MapBasedRow row : result) { final Map event = Maps.newLinkedHashMap((row).getEvent()); event.forEach((key, value) -> { - if (Integer.class.isInstance(value)) { + if (value instanceof Integer) { event.put(key, ((Integer) value).longValue()); } - if (Float.class.isInstance(value)) { + if (value instanceof Float) { event.put(key, ((Float) value).doubleValue()); } }); newResult.add(new MapBasedRow(row.getTimestamp(), event)); } - + return newResult; } /** * Validate that the specified query behaves correctly. 
- * - * @throws IOException - * @throws JsonMappingException - * @throws JsonParseException */ @SuppressWarnings({"unchecked", "rawtypes"}) @Test public void testQuery() throws IOException { Query query = jsonMapper.readValue(getQueryString(), Query.class); - assertThat(query, IsInstanceOf.instanceOf(getExpectedQueryType())); + Assert.assertThat(query, IsInstanceOf.instanceOf(getExpectedQueryType())); List expectedResults = jsonMapper.readValue(getExpectedResultString(), getExpectedResultType()); - assertNotNull(expectedResults); - assertThat(expectedResults, IsInstanceOf.instanceOf(List.class)); + Assert.assertNotNull(expectedResults); + Assert.assertThat(expectedResults, IsInstanceOf.instanceOf(List.class)); CachingClusteredClient baseClient = new CachingClusteredClient( warehouse, @@ -403,6 +377,6 @@ public class MovingAverageQueryTest expectedResults = consistentTypeCasting(expectedResults); actualResults = consistentTypeCasting(actualResults); - assertEquals(expectedResults, actualResults); + Assert.assertEquals(expectedResults, actualResults); } } diff --git a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/PostAveragerAggregatorCalculatorTest.java b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/PostAveragerAggregatorCalculatorTest.java index 51c707749f1..542408c75de 100644 --- a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/PostAveragerAggregatorCalculatorTest.java +++ b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/PostAveragerAggregatorCalculatorTest.java @@ -31,6 +31,7 @@ import org.apache.druid.query.spec.MultipleIntervalSegmentSpec; import org.joda.time.DateTime; import org.joda.time.Interval; import org.joda.time.chrono.ISOChronology; +import org.junit.Assert; import org.junit.Before; import org.junit.Test; @@ -39,17 +40,8 @@ import java.util.Collections; import java.util.HashMap; import java.util.Map; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertNull; - - -/** - * Unit tests for PostAveragerCalcualtor - */ public class PostAveragerAggregatorCalculatorTest { - - private MovingAverageQuery query; private PostAveragerAggregatorCalculator pac; private Map event; private MapBasedRow row; @@ -58,9 +50,12 @@ public class PostAveragerAggregatorCalculatorTest public void setup() { System.setProperty("druid.generic.useDefaultValueForNull", "true"); - query = new MovingAverageQuery( + MovingAverageQuery query = new MovingAverageQuery( new TableDataSource("d"), - new MultipleIntervalSegmentSpec(Collections.singletonList(new Interval("2017-01-01/2017-01-01", ISOChronology.getInstanceUTC()))), + new MultipleIntervalSegmentSpec(Collections.singletonList(new Interval( + "2017-01-01/2017-01-01", + ISOChronology.getInstanceUTC() + ))), null, Granularities.DAY, null, @@ -88,22 +83,22 @@ public class PostAveragerAggregatorCalculatorTest @Test public void testApply() { - event.put("count", new Double(10.0)); - event.put("avgCount", new Double(12.0)); + event.put("count", 10.0); + event.put("avgCount", 12.0); Row result = pac.apply(row); - assertEquals(result.getMetric("avgCountRatio").floatValue(), 10.0f / 12.0f, 0.0); + Assert.assertEquals(10.0f / 12.0f, result.getMetric("avgCountRatio").floatValue(), 0.0); } @Test public void testApplyMissingColumn() { - event.put("count", new Double(10.0)); + event.put("count", 10.0); Row result = pac.apply(row); - 
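Aside: the PostAveragerAggregatorCalculatorTest hunks here also swap the deprecated new Double(...) constructors for autoboxed literals and move the expected value into the first argument of Assert.assertEquals. A small, self-contained sketch of both conventions, with illustrative class and field names:

```java
import java.util.HashMap;
import java.util.Map;
import org.junit.Assert;
import org.junit.Test;

public class AssertArgumentOrderTest
{
  @Test
  public void testAutoboxedLiteralsAndExpectedFirst()
  {
    Map<String, Object> event = new HashMap<>();

    // Autoboxed literals instead of the deprecated new Double(...) constructor calls.
    event.put("count", 10.0);
    event.put("avgCount", 12.0);

    double ratio = (Double) event.get("count") / (Double) event.get("avgCount");

    // Assert.assertEquals(expected, actual, delta): the expected value comes first.
    Assert.assertEquals(10.0 / 12.0, ratio, 1e-9);
  }
}
```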
assertEquals(result.getMetric("avgCountRatio").floatValue(), 0.0, 0.0); - assertNull(result.getRaw("avgCountRatio")); + Assert.assertEquals(0.0, result.getMetric("avgCountRatio").floatValue(), 0.0); + Assert.assertNull(result.getRaw("avgCountRatio")); } } diff --git a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/RowBucketIterableTest.java b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/RowBucketIterableTest.java index 7504a979bac..b49e4d95033 100644 --- a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/RowBucketIterableTest.java +++ b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/RowBucketIterableTest.java @@ -27,6 +27,7 @@ import org.joda.time.DateTime; import org.joda.time.Interval; import org.joda.time.Period; import org.joda.time.chrono.ISOChronology; +import org.junit.Assert; import org.junit.BeforeClass; import org.junit.Test; @@ -38,12 +39,8 @@ import java.util.Iterator; import java.util.List; import java.util.Map; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - public class RowBucketIterableTest { - private static final DateTime JAN_1 = new DateTime(2017, 1, 1, 0, 0, 0, 0, ISOChronology.getInstanceUTC()); private static final DateTime JAN_2 = new DateTime(2017, 1, 2, 0, 0, 0, 0, ISOChronology.getInstanceUTC()); private static final DateTime JAN_3 = new DateTime(2017, 1, 3, 0, 0, 0, 0, ISOChronology.getInstanceUTC()); @@ -91,11 +88,9 @@ public class RowBucketIterableTest EVENT_U_30.put("pageViews", 30L); } - // normal case. data for all the days present @Test public void testCompleteData() { - intervals = new ArrayList<>(); intervals.add(INTERVAL_JAN_1_4); @@ -115,27 +110,25 @@ public class RowBucketIterableTest Iterator iter = rbi.iterator(); RowBucket actual = iter.next(); - assertEquals(JAN_1, actual.getDateTime()); - assertEquals(expectedDay1, actual.getRows()); + Assert.assertEquals(JAN_1, actual.getDateTime()); + Assert.assertEquals(expectedDay1, actual.getRows()); actual = iter.next(); - assertEquals(JAN_2, actual.getDateTime()); - assertEquals(expectedDay2, actual.getRows()); + Assert.assertEquals(JAN_2, actual.getDateTime()); + Assert.assertEquals(expectedDay2, actual.getRows()); actual = iter.next(); - assertEquals(JAN_3, actual.getDateTime()); - assertEquals(expectedDay3, actual.getRows()); + Assert.assertEquals(JAN_3, actual.getDateTime()); + Assert.assertEquals(expectedDay3, actual.getRows()); actual = iter.next(); - assertEquals(JAN_4, actual.getDateTime()); - assertEquals(expectedDay4, actual.getRows()); + Assert.assertEquals(JAN_4, actual.getDateTime()); + Assert.assertEquals(expectedDay4, actual.getRows()); } - // all days present and last day only has one row @Test public void testApplyLastDaySingleRow() { - intervals = new ArrayList<>(); intervals.add(INTERVAL_JAN_1_4); @@ -156,23 +149,21 @@ public class RowBucketIterableTest Iterator iter = rbi.iterator(); RowBucket actual = iter.next(); - assertEquals(expectedDay1, actual.getRows()); + Assert.assertEquals(expectedDay1, actual.getRows()); actual = iter.next(); - assertEquals(expectedDay2, actual.getRows()); + Assert.assertEquals(expectedDay2, actual.getRows()); actual = iter.next(); - assertEquals(expectedDay3, actual.getRows()); + Assert.assertEquals(expectedDay3, actual.getRows()); actual = iter.next(); - assertEquals(expectedDay4, actual.getRows()); + Assert.assertEquals(expectedDay4, 
actual.getRows()); } - // all days present and last day has multiple rows @Test public void testApplyLastDayMultipleRows() { - intervals = new ArrayList<>(); intervals.add(INTERVAL_JAN_1_4); @@ -195,23 +186,21 @@ public class RowBucketIterableTest Iterator iter = rbi.iterator(); RowBucket actual = iter.next(); - assertEquals(expectedDay1, actual.getRows()); + Assert.assertEquals(expectedDay1, actual.getRows()); actual = iter.next(); - assertEquals(expectedDay2, actual.getRows()); + Assert.assertEquals(expectedDay2, actual.getRows()); actual = iter.next(); - assertEquals(expectedDay3, actual.getRows()); + Assert.assertEquals(expectedDay3, actual.getRows()); actual = iter.next(); - assertEquals(expectedDay4, actual.getRows()); + Assert.assertEquals(expectedDay4, actual.getRows()); } - // test single day with single row @Test public void testSingleDaySingleRow() { - intervals = new ArrayList<>(); intervals.add(INTERVAL_JAN_1_1); @@ -225,16 +214,13 @@ public class RowBucketIterableTest Iterator iter = rbi.iterator(); RowBucket actual = iter.next(); - assertEquals(expectedDay1, actual.getRows()); - assertEquals(JAN_1, actual.getDateTime()); - + Assert.assertEquals(expectedDay1, actual.getRows()); + Assert.assertEquals(JAN_1, actual.getDateTime()); } - // test single day with multiple rows @Test public void testSingleDayMultipleRow() { - intervals = new ArrayList<>(); intervals.add(INTERVAL_JAN_1_1); @@ -250,16 +236,13 @@ public class RowBucketIterableTest Iterator iter = rbi.iterator(); RowBucket actual = iter.next(); - assertEquals(JAN_1, actual.getDateTime()); - assertEquals(expectedDay1, actual.getRows()); - + Assert.assertEquals(JAN_1, actual.getDateTime()); + Assert.assertEquals(expectedDay1, actual.getRows()); } - // missing day at the beginning followed by single row @Test public void testMissingDaysAtBegining() { - List expectedDay1 = Collections.emptyList(); List expectedDay2 = Collections.singletonList(JAN_2_M_10); @@ -274,20 +257,17 @@ public class RowBucketIterableTest Iterator iter = rbi.iterator(); RowBucket actual = iter.next(); - assertEquals(JAN_1, actual.getDateTime()); - assertEquals(expectedDay1, actual.getRows()); + Assert.assertEquals(JAN_1, actual.getDateTime()); + Assert.assertEquals(expectedDay1, actual.getRows()); actual = iter.next(); - assertEquals(JAN_2, actual.getDateTime()); - assertEquals(expectedDay2, actual.getRows()); - + Assert.assertEquals(JAN_2, actual.getDateTime()); + Assert.assertEquals(expectedDay2, actual.getRows()); } - // missing day at the beginning followed by multiple row @Test public void testMissingDaysAtBeginingFollowedByMultipleRow() { - List expectedDay1 = Collections.emptyList(); List expectedDay2 = Collections.singletonList(JAN_2_M_10); List expectedDay3 = Collections.singletonList(JAN_3_M_10); @@ -306,27 +286,25 @@ public class RowBucketIterableTest Iterator iter = rbi.iterator(); RowBucket actual = iter.next(); - assertEquals(JAN_1, actual.getDateTime()); - assertEquals(expectedDay1, actual.getRows()); + Assert.assertEquals(JAN_1, actual.getDateTime()); + Assert.assertEquals(expectedDay1, actual.getRows()); actual = iter.next(); - assertEquals(JAN_2, actual.getDateTime()); - assertEquals(expectedDay2, actual.getRows()); + Assert.assertEquals(JAN_2, actual.getDateTime()); + Assert.assertEquals(expectedDay2, actual.getRows()); actual = iter.next(); - assertEquals(JAN_3, actual.getDateTime()); - assertEquals(expectedDay3, actual.getRows()); + Assert.assertEquals(JAN_3, actual.getDateTime()); + Assert.assertEquals(expectedDay3, 
actual.getRows()); actual = iter.next(); - assertEquals(JAN_4, actual.getDateTime()); - assertEquals(expectedDay4, actual.getRows()); + Assert.assertEquals(JAN_4, actual.getDateTime()); + Assert.assertEquals(expectedDay4, actual.getRows()); } - // missing day at the beginning and at the end @Test public void testMissingDaysAtBeginingAndAtTheEnd() { - List expectedDay1 = Collections.emptyList(); List expectedDay2 = Collections.singletonList(JAN_2_M_10); List expectedDay3 = Collections.singletonList(JAN_3_M_10); @@ -344,27 +322,25 @@ public class RowBucketIterableTest Iterator iter = rbi.iterator(); RowBucket actual = iter.next(); - assertEquals(JAN_1, actual.getDateTime()); - assertEquals(expectedDay1, actual.getRows()); + Assert.assertEquals(JAN_1, actual.getDateTime()); + Assert.assertEquals(expectedDay1, actual.getRows()); actual = iter.next(); - assertEquals(JAN_2, actual.getDateTime()); - assertEquals(expectedDay2, actual.getRows()); + Assert.assertEquals(JAN_2, actual.getDateTime()); + Assert.assertEquals(expectedDay2, actual.getRows()); actual = iter.next(); - assertEquals(JAN_3, actual.getDateTime()); - assertEquals(expectedDay3, actual.getRows()); + Assert.assertEquals(JAN_3, actual.getDateTime()); + Assert.assertEquals(expectedDay3, actual.getRows()); actual = iter.next(); - assertEquals(JAN_4, actual.getDateTime()); - assertEquals(expectedDay4, actual.getRows()); + Assert.assertEquals(JAN_4, actual.getDateTime()); + Assert.assertEquals(expectedDay4, actual.getRows()); } - // multiple missing days in an interval @Test public void testMultipleMissingDays() { - List expectedDay1 = Collections.emptyList(); List expectedDay2 = Collections.singletonList(JAN_2_M_10); List expectedDay3 = Collections.emptyList(); @@ -382,27 +358,25 @@ public class RowBucketIterableTest Iterator iter = rbi.iterator(); RowBucket actual = iter.next(); - assertEquals(JAN_1, actual.getDateTime()); - assertEquals(expectedDay1, actual.getRows()); + Assert.assertEquals(JAN_1, actual.getDateTime()); + Assert.assertEquals(expectedDay1, actual.getRows()); actual = iter.next(); - assertEquals(JAN_2, actual.getDateTime()); - assertEquals(expectedDay2, actual.getRows()); + Assert.assertEquals(JAN_2, actual.getDateTime()); + Assert.assertEquals(expectedDay2, actual.getRows()); actual = iter.next(); - assertEquals(JAN_3, actual.getDateTime()); - assertEquals(expectedDay3, actual.getRows()); + Assert.assertEquals(JAN_3, actual.getDateTime()); + Assert.assertEquals(expectedDay3, actual.getRows()); actual = iter.next(); - assertEquals(JAN_4, actual.getDateTime()); - assertEquals(expectedDay4, actual.getRows()); + Assert.assertEquals(JAN_4, actual.getDateTime()); + Assert.assertEquals(expectedDay4, actual.getRows()); } - // multiple missing days in an interval followed by multiple row at the end @Test public void testMultipleMissingDaysMultipleRowAtTheEnd() { - List expectedDay1 = Collections.emptyList(); List expectedDay2 = Collections.singletonList(JAN_2_M_10); List expectedDay3 = Collections.emptyList(); @@ -422,32 +396,29 @@ public class RowBucketIterableTest Iterator iter = rbi.iterator(); RowBucket actual = iter.next(); - assertEquals(JAN_1, actual.getDateTime()); - assertEquals(expectedDay1, actual.getRows()); + Assert.assertEquals(JAN_1, actual.getDateTime()); + Assert.assertEquals(expectedDay1, actual.getRows()); actual = iter.next(); - assertEquals(JAN_2, actual.getDateTime()); - assertEquals(expectedDay2, actual.getRows()); + Assert.assertEquals(JAN_2, actual.getDateTime()); + Assert.assertEquals(expectedDay2, 
actual.getRows()); actual = iter.next(); - assertEquals(JAN_3, actual.getDateTime()); - assertEquals(expectedDay3, actual.getRows()); + Assert.assertEquals(JAN_3, actual.getDateTime()); + Assert.assertEquals(expectedDay3, actual.getRows()); actual = iter.next(); - assertEquals(JAN_4, actual.getDateTime()); - assertEquals(expectedDay4, actual.getRows()); + Assert.assertEquals(JAN_4, actual.getDateTime()); + Assert.assertEquals(expectedDay4, actual.getRows()); actual = iter.next(); - assertEquals(JAN_5, actual.getDateTime()); - assertEquals(expectedDay5, actual.getRows()); + Assert.assertEquals(JAN_5, actual.getDateTime()); + Assert.assertEquals(expectedDay5, actual.getRows()); } - - // missing day in the middle followed by single row @Test public void testMissingDaysInMiddleOneRow() { - List expectedDay1 = Collections.singletonList(JAN_1_M_10); List expectedDay2 = Collections.singletonList(JAN_2_M_10); List expectedDay3 = Collections.emptyList(); @@ -466,25 +437,22 @@ public class RowBucketIterableTest Iterator iter = rbi.iterator(); RowBucket actual = iter.next(); - assertEquals(expectedDay1, actual.getRows()); + Assert.assertEquals(expectedDay1, actual.getRows()); actual = iter.next(); - assertEquals(expectedDay2, actual.getRows()); + Assert.assertEquals(expectedDay2, actual.getRows()); actual = iter.next(); - assertEquals(JAN_3, actual.getDateTime()); - assertEquals(expectedDay3, actual.getRows()); + Assert.assertEquals(JAN_3, actual.getDateTime()); + Assert.assertEquals(expectedDay3, actual.getRows()); actual = iter.next(); - assertEquals(expectedDay4, actual.getRows()); - + Assert.assertEquals(expectedDay4, actual.getRows()); } - // missing day in the middle followed by multiple rows @Test public void testMissingDaysInMiddleMultipleRow() { - List expectedDay1 = Collections.singletonList(JAN_1_M_10); List expectedDay2 = Collections.emptyList(); List expectedDay3 = Collections.singletonList(JAN_3_M_10); @@ -503,28 +471,25 @@ public class RowBucketIterableTest Iterator iter = rbi.iterator(); RowBucket actual = iter.next(); - assertEquals(JAN_1, actual.getDateTime()); - assertEquals(expectedDay1, actual.getRows()); + Assert.assertEquals(JAN_1, actual.getDateTime()); + Assert.assertEquals(expectedDay1, actual.getRows()); actual = iter.next(); - assertEquals(JAN_2, actual.getDateTime()); - assertEquals(expectedDay2, actual.getRows()); + Assert.assertEquals(JAN_2, actual.getDateTime()); + Assert.assertEquals(expectedDay2, actual.getRows()); actual = iter.next(); - assertEquals(JAN_3, actual.getDateTime()); - assertEquals(expectedDay3, actual.getRows()); + Assert.assertEquals(JAN_3, actual.getDateTime()); + Assert.assertEquals(expectedDay3, actual.getRows()); actual = iter.next(); - assertEquals(JAN_4, actual.getDateTime()); - assertEquals(expectedDay4, actual.getRows()); - + Assert.assertEquals(JAN_4, actual.getDateTime()); + Assert.assertEquals(expectedDay4, actual.getRows()); } - // data missing for last day . 
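Aside: RowBucketIterableTest repeatedly walks an iterator of per-day buckets and compares each bucket against an expected row list; the change in these hunks is purely stylistic, dropping the static assertEquals/assertTrue imports for a single import of org.junit.Assert with qualified calls. A stripped-down sketch of that qualified style, assuming plain List stand-ins rather than the real RowBucket/Row types:

import java.util.Arrays;
import java.util.Collections;
import java.util.Iterator;
import java.util.List;

import org.junit.Assert;
import org.junit.Test;

public class QualifiedAssertStyleExample
{
  @Test
  public void bucketsComeBackInOrder()
  {
    // Two "days" of rows plus an empty day, standing in for RowBucket contents.
    List<List<String>> buckets = Arrays.asList(
        Arrays.asList("row1", "row2"),
        Collections.singletonList("row3"),
        Collections.emptyList()
    );
    Iterator<List<String>> iter = buckets.iterator();

    // Fully qualified Assert.* calls instead of static imports keep the origin
    // of every assertion visible at the call site.
    Assert.assertTrue(iter.hasNext());
    Assert.assertEquals(Arrays.asList("row1", "row2"), iter.next());
    Assert.assertEquals(Collections.singletonList("row3"), iter.next());
    Assert.assertEquals(Collections.emptyList(), iter.next());
    Assert.assertFalse(iter.hasNext());
  }
}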
@Test public void testApplyLastDayNoRows() { - intervals = new ArrayList<>(); intervals.add(INTERVAL_JAN_1_4); @@ -544,24 +509,22 @@ public class RowBucketIterableTest Iterator iter = rbi.iterator(); RowBucket actual = iter.next(); - assertEquals(expectedDay1, actual.getRows()); + Assert.assertEquals(expectedDay1, actual.getRows()); actual = iter.next(); - assertEquals(expectedDay2, actual.getRows()); + Assert.assertEquals(expectedDay2, actual.getRows()); actual = iter.next(); - assertEquals(expectedDay3, actual.getRows()); + Assert.assertEquals(expectedDay3, actual.getRows()); actual = iter.next(); - assertEquals(JAN_4, actual.getDateTime()); - assertEquals(expectedDay4, actual.getRows()); + Assert.assertEquals(JAN_4, actual.getDateTime()); + Assert.assertEquals(expectedDay4, actual.getRows()); } - // data missing for last two days @Test public void testApplyLastTwoDayNoRows() { - List expectedDay1 = Arrays.asList(JAN_1_M_10, JAN_1_F_20); List expectedDay2 = Collections.singletonList(JAN_2_M_10); List expectedDay3 = Collections.emptyList(); @@ -580,25 +543,23 @@ public class RowBucketIterableTest Iterator iter = rbi.iterator(); RowBucket actual = iter.next(); - assertEquals(expectedDay1, actual.getRows()); + Assert.assertEquals(expectedDay1, actual.getRows()); actual = iter.next(); - assertEquals(expectedDay2, actual.getRows()); + Assert.assertEquals(expectedDay2, actual.getRows()); actual = iter.next(); - assertEquals(JAN_3, actual.getDateTime()); - assertEquals(expectedDay3, actual.getRows()); + Assert.assertEquals(JAN_3, actual.getDateTime()); + Assert.assertEquals(expectedDay3, actual.getRows()); actual = iter.next(); - assertEquals(JAN_4, actual.getDateTime()); - assertEquals(expectedDay4, actual.getRows()); + Assert.assertEquals(JAN_4, actual.getDateTime()); + Assert.assertEquals(expectedDay4, actual.getRows()); } - @Test public void testApplyMultipleInterval() { - intervals = new ArrayList<>(); intervals.add(INTERVAL_JAN_1_4); intervals.add(INTERVAL_JAN_6_8); @@ -628,31 +589,30 @@ public class RowBucketIterableTest Iterator iter = rbi.iterator(); RowBucket actual = iter.next(); - assertEquals(expectedDay1, actual.getRows()); + Assert.assertEquals(expectedDay1, actual.getRows()); actual = iter.next(); - assertEquals(expectedDay2, actual.getRows()); + Assert.assertEquals(expectedDay2, actual.getRows()); actual = iter.next(); - assertEquals(expectedDay3, actual.getRows()); + Assert.assertEquals(expectedDay3, actual.getRows()); actual = iter.next(); - assertEquals(expectedDay4, actual.getRows()); + Assert.assertEquals(expectedDay4, actual.getRows()); actual = iter.next(); - assertEquals(expectedDay6, actual.getRows()); + Assert.assertEquals(expectedDay6, actual.getRows()); actual = iter.next(); - assertEquals(expectedDay7, actual.getRows()); + Assert.assertEquals(expectedDay7, actual.getRows()); actual = iter.next(); - assertEquals(expectedDay8, actual.getRows()); + Assert.assertEquals(expectedDay8, actual.getRows()); } @Test public void testNodata() { - intervals = new ArrayList<>(); intervals.add(INTERVAL_JAN_1_4); intervals.add(INTERVAL_JAN_6_8); @@ -663,8 +623,8 @@ public class RowBucketIterableTest RowBucketIterable rbi = new RowBucketIterable(seq, intervals, ONE_DAY); Iterator iter = rbi.iterator(); - assertTrue(iter.hasNext()); + Assert.assertTrue(iter.hasNext()); RowBucket actual = iter.next(); - assertEquals(Collections.emptyList(), actual.getRows()); + Assert.assertEquals(Collections.emptyList(), actual.getRows()); } } diff --git 
a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/BaseAveragerFactoryTest.java b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/BaseAveragerFactoryTest.java index 98104e675b0..30d97c81eb3 100644 --- a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/BaseAveragerFactoryTest.java +++ b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/BaseAveragerFactoryTest.java @@ -19,17 +19,15 @@ package org.apache.druid.query.movingaverage.averagers; +import org.junit.Assert; import org.junit.Before; import org.junit.Test; import java.util.Comparator; import java.util.List; -import static org.junit.Assert.assertEquals; - public class BaseAveragerFactoryTest { - private AveragerFactory fac; @Before @@ -55,14 +53,14 @@ public class BaseAveragerFactoryTest public void testGetDependentFields() { List dependentFields = fac.getDependentFields(); - assertEquals(1, dependentFields.size()); - assertEquals("field", dependentFields.get(0)); + Assert.assertEquals(1, dependentFields.size()); + Assert.assertEquals("field", dependentFields.get(0)); } @Test public void testFinalization() { - Long input = Long.valueOf(5L); - assertEquals(input, fac.finalizeComputation(input)); + Long input = 5L; + Assert.assertEquals(input, fac.finalizeComputation(input)); } } diff --git a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/BaseAveragerTest.java b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/BaseAveragerTest.java index c6e960d84b8..b51f3e72fb9 100644 --- a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/BaseAveragerTest.java +++ b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/BaseAveragerTest.java @@ -19,25 +19,17 @@ package org.apache.druid.query.movingaverage.averagers; +import org.junit.Assert; import org.junit.Test; import java.util.Collections; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotNull; -import static org.junit.Assert.assertNull; -import static org.junit.Assert.assertTrue; - -/** - * Test class for BaseAverager - */ public class BaseAveragerTest { public static class TestAverager extends BaseAverager { - public TestAverager(Class clazz, int b, String name, String field, int cycleSize) + TestAverager(Class clazz, int b, String name, String field, int cycleSize) { super(clazz, b, name, field, cycleSize); } @@ -54,10 +46,10 @@ public class BaseAveragerTest { BaseAverager avg = new TestAverager(Integer.class, 5, "test", "field", 1); - assertEquals("test", avg.getName()); - assertEquals(5, avg.getNumBuckets()); - assertEquals(5, avg.getBuckets().length); - assertTrue(avg.getBuckets().getClass().isArray()); + Assert.assertEquals("test", avg.getName()); + Assert.assertEquals(5, avg.getNumBuckets()); + Assert.assertEquals(5, avg.getBuckets().length); + Assert.assertTrue(avg.getBuckets().getClass().isArray()); } @Test @@ -67,24 +59,24 @@ public class BaseAveragerTest Object[] buckets = avg.getBuckets(); avg.addElement(Collections.singletonMap("field", 1), Collections.emptyMap()); - assertEquals(Integer.valueOf(1), buckets[0]); - assertNull(buckets[1]); - assertNull(buckets[2]); + Assert.assertEquals(1, buckets[0]); + 
Assert.assertNull(buckets[1]); + Assert.assertNull(buckets[2]); avg.addElement(Collections.singletonMap("field", 2), Collections.emptyMap()); - assertEquals(Integer.valueOf(1), buckets[0]); - assertEquals(Integer.valueOf(2), buckets[1]); - assertNull(buckets[2]); + Assert.assertEquals(1, buckets[0]); + Assert.assertEquals(2, buckets[1]); + Assert.assertNull(buckets[2]); avg.addElement(Collections.singletonMap("field", 3), Collections.emptyMap()); - assertEquals(Integer.valueOf(1), buckets[0]); - assertEquals(Integer.valueOf(2), buckets[1]); - assertEquals(Integer.valueOf(3), buckets[2]); + Assert.assertEquals(1, buckets[0]); + Assert.assertEquals(2, buckets[1]); + Assert.assertEquals(3, buckets[2]); avg.addElement(Collections.singletonMap("field", 4), Collections.emptyMap()); - assertEquals(Integer.valueOf(4), buckets[0]); - assertEquals(Integer.valueOf(2), buckets[1]); - assertEquals(Integer.valueOf(3), buckets[2]); + Assert.assertEquals(4, buckets[0]); + Assert.assertEquals(2, buckets[1]); + Assert.assertEquals(3, buckets[2]); } @Test @@ -97,32 +89,32 @@ public class BaseAveragerTest avg.addElement(Collections.singletonMap("field", 1), Collections.emptyMap()); avg.addElement(Collections.singletonMap("field", 1), Collections.emptyMap()); - assertEquals(Integer.valueOf(1), buckets[0]); - assertEquals(Integer.valueOf(1), buckets[1]); - assertEquals(Integer.valueOf(1), buckets[2]); + Assert.assertEquals(1, buckets[0]); + Assert.assertEquals(1, buckets[1]); + Assert.assertEquals(1, buckets[2]); avg.skip(); - assertNull(buckets[0]); - assertNotNull(buckets[1]); - assertNotNull(buckets[2]); + Assert.assertNull(buckets[0]); + Assert.assertNotNull(buckets[1]); + Assert.assertNotNull(buckets[2]); avg.skip(); - assertNull(buckets[0]); - assertNull(buckets[1]); - assertNotNull(buckets[2]); + Assert.assertNull(buckets[0]); + Assert.assertNull(buckets[1]); + Assert.assertNotNull(buckets[2]); avg.skip(); - assertNull(buckets[0]); - assertNull(buckets[1]); - assertNull(buckets[2]); + Assert.assertNull(buckets[0]); + Assert.assertNull(buckets[1]); + Assert.assertNull(buckets[2]); // poke some test data into the array - buckets[0] = Integer.valueOf(1); + buckets[0] = 1; avg.skip(); - assertNull(buckets[0]); - assertNull(buckets[1]); - assertNull(buckets[2]); + Assert.assertNull(buckets[0]); + Assert.assertNull(buckets[1]); + Assert.assertNull(buckets[2]); } @Test @@ -130,16 +122,16 @@ public class BaseAveragerTest { BaseAverager avg = new TestAverager(Integer.class, 3, "test", "field", 1); - assertFalse(avg.hasData()); + Assert.assertFalse(avg.hasData()); avg.addElement(Collections.singletonMap("field", 1), Collections.emptyMap()); - assertTrue(avg.hasData()); + Assert.assertTrue(avg.hasData()); avg.skip(); avg.skip(); avg.skip(); - assertFalse(avg.hasData()); + Assert.assertFalse(avg.hasData()); } @Test @@ -147,10 +139,9 @@ public class BaseAveragerTest { BaseAverager avg = new TestAverager(Integer.class, 3, "test", "field", 1); - assertNull(avg.getResult()); + Assert.assertNull(avg.getResult()); avg.addElement(Collections.singletonMap("field", 1), Collections.emptyMap()); - assertEquals(Integer.valueOf(1), avg.getResult()); + Assert.assertEquals(Integer.valueOf(1), avg.getResult()); } - } diff --git a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMaxAveragerFactoryTest.java b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMaxAveragerFactoryTest.java index 773cae4843b..ef8c8a76040 
100644 --- a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMaxAveragerFactoryTest.java +++ b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMaxAveragerFactoryTest.java @@ -19,20 +19,16 @@ package org.apache.druid.query.movingaverage.averagers; +import org.hamcrest.CoreMatchers; +import org.junit.Assert; import org.junit.Test; -import static org.hamcrest.CoreMatchers.instanceOf; -import static org.junit.Assert.assertThat; - - public class DoubleMaxAveragerFactoryTest { - @Test public void testCreateAverager() { AveragerFactory fac = new DoubleMaxAveragerFactory("test", 5, 1, "field"); - assertThat(fac.createAverager(), instanceOf(DoubleMaxAverager.class)); + Assert.assertThat(fac.createAverager(), CoreMatchers.instanceOf(DoubleMaxAverager.class)); } - } diff --git a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMaxAveragerTest.java b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMaxAveragerTest.java index e1ba10fad19..95034d02101 100644 --- a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMaxAveragerTest.java +++ b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMaxAveragerTest.java @@ -19,39 +19,36 @@ package org.apache.druid.query.movingaverage.averagers; +import org.junit.Assert; import org.junit.Test; import java.util.Collections; import java.util.HashMap; -import static org.junit.Assert.assertEquals; - public class DoubleMaxAveragerTest { - @Test public void testComputeResult() { BaseAverager avg = new DoubleMaxAverager(3, "test", "field", 1); - assertEquals(Double.NEGATIVE_INFINITY, avg.computeResult(), 0.0); + Assert.assertEquals(Double.NEGATIVE_INFINITY, avg.computeResult(), 0.0); avg.addElement(Collections.singletonMap("field", -1.1e100), new HashMap<>()); - assertEquals(-1.1e100, avg.computeResult(), 0.0); + Assert.assertEquals(-1.1e100, avg.computeResult(), 0.0); avg.addElement(Collections.singletonMap("field", 1.0), new HashMap<>()); - assertEquals(1.0, avg.computeResult(), 0.0); + Assert.assertEquals(1.0, avg.computeResult(), 0.0); - avg.addElement(Collections.singletonMap("field", new Integer(1)), new HashMap<>()); - assertEquals(1.0, avg.computeResult(), 0.0); + avg.addElement(Collections.singletonMap("field", 1), new HashMap<>()); + Assert.assertEquals(1.0, avg.computeResult(), 0.0); avg.addElement(Collections.singletonMap("field", 5.0), new HashMap<>()); avg.addElement(Collections.singletonMap("field", 3.0), new HashMap<>()); avg.addElement(Collections.singletonMap("field", 2.0), new HashMap<>()); - assertEquals(5.0, avg.computeResult(), 0.0); + Assert.assertEquals(5.0, avg.computeResult(), 0.0); avg.skip(); - assertEquals(3.0, avg.computeResult(), 0.0); + Assert.assertEquals(3.0, avg.computeResult(), 0.0); } - } diff --git a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMeanAveragerFactoryTest.java b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMeanAveragerFactoryTest.java index 68d9b67024f..d4d85f44550 100644 --- a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMeanAveragerFactoryTest.java +++ 
b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMeanAveragerFactoryTest.java @@ -19,19 +19,16 @@ package org.apache.druid.query.movingaverage.averagers; +import org.hamcrest.core.IsInstanceOf; +import org.junit.Assert; import org.junit.Test; -import static org.hamcrest.core.IsInstanceOf.instanceOf; -import static org.junit.Assert.assertThat; - public class DoubleMeanAveragerFactoryTest { - @Test public void testCreateAverager() { AveragerFactory fac = new DoubleMeanAveragerFactory("test", 5, 1, "field"); - assertThat(fac.createAverager(), instanceOf(DoubleMeanAverager.class)); + Assert.assertThat(fac.createAverager(), IsInstanceOf.instanceOf(DoubleMeanAverager.class)); } - } diff --git a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMeanAveragerTest.java b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMeanAveragerTest.java index 0d5f2c7cc8f..bc3d20d19dd 100644 --- a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMeanAveragerTest.java +++ b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMeanAveragerTest.java @@ -19,40 +19,36 @@ package org.apache.druid.query.movingaverage.averagers; +import org.junit.Assert; import org.junit.Test; import java.util.Collections; import java.util.HashMap; -import static org.junit.Assert.assertEquals; - public class DoubleMeanAveragerTest { - @Test public void testComputeResult() { BaseAverager avg = new DoubleMeanAverager(3, "test", "field", 1); - assertEquals(0.0, avg.computeResult(), 0.0); + Assert.assertEquals(0.0, avg.computeResult(), 0.0); avg.addElement(Collections.singletonMap("field", 3.0), new HashMap<>()); - assertEquals(1.0, avg.computeResult(), 0.0); + Assert.assertEquals(1.0, avg.computeResult(), 0.0); avg.addElement(Collections.singletonMap("field", 3.0), new HashMap<>()); - assertEquals(2.0, avg.computeResult(), 0.0); + Assert.assertEquals(2.0, avg.computeResult(), 0.0); - avg.addElement(Collections.singletonMap("field", new Integer(0)), new HashMap<>()); - assertEquals(2.0, avg.computeResult(), 0.0); + avg.addElement(Collections.singletonMap("field", 0), new HashMap<>()); + Assert.assertEquals(2.0, avg.computeResult(), 0.0); avg.addElement(Collections.singletonMap("field", 2.0), new HashMap<>()); avg.addElement(Collections.singletonMap("field", 2.0), new HashMap<>()); avg.addElement(Collections.singletonMap("field", 2.0), new HashMap<>()); - assertEquals(2.0, avg.computeResult(), 0.0); + Assert.assertEquals(2.0, avg.computeResult(), 0.0); avg.skip(); - assertEquals(4.0 / 3, avg.computeResult(), 0.0); - + Assert.assertEquals(4.0 / 3, avg.computeResult(), 0.0); } - } diff --git a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMeanAveragerWithPeriodTest.java b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMeanAveragerWithPeriodTest.java index 8cde307c88b..ec3ffdacea9 100644 --- a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMeanAveragerWithPeriodTest.java +++ b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMeanAveragerWithPeriodTest.java @@ -19,16 +19,14 @@ package 
org.apache.druid.query.movingaverage.averagers; +import org.junit.Assert; import org.junit.Test; import java.util.Collections; import java.util.HashMap; -import static org.junit.Assert.assertEquals; - public class DoubleMeanAveragerWithPeriodTest { - @Test public void testComputeResult() { @@ -49,10 +47,10 @@ public class DoubleMeanAveragerWithPeriodTest averager.addElement(Collections.singletonMap("field", 5.0), new HashMap<>()); averager.addElement(Collections.singletonMap("field", 6.0), new HashMap<>()); - assertEquals(7, averager.computeResult(), 0.0); // (7+7)/2 + Assert.assertEquals(7, averager.computeResult(), 0.0); // (7+7)/2 averager.addElement(Collections.singletonMap("field", 3.0), new HashMap<>()); - assertEquals(1, averager.computeResult(), 0.0); // (1+1)/2 + Assert.assertEquals(1, averager.computeResult(), 0.0); // (1+1)/2 BaseAverager averager1 = new DoubleMeanAverager(14, "test", "field", 3); @@ -71,11 +69,10 @@ public class DoubleMeanAveragerWithPeriodTest averager1.addElement(Collections.singletonMap("field", 1.0), new HashMap<>()); averager1.addElement(Collections.singletonMap("field", 2.0), new HashMap<>()); - assertEquals(1, averager1.computeResult(), 0.0); // (1+1+1+1+1)/5 + Assert.assertEquals(1, averager1.computeResult(), 0.0); // (1+1+1+1+1)/5 - assertEquals(2, averager1.computeResult(), 0.0); // (2+2+2+2+2)/5 - - assertEquals(13.0 / 5, averager1.computeResult(), 0.0); // (3+3+3+3+1)/5 + Assert.assertEquals(2, averager1.computeResult(), 0.0); // (2+2+2+2+2)/5 + Assert.assertEquals(13.0 / 5, averager1.computeResult(), 0.0); // (3+3+3+3+1)/5 } } diff --git a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMeanNoNullAveragerFactoryTest.java b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMeanNoNullAveragerFactoryTest.java index 9359fc2cefe..6afbd478b52 100644 --- a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMeanNoNullAveragerFactoryTest.java +++ b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMeanNoNullAveragerFactoryTest.java @@ -19,19 +19,16 @@ package org.apache.druid.query.movingaverage.averagers; +import org.hamcrest.core.IsInstanceOf; +import org.junit.Assert; import org.junit.Test; -import static org.hamcrest.core.IsInstanceOf.instanceOf; -import static org.junit.Assert.assertThat; - public class DoubleMeanNoNullAveragerFactoryTest { - @Test public void testCreateAverager() { AveragerFactory fac = new DoubleMeanNoNullAveragerFactory("test", 5, 1, "field"); - assertThat(fac.createAverager(), instanceOf(DoubleMeanNoNullAverager.class)); + Assert.assertThat(fac.createAverager(), IsInstanceOf.instanceOf(DoubleMeanNoNullAverager.class)); } - } diff --git a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMeanNoNullAveragerTest.java b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMeanNoNullAveragerTest.java index 6d946e4835c..190fc84e1eb 100644 --- a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMeanNoNullAveragerTest.java +++ b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMeanNoNullAveragerTest.java @@ -19,45 +19,43 @@ package org.apache.druid.query.movingaverage.averagers; +import 
org.junit.Assert; import org.junit.Test; import java.util.Collections; import java.util.HashMap; -import static org.junit.Assert.assertEquals; - public class DoubleMeanNoNullAveragerTest { - @Test public void testComputeResult() { BaseAverager avg = new DoubleMeanNoNullAverager(3, "test", "field", 1); - assertEquals(Double.NaN, avg.computeResult(), 0.0); + Assert.assertEquals(Double.NaN, avg.computeResult(), 0.0); avg.addElement(Collections.singletonMap("field", 3.0), new HashMap<>()); - assertEquals(3.0, avg.computeResult(), 0.0); + Assert.assertEquals(3.0, avg.computeResult(), 0.0); avg.addElement(Collections.singletonMap("field", 3.0), new HashMap<>()); - assertEquals(3.0, avg.computeResult(), 0.0); + Assert.assertEquals(3.0, avg.computeResult(), 0.0); - avg.addElement(Collections.singletonMap("field", new Integer(0)), new HashMap<>()); - assertEquals(2.0, avg.computeResult(), 0.0); + avg.addElement(Collections.singletonMap("field", 0), new HashMap<>()); + Assert.assertEquals(2.0, avg.computeResult(), 0.0); avg.addElement(Collections.singletonMap("field", 2.0), new HashMap<>()); avg.addElement(Collections.singletonMap("field", 2.0), new HashMap<>()); avg.addElement(Collections.singletonMap("field", 2.0), new HashMap<>()); - assertEquals(2.0, avg.computeResult(), 0.0); + Assert.assertEquals(2.0, avg.computeResult(), 0.0); avg.skip(); - assertEquals(2.0, avg.computeResult(), 0.0); + Assert.assertEquals(2.0, avg.computeResult(), 0.0); // testing cycleSize functionality BaseAverager averager = new DoubleMeanNoNullAverager(14, "test", "field", 7); averager.addElement(Collections.singletonMap("field", 2.0), new HashMap<>()); - assertEquals(2.0, averager.computeResult(), 0.0); + Assert.assertEquals(2.0, averager.computeResult(), 0.0); averager.addElement(Collections.singletonMap("field", 4.0), new HashMap<>()); averager.addElement(Collections.singletonMap("field", 5.0), new HashMap<>()); @@ -73,10 +71,9 @@ public class DoubleMeanNoNullAveragerTest averager.addElement(Collections.singletonMap("field", 15.0), new HashMap<>()); averager.addElement(Collections.singletonMap("field", 16.0), new HashMap<>()); - assertEquals(7.5, averager.computeResult(), 0.0); + Assert.assertEquals(7.5, averager.computeResult(), 0.0); averager.addElement(Collections.singletonMap("field", 3.0), new HashMap<>()); - assertEquals(8.5, averager.computeResult(), 0.0); + Assert.assertEquals(8.5, averager.computeResult(), 0.0); } - } diff --git a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMinAveragerFactoryTest.java b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMinAveragerFactoryTest.java index ef2bb6f0882..61250215afe 100644 --- a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMinAveragerFactoryTest.java +++ b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMinAveragerFactoryTest.java @@ -19,19 +19,16 @@ package org.apache.druid.query.movingaverage.averagers; +import org.hamcrest.core.IsInstanceOf; +import org.junit.Assert; import org.junit.Test; -import static org.hamcrest.core.IsInstanceOf.instanceOf; -import static org.junit.Assert.assertThat; - public class DoubleMinAveragerFactoryTest { - @Test public void testCreateAverager() { AveragerFactory fac = new DoubleMinAveragerFactory("test", 5, 1, "field"); - assertThat(fac.createAverager(), instanceOf(DoubleMinAverager.class)); + 
Assert.assertThat(fac.createAverager(), IsInstanceOf.instanceOf(DoubleMinAverager.class)); } - } diff --git a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMinAveragerTest.java b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMinAveragerTest.java index 02fd2c2cec8..0fcabf52f6f 100644 --- a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMinAveragerTest.java +++ b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/DoubleMinAveragerTest.java @@ -19,40 +19,37 @@ package org.apache.druid.query.movingaverage.averagers; +import org.junit.Assert; import org.junit.Test; import java.util.Collections; import java.util.HashMap; -import static org.junit.Assert.assertEquals; - public class DoubleMinAveragerTest { - @Test public void testComputeResult() { BaseAverager avg = new DoubleMinAverager(3, "test", "field", 1); - assertEquals(Double.POSITIVE_INFINITY, avg.computeResult(), 0.0); + Assert.assertEquals(Double.POSITIVE_INFINITY, avg.computeResult(), 0.0); avg.addElement(Collections.singletonMap("field", -1.1e100), new HashMap<>()); - assertEquals(-1.1e100, avg.computeResult(), 0.0); + Assert.assertEquals(-1.1e100, avg.computeResult(), 0.0); avg.addElement(Collections.singletonMap("field", 1.0), new HashMap<>()); - assertEquals(-1.1e100, avg.computeResult(), 0.0); + Assert.assertEquals(-1.1e100, avg.computeResult(), 0.0); avg.addElement(Collections.singletonMap("field", new Integer(1)), new HashMap<>()); - assertEquals(-1.1e100, avg.computeResult(), 0.0); + Assert.assertEquals(-1.1e100, avg.computeResult(), 0.0); avg.addElement(Collections.singletonMap("field", 5.0), new HashMap<>()); avg.addElement(Collections.singletonMap("field", 2.0), new HashMap<>()); avg.addElement(Collections.singletonMap("field", 3.0), new HashMap<>()); - assertEquals(2.0, avg.computeResult(), 0.0); + Assert.assertEquals(2.0, avg.computeResult(), 0.0); avg.skip(); avg.skip(); - assertEquals(3.0, avg.computeResult(), 0.0); + Assert.assertEquals(3.0, avg.computeResult(), 0.0); } - } diff --git a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongMaxAveragerFactoryTest.java b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongMaxAveragerFactoryTest.java index 7601a5d4915..0f429e787cf 100644 --- a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongMaxAveragerFactoryTest.java +++ b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongMaxAveragerFactoryTest.java @@ -19,19 +19,16 @@ package org.apache.druid.query.movingaverage.averagers; +import org.hamcrest.core.IsInstanceOf; +import org.junit.Assert; import org.junit.Test; -import static org.hamcrest.core.IsInstanceOf.instanceOf; -import static org.junit.Assert.assertThat; - public class LongMaxAveragerFactoryTest { - @Test public void testCreateAverager() { AveragerFactory fac = new LongMaxAveragerFactory("test", 5, 1, "field"); - assertThat(fac.createAverager(), instanceOf(LongMaxAverager.class)); + Assert.assertThat(fac.createAverager(), IsInstanceOf.instanceOf(LongMaxAverager.class)); } - } diff --git a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongMaxAveragerTest.java 
b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongMaxAveragerTest.java index c799a1ad60b..825d1c1cac2 100644 --- a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongMaxAveragerTest.java +++ b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongMaxAveragerTest.java @@ -19,39 +19,36 @@ package org.apache.druid.query.movingaverage.averagers; +import org.junit.Assert; import org.junit.Test; import java.util.Collections; import java.util.HashMap; -import static org.junit.Assert.assertEquals; - public class LongMaxAveragerTest { - @Test public void testComputeResult() { BaseAverager avg = new LongMaxAverager(3, "test", "field", 1); - assertEquals(Long.MIN_VALUE, (long) avg.computeResult()); + Assert.assertEquals(Long.MIN_VALUE, (long) avg.computeResult()); avg.addElement(Collections.singletonMap("field", -1000000L), new HashMap<>()); - assertEquals(-1000000, (long) avg.computeResult()); + Assert.assertEquals(-1000000, (long) avg.computeResult()); avg.addElement(Collections.singletonMap("field", 1L), new HashMap<>()); - assertEquals(1, (long) avg.computeResult()); + Assert.assertEquals(1, (long) avg.computeResult()); - avg.addElement(Collections.singletonMap("field", new Integer(1)), new HashMap<>()); - assertEquals(1, (long) avg.computeResult()); + avg.addElement(Collections.singletonMap("field", 1), new HashMap<>()); + Assert.assertEquals(1, (long) avg.computeResult()); avg.addElement(Collections.singletonMap("field", 5L), new HashMap<>()); avg.addElement(Collections.singletonMap("field", 3L), new HashMap<>()); avg.addElement(Collections.singletonMap("field", 2L), new HashMap<>()); - assertEquals(5, (long) avg.computeResult()); + Assert.assertEquals(5, (long) avg.computeResult()); avg.skip(); - assertEquals(3, (long) avg.computeResult()); + Assert.assertEquals(3, (long) avg.computeResult()); } - } diff --git a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongMeanAveragerFactoryTest.java b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongMeanAveragerFactoryTest.java index 763d9c75321..3b5b3e56be7 100644 --- a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongMeanAveragerFactoryTest.java +++ b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongMeanAveragerFactoryTest.java @@ -19,19 +19,16 @@ package org.apache.druid.query.movingaverage.averagers; +import org.hamcrest.core.IsInstanceOf; +import org.junit.Assert; import org.junit.Test; -import static org.hamcrest.core.IsInstanceOf.instanceOf; -import static org.junit.Assert.assertThat; - public class LongMeanAveragerFactoryTest { - @Test public void testCreateAverager() { AveragerFactory fac = new LongMeanAveragerFactory("test", 5, 1, "field"); - assertThat(fac.createAverager(), instanceOf(LongMeanAverager.class)); + Assert.assertThat(fac.createAverager(), IsInstanceOf.instanceOf(LongMeanAverager.class)); } - } diff --git a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongMeanAveragerTest.java b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongMeanAveragerTest.java index cb037a23314..8ff63de2e80 100644 --- 
a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongMeanAveragerTest.java +++ b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongMeanAveragerTest.java @@ -19,39 +19,36 @@ package org.apache.druid.query.movingaverage.averagers; +import org.junit.Assert; import org.junit.Test; import java.util.Collections; import java.util.HashMap; -import static org.junit.Assert.assertEquals; - public class LongMeanAveragerTest { - @Test public void testComputeResult() { BaseAverager avg = new LongMeanAverager(3, "test", "field", 1); - assertEquals(0.0, avg.computeResult(), 0.0); + Assert.assertEquals(0.0, avg.computeResult(), 0.0); avg.addElement(Collections.singletonMap("field", 3L), new HashMap<>()); - assertEquals(1.0, avg.computeResult(), 0.0); + Assert.assertEquals(1.0, avg.computeResult(), 0.0); avg.addElement(Collections.singletonMap("field", 3L), new HashMap<>()); - assertEquals(2.0, avg.computeResult(), 0.0); + Assert.assertEquals(2.0, avg.computeResult(), 0.0); avg.addElement(Collections.singletonMap("field", 3), new HashMap<>()); - assertEquals(3.0, avg.computeResult(), 0.0); + Assert.assertEquals(3.0, avg.computeResult(), 0.0); avg.addElement(Collections.singletonMap("field", 2L), new HashMap<>()); avg.addElement(Collections.singletonMap("field", 2L), new HashMap<>()); avg.addElement(Collections.singletonMap("field", 2L), new HashMap<>()); - assertEquals(2.0, avg.computeResult(), 0.0); + Assert.assertEquals(2.0, avg.computeResult(), 0.0); avg.skip(); - assertEquals(4.0 / 3, avg.computeResult(), 0.0); + Assert.assertEquals(4.0 / 3, avg.computeResult(), 0.0); } - } diff --git a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongMeanNoNullAveragerFactoryTest.java b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongMeanNoNullAveragerFactoryTest.java index f3c4dac4902..fb3e33eaa5e 100644 --- a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongMeanNoNullAveragerFactoryTest.java +++ b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongMeanNoNullAveragerFactoryTest.java @@ -19,19 +19,16 @@ package org.apache.druid.query.movingaverage.averagers; +import org.hamcrest.core.IsInstanceOf; +import org.junit.Assert; import org.junit.Test; -import static org.hamcrest.core.IsInstanceOf.instanceOf; -import static org.junit.Assert.assertThat; - public class LongMeanNoNullAveragerFactoryTest { - @Test public void testCreateAverager() { AveragerFactory fac = new LongMeanNoNullAveragerFactory("test", 5, 1, "field"); - assertThat(fac.createAverager(), instanceOf(LongMeanNoNullAverager.class)); + Assert.assertThat(fac.createAverager(), IsInstanceOf.instanceOf(LongMeanNoNullAverager.class)); } - } diff --git a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongMeanNoNullAveragerTest.java b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongMeanNoNullAveragerTest.java index 0681db77ffc..2c16d052f71 100644 --- a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongMeanNoNullAveragerTest.java +++ b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongMeanNoNullAveragerTest.java @@ -19,39 
+19,37 @@ package org.apache.druid.query.movingaverage.averagers; +import org.junit.Assert; import org.junit.Test; import java.util.Collections; import java.util.HashMap; -import static org.junit.Assert.assertEquals; - public class LongMeanNoNullAveragerTest { - @Test public void testComputeResult() { BaseAverager avg = new LongMeanNoNullAverager(3, "test", "field", 1); - assertEquals(Double.NaN, avg.computeResult(), 0.0); + Assert.assertEquals(Double.NaN, avg.computeResult(), 0.0); avg.addElement(Collections.singletonMap("field", 3L), new HashMap<>()); - assertEquals(3.0, avg.computeResult(), 0.0); + Assert.assertEquals(3.0, avg.computeResult(), 0.0); avg.addElement(Collections.singletonMap("field", 3L), new HashMap<>()); - assertEquals(3.0, avg.computeResult(), 0.0); + Assert.assertEquals(3.0, avg.computeResult(), 0.0); - avg.addElement(Collections.singletonMap("field", new Integer(0)), new HashMap<>()); - assertEquals(2.0, avg.computeResult(), 0.0); + avg.addElement(Collections.singletonMap("field", 0), new HashMap<>()); + Assert.assertEquals(2.0, avg.computeResult(), 0.0); avg.addElement(Collections.singletonMap("field", 2L), new HashMap<>()); avg.addElement(Collections.singletonMap("field", 2L), new HashMap<>()); avg.addElement(Collections.singletonMap("field", 2L), new HashMap<>()); - assertEquals(2.0, avg.computeResult(), 0.0); + Assert.assertEquals(2.0, avg.computeResult(), 0.0); avg.skip(); - assertEquals(2.0, avg.computeResult(), 0.0); + Assert.assertEquals(2.0, avg.computeResult(), 0.0); } } diff --git a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongMinAveragerFactoryTest.java b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongMinAveragerFactoryTest.java index 067f6b223a1..9dca156bc63 100644 --- a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongMinAveragerFactoryTest.java +++ b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongMinAveragerFactoryTest.java @@ -19,19 +19,16 @@ package org.apache.druid.query.movingaverage.averagers; +import org.hamcrest.core.IsInstanceOf; +import org.junit.Assert; import org.junit.Test; -import static org.hamcrest.core.IsInstanceOf.instanceOf; -import static org.junit.Assert.assertThat; - public class LongMinAveragerFactoryTest { - @Test public void testCreateAverager() { AveragerFactory fac = new LongMinAveragerFactory("test", 5, 1, "field"); - assertThat(fac.createAverager(), instanceOf(LongMinAverager.class)); + Assert.assertThat(fac.createAverager(), IsInstanceOf.instanceOf(LongMinAverager.class)); } - } diff --git a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongMinAveragerTest.java b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongMinAveragerTest.java index 4cbcdaed4c0..cdf80d76ea8 100644 --- a/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongMinAveragerTest.java +++ b/extensions-contrib/moving-average-query/src/test/java/org/apache/druid/query/movingaverage/averagers/LongMinAveragerTest.java @@ -19,40 +19,37 @@ package org.apache.druid.query.movingaverage.averagers; +import org.junit.Assert; import org.junit.Test; import java.util.Collections; import java.util.HashMap; -import static org.junit.Assert.assertEquals; - public class LongMinAveragerTest { - 
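Aside: the averager tests in this part of the patch also swap new Integer(...) for plain literals, since the boxed-primitive constructors are deprecated and autoboxing goes through Integer.valueOf anyway. A small standalone sketch of that equivalence — the map-shaped event mirrors these tests, but the class itself is hypothetical:

import java.util.Collections;
import java.util.Map;

import org.junit.Assert;
import org.junit.Test;

public class AutoboxingExample
{
  @Test
  public void literalsBoxToEquivalentValues()
  {
    // new Integer(1) is deprecated; the literal is autoboxed via Integer.valueOf(1).
    Map<String, Object> event = Collections.singletonMap("field", 1);

    Number value = (Number) event.get("field");
    Assert.assertEquals(Integer.valueOf(1), event.get("field"));
    Assert.assertEquals(1L, value.longValue());
    Assert.assertEquals(1.0, value.doubleValue(), 0.0);
  }
}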
@Test public void testComputeResult() { BaseAverager avg = new LongMinAverager(3, "test", "field", 1); - assertEquals(Long.MAX_VALUE, (long) avg.computeResult()); + Assert.assertEquals(Long.MAX_VALUE, (long) avg.computeResult()); avg.addElement(Collections.singletonMap("field", -10000L), new HashMap<>()); - assertEquals(-10000, (long) avg.computeResult()); + Assert.assertEquals(-10000, (long) avg.computeResult()); avg.addElement(Collections.singletonMap("field", 1L), new HashMap<>()); - assertEquals(-10000, (long) avg.computeResult()); + Assert.assertEquals(-10000, (long) avg.computeResult()); - avg.addElement(Collections.singletonMap("field", new Integer(1000)), new HashMap<>()); - assertEquals(-10000, (long) avg.computeResult()); + avg.addElement(Collections.singletonMap("field", 1000), new HashMap<>()); + Assert.assertEquals(-10000, (long) avg.computeResult()); avg.addElement(Collections.singletonMap("field", 5L), new HashMap<>()); avg.addElement(Collections.singletonMap("field", 2L), new HashMap<>()); avg.addElement(Collections.singletonMap("field", 3L), new HashMap<>()); - assertEquals(2, (long) avg.computeResult()); + Assert.assertEquals(2, (long) avg.computeResult()); avg.skip(); avg.skip(); - assertEquals(3, (long) avg.computeResult()); + Assert.assertEquals(3, (long) avg.computeResult()); } - } diff --git a/extensions-contrib/opentsdb-emitter/src/test/java/org/apache/druid/emitter/opentsdb/EventConverterTest.java b/extensions-contrib/opentsdb-emitter/src/test/java/org/apache/druid/emitter/opentsdb/EventConverterTest.java index 7b3f205c141..d241d309957 100644 --- a/extensions-contrib/opentsdb-emitter/src/test/java/org/apache/druid/emitter/opentsdb/EventConverterTest.java +++ b/extensions-contrib/opentsdb-emitter/src/test/java/org/apache/druid/emitter/opentsdb/EventConverterTest.java @@ -30,8 +30,6 @@ import org.junit.Test; import java.util.HashMap; import java.util.Map; -import static org.junit.Assert.assertEquals; - public class EventConverterTest { private EventConverter converter; @@ -66,16 +64,16 @@ public class EventConverterTest expectedTags.put("type", "groupBy"); OpentsdbEvent opentsdbEvent = converter.convert(configuredEvent); - assertEquals("query.time", opentsdbEvent.getMetric()); - assertEquals(dateTime.getMillis() / 1000L, opentsdbEvent.getTimestamp()); - assertEquals(10, opentsdbEvent.getValue()); - assertEquals(expectedTags, opentsdbEvent.getTags()); + Assert.assertEquals("query.time", opentsdbEvent.getMetric()); + Assert.assertEquals(dateTime.getMillis() / 1000L, opentsdbEvent.getTimestamp()); + Assert.assertEquals(10, opentsdbEvent.getValue()); + Assert.assertEquals(expectedTags, opentsdbEvent.getTags()); ServiceMetricEvent notConfiguredEvent = new ServiceMetricEvent.Builder() .setDimension("dataSource", "data-source") .setDimension("type", "groupBy") .build(dateTime, "foo/bar", 10) .build("broker", "brokerHost1"); - assertEquals(null, converter.convert(notConfiguredEvent)); + Assert.assertNull(converter.convert(notConfiguredEvent)); } } diff --git a/extensions-contrib/statsd-emitter/src/test/java/org/apache/druid/emitter/statsd/DimensionConverterTest.java b/extensions-contrib/statsd-emitter/src/test/java/org/apache/druid/emitter/statsd/DimensionConverterTest.java index 8efdb768639..c6bb65281bb 100644 --- a/extensions-contrib/statsd-emitter/src/test/java/org/apache/druid/emitter/statsd/DimensionConverterTest.java +++ b/extensions-contrib/statsd-emitter/src/test/java/org/apache/druid/emitter/statsd/DimensionConverterTest.java @@ -23,12 +23,9 @@ import 
com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.collect.ImmutableMap; import org.apache.druid.java.util.common.DateTimes; import org.apache.druid.java.util.emitter.service.ServiceMetricEvent; +import org.junit.Assert; import org.junit.Test; -import static org.junit.Assert.assertEquals; - -/** - */ public class DimensionConverterTest { @Test @@ -56,10 +53,10 @@ public class DimensionConverterTest event.getUserDims(), actual ); - assertEquals("correct StatsDMetric.Type", StatsDMetric.Type.timer, statsDMetric.type); + Assert.assertEquals("correct StatsDMetric.Type", StatsDMetric.Type.timer, statsDMetric.type); ImmutableMap.Builder expected = new ImmutableMap.Builder<>(); expected.put("dataSource", "data-source"); expected.put("type", "groupBy"); - assertEquals("correct Dimensions", expected.build(), actual.build()); + Assert.assertEquals("correct Dimensions", expected.build(), actual.build()); } } diff --git a/extensions-contrib/statsd-emitter/src/test/java/org/apache/druid/emitter/statsd/StatsDEmitterTest.java b/extensions-contrib/statsd-emitter/src/test/java/org/apache/druid/emitter/statsd/StatsDEmitterTest.java index 4bc763becf2..40f37ea5f1c 100644 --- a/extensions-contrib/statsd-emitter/src/test/java/org/apache/druid/emitter/statsd/StatsDEmitterTest.java +++ b/extensions-contrib/statsd-emitter/src/test/java/org/apache/druid/emitter/statsd/StatsDEmitterTest.java @@ -23,65 +23,60 @@ import com.fasterxml.jackson.databind.ObjectMapper; import com.timgroup.statsd.StatsDClient; import org.apache.druid.java.util.common.DateTimes; import org.apache.druid.java.util.emitter.service.ServiceMetricEvent; +import org.easymock.EasyMock; import org.junit.Test; -import static org.easymock.EasyMock.createMock; -import static org.easymock.EasyMock.replay; -import static org.easymock.EasyMock.verify; - -/** - */ public class StatsDEmitterTest { @Test public void testConvertRange() { - StatsDClient client = createMock(StatsDClient.class); + StatsDClient client = EasyMock.createMock(StatsDClient.class); StatsDEmitter emitter = new StatsDEmitter( new StatsDEmitterConfig("localhost", 8888, null, null, null, null, null, null, null), new ObjectMapper(), client ); - client.gauge("broker.query.cache.total.hitRate", 54, new String[0]); - replay(client); + client.gauge("broker.query.cache.total.hitRate", 54); + EasyMock.replay(client); emitter.emit(new ServiceMetricEvent.Builder() .setDimension("dataSource", "data-source") .build(DateTimes.nowUtc(), "query/cache/total/hitRate", 0.54) .build("broker", "brokerHost1") ); - verify(client); + EasyMock.verify(client); } @Test public void testConvertRangeWithDogstatsd() { - StatsDClient client = createMock(StatsDClient.class); + StatsDClient client = EasyMock.createMock(StatsDClient.class); StatsDEmitter emitter = new StatsDEmitter( new StatsDEmitterConfig("localhost", 8888, null, null, null, null, null, true, null), new ObjectMapper(), client ); - client.gauge("broker.query.cache.total.hitRate", 0.54, new String[0]); - replay(client); + client.gauge("broker.query.cache.total.hitRate", 0.54); + EasyMock.replay(client); emitter.emit(new ServiceMetricEvent.Builder() .setDimension("dataSource", "data-source") .build(DateTimes.nowUtc(), "query/cache/total/hitRate", 0.54) .build("broker", "brokerHost1") ); - verify(client); + EasyMock.verify(client); } @Test public void testNoConvertRange() { - StatsDClient client = createMock(StatsDClient.class); + StatsDClient client = EasyMock.createMock(StatsDClient.class); StatsDEmitter emitter = new StatsDEmitter( new 
StatsDEmitterConfig("localhost", 8888, null, null, null, null, null, null, null), new ObjectMapper(), client ); - client.time("broker.query.time.data-source.groupBy", 10, new String[0]); - replay(client); + client.time("broker.query.time.data-source.groupBy", 10); + EasyMock.replay(client); emitter.emit(new ServiceMetricEvent.Builder() .setDimension("dataSource", "data-source") .setDimension("type", "groupBy") @@ -96,20 +91,20 @@ public class StatsDEmitterTest .build(DateTimes.nowUtc(), "query/time", 10) .build("broker", "brokerHost1") ); - verify(client); + EasyMock.verify(client); } @Test public void testConfigOptions() { - StatsDClient client = createMock(StatsDClient.class); + StatsDClient client = EasyMock.createMock(StatsDClient.class); StatsDEmitter emitter = new StatsDEmitter( new StatsDEmitterConfig("localhost", 8888, null, "#", true, null, null, null, null), new ObjectMapper(), client ); - client.time("brokerHost1#broker#query#time#data-source#groupBy", 10, new String[0]); - replay(client); + client.time("brokerHost1#broker#query#time#data-source#groupBy", 10); + EasyMock.replay(client); emitter.emit(new ServiceMetricEvent.Builder() .setDimension("dataSource", "data-source") .setDimension("type", "groupBy") @@ -124,21 +119,22 @@ public class StatsDEmitterTest .build(DateTimes.nowUtc(), "query/time", 10) .build("broker", "brokerHost1") ); - verify(client); + EasyMock.verify(client); } @Test public void testDogstatsdEnabled() { - StatsDClient client = createMock(StatsDClient.class); + StatsDClient client = EasyMock.createMock(StatsDClient.class); StatsDEmitter emitter = new StatsDEmitter( new StatsDEmitterConfig("localhost", 8888, null, "#", true, null, null, true, null), new ObjectMapper(), client ); client.time("broker#query#time", 10, - new String[] {"dataSource:data-source", "type:groupBy", "hostname:brokerHost1"}); - replay(client); + "dataSource:data-source", "type:groupBy", "hostname:brokerHost1" + ); + EasyMock.replay(client); emitter.emit(new ServiceMetricEvent.Builder() .setDimension("dataSource", "data-source") .setDimension("type", "groupBy") @@ -153,25 +149,25 @@ public class StatsDEmitterTest .build(DateTimes.nowUtc(), "query/time", 10) .build("broker", "brokerHost1") ); - verify(client); + EasyMock.verify(client); } @Test public void testBlankHolderOptions() { - StatsDClient client = createMock(StatsDClient.class); + StatsDClient client = EasyMock.createMock(StatsDClient.class); StatsDEmitter emitter = new StatsDEmitter( new StatsDEmitterConfig("localhost", 8888, null, null, true, null, null, null, null), new ObjectMapper(), client ); - client.count("brokerHost1.broker.jvm.gc.count.G1-GC", 1, new String[0]); - replay(client); + client.count("brokerHost1.broker.jvm.gc.count.G1-GC", 1); + EasyMock.replay(client); emitter.emit(new ServiceMetricEvent.Builder() .setDimension("gcName", "G1 GC") .build(DateTimes.nowUtc(), "jvm/gc/count", 1) .build("broker", "brokerHost1") ); - verify(client); + EasyMock.verify(client); } } diff --git a/extensions-contrib/thrift-extensions/src/test/java/org/apache/druid/data/input/thrift/ThriftInputRowParserTest.java b/extensions-contrib/thrift-extensions/src/test/java/org/apache/druid/data/input/thrift/ThriftInputRowParserTest.java index 3d890a7c9ff..db98c9e70db 100644 --- a/extensions-contrib/thrift-extensions/src/test/java/org/apache/druid/data/input/thrift/ThriftInputRowParserTest.java +++ b/extensions-contrib/thrift-extensions/src/test/java/org/apache/druid/data/input/thrift/ThriftInputRowParserTest.java @@ -39,6 +39,7 @@ import 
org.apache.thrift.protocol.TBinaryProtocol; import org.apache.thrift.protocol.TCompactProtocol; import org.apache.thrift.protocol.TJSONProtocol; import org.hamcrest.CoreMatchers; +import org.junit.Assert; import org.junit.Before; import org.junit.Rule; import org.junit.Test; @@ -46,9 +47,6 @@ import org.junit.rules.ExpectedException; import java.nio.ByteBuffer; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - public class ThriftInputRowParserTest { @Rule @@ -82,10 +80,10 @@ public class ThriftInputRowParserTest "example/book.jar", "org.apache.druid.data.input.thrift.Book" ); - assertEquals(parser1.getThriftClass().getName(), "org.apache.druid.data.input.thrift.Book"); + Assert.assertEquals("org.apache.druid.data.input.thrift.Book", parser1.getThriftClass().getName()); ThriftInputRowParser parser2 = new ThriftInputRowParser(parseSpec, null, "org.apache.druid.data.input.thrift.Book"); - assertEquals(parser2.getThriftClass().getName(), "org.apache.druid.data.input.thrift.Book"); + Assert.assertEquals("org.apache.druid.data.input.thrift.Book", parser2.getThriftClass().getName()); } @Test @@ -144,17 +142,18 @@ public class ThriftInputRowParserTest expectedException.expect(CoreMatchers.instanceOf(IllegalStateException.class)); expectedException.expectMessage("JavaScript is disabled"); + //noinspection ResultOfMethodCallIgnored (this method call will trigger the expected exception) parser.parseBatch(ByteBuffer.allocate(1)).get(0); } - public void serializationAndTest(ThriftInputRowParser parser, byte[] bytes) + private void serializationAndTest(ThriftInputRowParser parser, byte[] bytes) { ByteBuffer buffer = ByteBuffer.wrap(bytes); InputRow row1 = parser.parseBatch(buffer).get(0); - assertTrue(row1.getDimension("title").get(0).equals("title")); + Assert.assertEquals("title", row1.getDimension("title").get(0)); InputRow row2 = parser.parseBatch(new BytesWritable(bytes)).get(0); - assertTrue(row2.getDimension("lastName").get(0).equals("last")); + Assert.assertEquals("last", row2.getDimension("lastName").get(0)); } } diff --git a/extensions-core/avro-extensions/src/test/java/org/apache/druid/data/input/AvroHadoopInputRowParserTest.java b/extensions-core/avro-extensions/src/test/java/org/apache/druid/data/input/AvroHadoopInputRowParserTest.java index 082f5e7cd62..c065234d60d 100644 --- a/extensions-core/avro-extensions/src/test/java/org/apache/druid/data/input/AvroHadoopInputRowParserTest.java +++ b/extensions-core/avro-extensions/src/test/java/org/apache/druid/data/input/AvroHadoopInputRowParserTest.java @@ -35,10 +35,6 @@ import org.junit.Test; import java.io.File; import java.io.IOException; -import static org.apache.druid.data.input.AvroStreamInputRowParserTest.DIMENSIONS; -import static org.apache.druid.data.input.AvroStreamInputRowParserTest.PARSE_SPEC; -import static org.apache.druid.data.input.AvroStreamInputRowParserTest.assertInputRowCorrect; -import static org.apache.druid.data.input.AvroStreamInputRowParserTest.buildSomeAvroDatum; public class AvroHadoopInputRowParserTest { @@ -55,7 +51,7 @@ public class AvroHadoopInputRowParserTest @Test public void testParseNotFromSpark() throws IOException { - testParse(buildSomeAvroDatum()); + testParse(AvroStreamInputRowParserTest.buildSomeAvroDatum()); } @Test @@ -66,19 +62,19 @@ public class AvroHadoopInputRowParserTest private void testParse(GenericRecord record) throws IOException { - AvroHadoopInputRowParser parser = new AvroHadoopInputRowParser(PARSE_SPEC); + AvroHadoopInputRowParser parser = new 
AvroHadoopInputRowParser(AvroStreamInputRowParserTest.PARSE_SPEC); AvroHadoopInputRowParser parser2 = jsonMapper.readValue( jsonMapper.writeValueAsBytes(parser), AvroHadoopInputRowParser.class ); InputRow inputRow = parser2.parseBatch(record).get(0); - assertInputRowCorrect(inputRow, DIMENSIONS); + AvroStreamInputRowParserTest.assertInputRowCorrect(inputRow, AvroStreamInputRowParserTest.DIMENSIONS); } private static GenericRecord buildAvroFromFile() throws IOException { return buildAvroFromFile( - buildSomeAvroDatum() + AvroStreamInputRowParserTest.buildSomeAvroDatum() ); } @@ -107,5 +103,4 @@ public class AvroHadoopInputRowParserTest return record; } - } diff --git a/extensions-core/avro-extensions/src/test/java/org/apache/druid/data/input/AvroStreamInputRowParserTest.java b/extensions-core/avro-extensions/src/test/java/org/apache/druid/data/input/AvroStreamInputRowParserTest.java index cadf7a96dd5..291210eb45c 100644 --- a/extensions-core/avro-extensions/src/test/java/org/apache/druid/data/input/AvroStreamInputRowParserTest.java +++ b/extensions-core/avro-extensions/src/test/java/org/apache/druid/data/input/AvroStreamInputRowParserTest.java @@ -45,6 +45,7 @@ import org.apache.druid.java.util.common.parsers.JSONPathFieldType; import org.apache.druid.java.util.common.parsers.JSONPathSpec; import org.joda.time.DateTime; import org.joda.time.chrono.ISOChronology; +import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.schemarepo.InMemoryRepository; @@ -68,8 +69,6 @@ import java.util.List; import java.util.Map; import java.util.regex.Pattern; -import static org.junit.Assert.assertEquals; - public class AvroStreamInputRowParserTest { private static final String EVENT_TYPE = "eventType"; @@ -123,9 +122,9 @@ public class AvroStreamInputRowParserTest private static final long SUB_LONG_VALUE = 1543698L; private static final int SUB_INT_VALUE = 4892; private static final MySubRecord SOME_RECORD_VALUE = MySubRecord.newBuilder() - .setSubInt(SUB_INT_VALUE) - .setSubLong(SUB_LONG_VALUE) - .build(); + .setSubInt(SUB_INT_VALUE) + .setSubLong(SUB_LONG_VALUE) + .build(); private static final List SOME_STRING_ARRAY_VALUE = Arrays.asList("8", "4", "2", "1"); private static final List SOME_INT_ARRAY_VALUE = Arrays.asList(1, 2, 4, 8); private static final Map SOME_INT_VALUE_MAP_VALUE = Maps.asMap( @@ -181,7 +180,7 @@ public class AvroStreamInputRowParserTest ByteBufferInputRowParser.class ); - assertEquals(parser, parser2); + Assert.assertEquals(parser, parser2); } @Test @@ -204,7 +203,7 @@ public class AvroStreamInputRowParserTest // encode schema id Avro1124SubjectAndIdConverter converter = new Avro1124SubjectAndIdConverter(TOPIC); - TypedSchemaRepository repositoryClient = new TypedSchemaRepository( + TypedSchemaRepository repositoryClient = new TypedSchemaRepository<>( repository, new IntegerConverter(), new AvroSchemaConverter(), @@ -269,25 +268,28 @@ public class AvroStreamInputRowParserTest static void assertInputRowCorrect(InputRow inputRow, List expectedDimensions) { - assertEquals(expectedDimensions, inputRow.getDimensions()); - assertEquals(1543698L, inputRow.getTimestampFromEpoch()); + Assert.assertEquals(expectedDimensions, inputRow.getDimensions()); + Assert.assertEquals(1543698L, inputRow.getTimestampFromEpoch()); // test dimensions - assertEquals(Collections.singletonList(EVENT_TYPE_VALUE), inputRow.getDimension(EVENT_TYPE)); - assertEquals(Collections.singletonList(String.valueOf(ID_VALUE)), inputRow.getDimension(ID)); - 
assertEquals(Collections.singletonList(String.valueOf(SOME_OTHER_ID_VALUE)), inputRow.getDimension(SOME_OTHER_ID)); - assertEquals(Collections.singletonList(String.valueOf(true)), inputRow.getDimension(IS_VALID)); - assertEquals( + Assert.assertEquals(Collections.singletonList(EVENT_TYPE_VALUE), inputRow.getDimension(EVENT_TYPE)); + Assert.assertEquals(Collections.singletonList(String.valueOf(ID_VALUE)), inputRow.getDimension(ID)); + Assert.assertEquals( + Collections.singletonList(String.valueOf(SOME_OTHER_ID_VALUE)), + inputRow.getDimension(SOME_OTHER_ID) + ); + Assert.assertEquals(Collections.singletonList(String.valueOf(true)), inputRow.getDimension(IS_VALID)); + Assert.assertEquals( Lists.transform(SOME_INT_ARRAY_VALUE, String::valueOf), inputRow.getDimension("someIntArray") ); - assertEquals( + Assert.assertEquals( Lists.transform(SOME_STRING_ARRAY_VALUE, String::valueOf), inputRow.getDimension("someStringArray") ); // towards Map avro field as druid dimension, need to convert its toString() back to HashMap to check equality - assertEquals(1, inputRow.getDimension("someIntValueMap").size()); - assertEquals( + Assert.assertEquals(1, inputRow.getDimension("someIntValueMap").size()); + Assert.assertEquals( SOME_INT_VALUE_MAP_VALUE, new HashMap( Maps.transformValues( @@ -307,7 +309,7 @@ public class AvroStreamInputRowParserTest ) ) ); - assertEquals( + Assert.assertEquals( SOME_STRING_VALUE_MAP_VALUE, new HashMap( Splitter @@ -316,43 +318,46 @@ public class AvroStreamInputRowParserTest .split(BRACES_AND_SPACE.matcher(inputRow.getDimension("someIntValueMap").get(0)).replaceAll("")) ) ); - assertEquals(Collections.singletonList(SOME_UNION_VALUE), inputRow.getDimension("someUnion")); - assertEquals(Collections.emptyList(), inputRow.getDimension("someNull")); - assertEquals(SOME_FIXED_VALUE, inputRow.getRaw("someFixed")); - assertEquals( + Assert.assertEquals(Collections.singletonList(SOME_UNION_VALUE), inputRow.getDimension("someUnion")); + Assert.assertEquals(Collections.emptyList(), inputRow.getDimension("someNull")); + Assert.assertEquals(SOME_FIXED_VALUE, inputRow.getRaw("someFixed")); + Assert.assertEquals( Arrays.toString(SOME_BYTES_VALUE.array()), Arrays.toString((byte[]) (inputRow.getRaw("someBytes"))) ); - assertEquals(Collections.singletonList(String.valueOf(MyEnum.ENUM1)), inputRow.getDimension("someEnum")); - assertEquals(Collections.singletonList(String.valueOf(SOME_RECORD_VALUE)), inputRow.getDimension("someRecord")); + Assert.assertEquals(Collections.singletonList(String.valueOf(MyEnum.ENUM1)), inputRow.getDimension("someEnum")); + Assert.assertEquals( + Collections.singletonList(String.valueOf(SOME_RECORD_VALUE)), + inputRow.getDimension("someRecord") + ); // test metrics - assertEquals(SOME_FLOAT_VALUE, inputRow.getMetric("someFloat").floatValue(), 0); - assertEquals(SOME_LONG_VALUE, inputRow.getMetric("someLong")); - assertEquals(SOME_INT_VALUE, inputRow.getMetric("someInt")); + Assert.assertEquals(SOME_FLOAT_VALUE, inputRow.getMetric("someFloat").floatValue(), 0); + Assert.assertEquals(SOME_LONG_VALUE, inputRow.getMetric("someLong")); + Assert.assertEquals(SOME_INT_VALUE, inputRow.getMetric("someInt")); } public static SomeAvroDatum buildSomeAvroDatum() { return SomeAvroDatum.newBuilder() - .setTimestamp(DATE_TIME.getMillis()) - .setEventType(EVENT_TYPE_VALUE) - .setId(ID_VALUE) - .setSomeOtherId(SOME_OTHER_ID_VALUE) - .setIsValid(true) - .setSomeFloat(SOME_FLOAT_VALUE) - .setSomeInt(SOME_INT_VALUE) - .setSomeLong(SOME_LONG_VALUE) - .setSomeIntArray(SOME_INT_ARRAY_VALUE) 
- .setSomeStringArray(SOME_STRING_ARRAY_VALUE) - .setSomeIntValueMap(SOME_INT_VALUE_MAP_VALUE) - .setSomeStringValueMap(SOME_STRING_VALUE_MAP_VALUE) - .setSomeUnion(SOME_UNION_VALUE) - .setSomeFixed(SOME_FIXED_VALUE) - .setSomeBytes(SOME_BYTES_VALUE) - .setSomeNull(null) - .setSomeEnum(MyEnum.ENUM1) - .setSomeRecord(SOME_RECORD_VALUE) - .build(); + .setTimestamp(DATE_TIME.getMillis()) + .setEventType(EVENT_TYPE_VALUE) + .setId(ID_VALUE) + .setSomeOtherId(SOME_OTHER_ID_VALUE) + .setIsValid(true) + .setSomeFloat(SOME_FLOAT_VALUE) + .setSomeInt(SOME_INT_VALUE) + .setSomeLong(SOME_LONG_VALUE) + .setSomeIntArray(SOME_INT_ARRAY_VALUE) + .setSomeStringArray(SOME_STRING_ARRAY_VALUE) + .setSomeIntValueMap(SOME_INT_VALUE_MAP_VALUE) + .setSomeStringValueMap(SOME_STRING_VALUE_MAP_VALUE) + .setSomeUnion(SOME_UNION_VALUE) + .setSomeFixed(SOME_FIXED_VALUE) + .setSomeBytes(SOME_BYTES_VALUE) + .setSomeNull(null) + .setSomeEnum(MyEnum.ENUM1) + .setSomeRecord(SOME_RECORD_VALUE) + .build(); } } diff --git a/extensions-core/avro-extensions/src/test/java/org/apache/druid/data/input/avro/SchemaRegistryBasedAvroBytesDecoderTest.java b/extensions-core/avro-extensions/src/test/java/org/apache/druid/data/input/avro/SchemaRegistryBasedAvroBytesDecoderTest.java index 5481c0eb9cc..f5f1776a36f 100644 --- a/extensions-core/avro-extensions/src/test/java/org/apache/druid/data/input/avro/SchemaRegistryBasedAvroBytesDecoderTest.java +++ b/extensions-core/avro-extensions/src/test/java/org/apache/druid/data/input/avro/SchemaRegistryBasedAvroBytesDecoderTest.java @@ -31,18 +31,13 @@ import org.apache.druid.java.util.common.parsers.ParseException; import org.junit.Assert; import org.junit.Before; import org.junit.Test; +import org.mockito.ArgumentMatchers; +import org.mockito.Mockito; import java.io.ByteArrayOutputStream; import java.io.IOException; import java.nio.ByteBuffer; -import static org.mockito.ArgumentMatchers.anyInt; -import static org.mockito.ArgumentMatchers.eq; -import static org.mockito.Mockito.mock; -import static org.mockito.Mockito.when; - -/** - */ public class SchemaRegistryBasedAvroBytesDecoderTest { private SchemaRegistryClient registry; @@ -50,14 +45,14 @@ public class SchemaRegistryBasedAvroBytesDecoderTest @Before public void setUp() { - registry = mock(SchemaRegistryClient.class); + registry = Mockito.mock(SchemaRegistryClient.class); } @Test public void testParse() throws Exception { // Given - when(registry.getByID(eq(1234))).thenReturn(SomeAvroDatum.getClassSchema()); + Mockito.when(registry.getByID(ArgumentMatchers.eq(1234))).thenReturn(SomeAvroDatum.getClassSchema()); GenericRecord someAvroDatum = AvroStreamInputRowParserTest.buildSomeAvroDatum(); Schema schema = SomeAvroDatum.getClassSchema(); byte[] bytes = getAvroDatum(schema, someAvroDatum); @@ -73,7 +68,7 @@ public class SchemaRegistryBasedAvroBytesDecoderTest public void testParseCorrupted() throws Exception { // Given - when(registry.getByID(eq(1234))).thenReturn(SomeAvroDatum.getClassSchema()); + Mockito.when(registry.getByID(ArgumentMatchers.eq(1234))).thenReturn(SomeAvroDatum.getClassSchema()); GenericRecord someAvroDatum = AvroStreamInputRowParserTest.buildSomeAvroDatum(); Schema schema = SomeAvroDatum.getClassSchema(); byte[] bytes = getAvroDatum(schema, someAvroDatum); @@ -86,7 +81,7 @@ public class SchemaRegistryBasedAvroBytesDecoderTest public void testParseWrongId() throws Exception { // Given - when(registry.getByID(anyInt())).thenThrow(new IOException("no pasaran")); + 
Mockito.when(registry.getByID(ArgumentMatchers.anyInt())).thenThrow(new IOException("no pasaran")); GenericRecord someAvroDatum = AvroStreamInputRowParserTest.buildSomeAvroDatum(); Schema schema = SomeAvroDatum.getClassSchema(); byte[] bytes = getAvroDatum(schema, someAvroDatum); @@ -102,5 +97,4 @@ public class SchemaRegistryBasedAvroBytesDecoderTest writer.write(someAvroDatum, EncoderFactory.get().directBinaryEncoder(out, null)); return out.toByteArray(); } - } diff --git a/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/filter/BloomKFilterTest.java b/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/filter/BloomKFilterTest.java index 3385924e4e1..605634ad54e 100644 --- a/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/filter/BloomKFilterTest.java +++ b/extensions-core/druid-bloom-filter/src/test/java/org/apache/druid/query/filter/BloomKFilterTest.java @@ -30,12 +30,10 @@ import java.nio.ByteBuffer; import java.util.Random; import java.util.concurrent.ThreadLocalRandom; -import static org.junit.Assert.assertEquals; - public class BloomKFilterTest { private static final int COUNT = 100; - Random rand = ThreadLocalRandom.current(); + private Random rand = ThreadLocalRandom.current(); @Test public void testBloomKFilterBytes() throws IOException @@ -56,31 +54,31 @@ public class BloomKFilterTest BloomKFilter.add(buffer, val); BloomKFilter rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer)); buffer.position(0); - assertEquals(true, rehydrated.test(val)); - assertEquals(false, rehydrated.test(val1)); - assertEquals(false, rehydrated.test(val2)); - assertEquals(false, rehydrated.test(val3)); + Assert.assertTrue(rehydrated.test(val)); + Assert.assertFalse(rehydrated.test(val1)); + Assert.assertFalse(rehydrated.test(val2)); + Assert.assertFalse(rehydrated.test(val3)); BloomKFilter.add(buffer, val1); rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer)); buffer.position(0); - assertEquals(true, rehydrated.test(val)); - assertEquals(true, rehydrated.test(val1)); - assertEquals(false, rehydrated.test(val2)); - assertEquals(false, rehydrated.test(val3)); + Assert.assertTrue(rehydrated.test(val)); + Assert.assertTrue(rehydrated.test(val1)); + Assert.assertFalse(rehydrated.test(val2)); + Assert.assertFalse(rehydrated.test(val3)); BloomKFilter.add(buffer, val2); rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer)); buffer.position(0); - assertEquals(true, rehydrated.test(val)); - assertEquals(true, rehydrated.test(val1)); - assertEquals(true, rehydrated.test(val2)); - assertEquals(false, rehydrated.test(val3)); + Assert.assertTrue(rehydrated.test(val)); + Assert.assertTrue(rehydrated.test(val1)); + Assert.assertTrue(rehydrated.test(val2)); + Assert.assertFalse(rehydrated.test(val3)); BloomKFilter.add(buffer, val3); rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer)); buffer.position(0); - assertEquals(true, rehydrated.test(val)); - assertEquals(true, rehydrated.test(val1)); - assertEquals(true, rehydrated.test(val2)); - assertEquals(true, rehydrated.test(val3)); + Assert.assertTrue(rehydrated.test(val)); + Assert.assertTrue(rehydrated.test(val1)); + Assert.assertTrue(rehydrated.test(val2)); + Assert.assertTrue(rehydrated.test(val3)); byte[] randVal = new byte[COUNT]; for (int i = 0; i < COUNT; i++) { @@ -89,16 +87,16 @@ public class BloomKFilterTest } // last value should be present rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer)); - 
assertEquals(true, rehydrated.test(randVal)); + Assert.assertTrue(rehydrated.test(randVal)); // most likely this value should not exist randVal[0] = 0; randVal[1] = 0; randVal[2] = 0; randVal[3] = 0; randVal[4] = 0; - assertEquals(false, rehydrated.test(randVal)); + Assert.assertFalse(rehydrated.test(randVal)); - assertEquals(7808, rehydrated.sizeInBytes()); + Assert.assertEquals(7808, rehydrated.sizeInBytes()); } @Test @@ -118,31 +116,31 @@ public class BloomKFilterTest BloomKFilter.addLong(buffer, val); BloomKFilter rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer)); buffer.position(0); - assertEquals(true, rehydrated.testLong(val)); - assertEquals(false, rehydrated.testLong(val1)); - assertEquals(false, rehydrated.testLong(val2)); - assertEquals(false, rehydrated.testLong(val3)); + Assert.assertTrue(rehydrated.testLong(val)); + Assert.assertFalse(rehydrated.testLong(val1)); + Assert.assertFalse(rehydrated.testLong(val2)); + Assert.assertFalse(rehydrated.testLong(val3)); BloomKFilter.addLong(buffer, val1); rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer)); buffer.position(0); - assertEquals(true, rehydrated.testLong(val)); - assertEquals(true, rehydrated.testLong(val1)); - assertEquals(false, rehydrated.testLong(val2)); - assertEquals(false, rehydrated.testLong(val3)); + Assert.assertTrue(rehydrated.testLong(val)); + Assert.assertTrue(rehydrated.testLong(val1)); + Assert.assertFalse(rehydrated.testLong(val2)); + Assert.assertFalse(rehydrated.testLong(val3)); BloomKFilter.addLong(buffer, val2); rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer)); buffer.position(0); - assertEquals(true, rehydrated.testLong(val)); - assertEquals(true, rehydrated.testLong(val1)); - assertEquals(true, rehydrated.testLong(val2)); - assertEquals(false, rehydrated.testLong(val3)); + Assert.assertTrue(rehydrated.testLong(val)); + Assert.assertTrue(rehydrated.testLong(val1)); + Assert.assertTrue(rehydrated.testLong(val2)); + Assert.assertFalse(rehydrated.testLong(val3)); BloomKFilter.addLong(buffer, val3); rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer)); buffer.position(0); - assertEquals(true, rehydrated.testLong(val)); - assertEquals(true, rehydrated.testLong(val1)); - assertEquals(true, rehydrated.testLong(val2)); - assertEquals(true, rehydrated.testLong(val3)); + Assert.assertTrue(rehydrated.testLong(val)); + Assert.assertTrue(rehydrated.testLong(val1)); + Assert.assertTrue(rehydrated.testLong(val2)); + Assert.assertTrue(rehydrated.testLong(val3)); byte randVal = 0; for (int i = 0; i < COUNT; i++) { @@ -153,11 +151,11 @@ public class BloomKFilterTest rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer)); // last value should be present - assertEquals(true, rehydrated.testLong(randVal)); + Assert.assertTrue(rehydrated.testLong(randVal)); // most likely this value should not exist - assertEquals(false, rehydrated.testLong((byte) -120)); + Assert.assertFalse(rehydrated.testLong((byte) -120)); - assertEquals(7808, rehydrated.sizeInBytes()); + Assert.assertEquals(7808, rehydrated.sizeInBytes()); } @Test @@ -177,31 +175,31 @@ public class BloomKFilterTest BloomKFilter.addLong(buffer, val); BloomKFilter rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer)); buffer.position(0); - assertEquals(true, rehydrated.testLong(val)); - assertEquals(false, rehydrated.testLong(val1)); - assertEquals(false, rehydrated.testLong(val2)); - assertEquals(false, rehydrated.testLong(val3)); + 
Assert.assertTrue(rehydrated.testLong(val)); + Assert.assertFalse(rehydrated.testLong(val1)); + Assert.assertFalse(rehydrated.testLong(val2)); + Assert.assertFalse(rehydrated.testLong(val3)); BloomKFilter.addLong(buffer, val1); rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer)); buffer.position(0); - assertEquals(true, rehydrated.testLong(val)); - assertEquals(true, rehydrated.testLong(val1)); - assertEquals(false, rehydrated.testLong(val2)); - assertEquals(false, rehydrated.testLong(val3)); + Assert.assertTrue(rehydrated.testLong(val)); + Assert.assertTrue(rehydrated.testLong(val1)); + Assert.assertFalse(rehydrated.testLong(val2)); + Assert.assertFalse(rehydrated.testLong(val3)); BloomKFilter.addLong(buffer, val2); rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer)); buffer.position(0); - assertEquals(true, rehydrated.testLong(val)); - assertEquals(true, rehydrated.testLong(val1)); - assertEquals(true, rehydrated.testLong(val2)); - assertEquals(false, rehydrated.testLong(val3)); + Assert.assertTrue(rehydrated.testLong(val)); + Assert.assertTrue(rehydrated.testLong(val1)); + Assert.assertTrue(rehydrated.testLong(val2)); + Assert.assertFalse(rehydrated.testLong(val3)); BloomKFilter.addLong(buffer, val3); rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer)); buffer.position(0); - assertEquals(true, rehydrated.testLong(val)); - assertEquals(true, rehydrated.testLong(val1)); - assertEquals(true, rehydrated.testLong(val2)); - assertEquals(true, rehydrated.testLong(val3)); + Assert.assertTrue(rehydrated.testLong(val)); + Assert.assertTrue(rehydrated.testLong(val1)); + Assert.assertTrue(rehydrated.testLong(val2)); + Assert.assertTrue(rehydrated.testLong(val3)); int randVal = 0; for (int i = 0; i < COUNT; i++) { @@ -210,11 +208,11 @@ public class BloomKFilterTest } rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer)); // last value should be present - assertEquals(true, rehydrated.testLong(randVal)); + Assert.assertTrue(rehydrated.testLong(randVal)); // most likely this value should not exist - assertEquals(false, rehydrated.testLong(-120)); + Assert.assertFalse(rehydrated.testLong(-120)); - assertEquals(7808, rehydrated.sizeInBytes()); + Assert.assertEquals(7808, rehydrated.sizeInBytes()); } @Test @@ -234,31 +232,31 @@ public class BloomKFilterTest BloomKFilter.addLong(buffer, val); BloomKFilter rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer)); buffer.position(0); - assertEquals(true, rehydrated.testLong(val)); - assertEquals(false, rehydrated.testLong(val1)); - assertEquals(false, rehydrated.testLong(val2)); - assertEquals(false, rehydrated.testLong(val3)); + Assert.assertTrue(rehydrated.testLong(val)); + Assert.assertFalse(rehydrated.testLong(val1)); + Assert.assertFalse(rehydrated.testLong(val2)); + Assert.assertFalse(rehydrated.testLong(val3)); BloomKFilter.addLong(buffer, val1); rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer)); buffer.position(0); - assertEquals(true, rehydrated.testLong(val)); - assertEquals(true, rehydrated.testLong(val1)); - assertEquals(false, rehydrated.testLong(val2)); - assertEquals(false, rehydrated.testLong(val3)); + Assert.assertTrue(rehydrated.testLong(val)); + Assert.assertTrue(rehydrated.testLong(val1)); + Assert.assertFalse(rehydrated.testLong(val2)); + Assert.assertFalse(rehydrated.testLong(val3)); BloomKFilter.addLong(buffer, val2); rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer)); buffer.position(0); - 
assertEquals(true, rehydrated.testLong(val)); - assertEquals(true, rehydrated.testLong(val1)); - assertEquals(true, rehydrated.testLong(val2)); - assertEquals(false, rehydrated.testLong(val3)); + Assert.assertTrue(rehydrated.testLong(val)); + Assert.assertTrue(rehydrated.testLong(val1)); + Assert.assertTrue(rehydrated.testLong(val2)); + Assert.assertFalse(rehydrated.testLong(val3)); BloomKFilter.addLong(buffer, val3); rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer)); buffer.position(0); - assertEquals(true, rehydrated.testLong(val)); - assertEquals(true, rehydrated.testLong(val1)); - assertEquals(true, rehydrated.testLong(val2)); - assertEquals(true, rehydrated.testLong(val3)); + Assert.assertTrue(rehydrated.testLong(val)); + Assert.assertTrue(rehydrated.testLong(val1)); + Assert.assertTrue(rehydrated.testLong(val2)); + Assert.assertTrue(rehydrated.testLong(val3)); int randVal = 0; for (int i = 0; i < COUNT; i++) { @@ -267,11 +265,11 @@ public class BloomKFilterTest } rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer)); // last value should be present - assertEquals(true, rehydrated.testLong(randVal)); + Assert.assertTrue(rehydrated.testLong(randVal)); // most likely this value should not exist - assertEquals(false, rehydrated.testLong(-120)); + Assert.assertFalse(rehydrated.testLong(-120)); - assertEquals(7808, rehydrated.sizeInBytes()); + Assert.assertEquals(7808, rehydrated.sizeInBytes()); } @Test @@ -291,31 +289,31 @@ public class BloomKFilterTest BloomKFilter.addFloat(buffer, val); BloomKFilter rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer)); buffer.position(0); - assertEquals(true, rehydrated.testFloat(val)); - assertEquals(false, rehydrated.testFloat(val1)); - assertEquals(false, rehydrated.testFloat(val2)); - assertEquals(false, rehydrated.testFloat(val3)); + Assert.assertTrue(rehydrated.testFloat(val)); + Assert.assertFalse(rehydrated.testFloat(val1)); + Assert.assertFalse(rehydrated.testFloat(val2)); + Assert.assertFalse(rehydrated.testFloat(val3)); BloomKFilter.addFloat(buffer, val1); rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer)); buffer.position(0); - assertEquals(true, rehydrated.testFloat(val)); - assertEquals(true, rehydrated.testFloat(val1)); - assertEquals(false, rehydrated.testFloat(val2)); - assertEquals(false, rehydrated.testFloat(val3)); + Assert.assertTrue(rehydrated.testFloat(val)); + Assert.assertTrue(rehydrated.testFloat(val1)); + Assert.assertFalse(rehydrated.testFloat(val2)); + Assert.assertFalse(rehydrated.testFloat(val3)); BloomKFilter.addFloat(buffer, val2); rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer)); buffer.position(0); - assertEquals(true, rehydrated.testFloat(val)); - assertEquals(true, rehydrated.testFloat(val1)); - assertEquals(true, rehydrated.testFloat(val2)); - assertEquals(false, rehydrated.testFloat(val3)); + Assert.assertTrue(rehydrated.testFloat(val)); + Assert.assertTrue(rehydrated.testFloat(val1)); + Assert.assertTrue(rehydrated.testFloat(val2)); + Assert.assertFalse(rehydrated.testFloat(val3)); BloomKFilter.addFloat(buffer, val3); rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer)); buffer.position(0); - assertEquals(true, rehydrated.testFloat(val)); - assertEquals(true, rehydrated.testFloat(val1)); - assertEquals(true, rehydrated.testFloat(val2)); - assertEquals(true, rehydrated.testFloat(val3)); + Assert.assertTrue(rehydrated.testFloat(val)); + Assert.assertTrue(rehydrated.testFloat(val1)); + 
Assert.assertTrue(rehydrated.testFloat(val2)); + Assert.assertTrue(rehydrated.testFloat(val3)); float randVal = 0; for (int i = 0; i < COUNT; i++) { @@ -325,11 +323,11 @@ public class BloomKFilterTest rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer)); // last value should be present - assertEquals(true, rehydrated.testFloat(randVal)); + Assert.assertTrue(rehydrated.testFloat(randVal)); // most likely this value should not exist - assertEquals(false, rehydrated.testFloat(-120.2f)); + Assert.assertFalse(rehydrated.testFloat(-120.2f)); - assertEquals(7808, rehydrated.sizeInBytes()); + Assert.assertEquals(7808, rehydrated.sizeInBytes()); } @Test @@ -349,31 +347,31 @@ public class BloomKFilterTest BloomKFilter.addDouble(buffer, val); BloomKFilter rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer)); buffer.position(0); - assertEquals(true, rehydrated.testDouble(val)); - assertEquals(false, rehydrated.testDouble(val1)); - assertEquals(false, rehydrated.testDouble(val2)); - assertEquals(false, rehydrated.testDouble(val3)); + Assert.assertTrue(rehydrated.testDouble(val)); + Assert.assertFalse(rehydrated.testDouble(val1)); + Assert.assertFalse(rehydrated.testDouble(val2)); + Assert.assertFalse(rehydrated.testDouble(val3)); BloomKFilter.addDouble(buffer, val1); rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer)); buffer.position(0); - assertEquals(true, rehydrated.testDouble(val)); - assertEquals(true, rehydrated.testDouble(val1)); - assertEquals(false, rehydrated.testDouble(val2)); - assertEquals(false, rehydrated.testDouble(val3)); + Assert.assertTrue(rehydrated.testDouble(val)); + Assert.assertTrue(rehydrated.testDouble(val1)); + Assert.assertFalse(rehydrated.testDouble(val2)); + Assert.assertFalse(rehydrated.testDouble(val3)); BloomKFilter.addDouble(buffer, val2); rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer)); buffer.position(0); - assertEquals(true, rehydrated.testDouble(val)); - assertEquals(true, rehydrated.testDouble(val1)); - assertEquals(true, rehydrated.testDouble(val2)); - assertEquals(false, rehydrated.testDouble(val3)); + Assert.assertTrue(rehydrated.testDouble(val)); + Assert.assertTrue(rehydrated.testDouble(val1)); + Assert.assertTrue(rehydrated.testDouble(val2)); + Assert.assertFalse(rehydrated.testDouble(val3)); BloomKFilter.addDouble(buffer, val3); rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer)); buffer.position(0); - assertEquals(true, rehydrated.testDouble(val)); - assertEquals(true, rehydrated.testDouble(val1)); - assertEquals(true, rehydrated.testDouble(val2)); - assertEquals(true, rehydrated.testDouble(val3)); + Assert.assertTrue(rehydrated.testDouble(val)); + Assert.assertTrue(rehydrated.testDouble(val1)); + Assert.assertTrue(rehydrated.testDouble(val2)); + Assert.assertTrue(rehydrated.testDouble(val3)); double randVal = 0; for (int i = 0; i < COUNT; i++) { @@ -383,11 +381,11 @@ public class BloomKFilterTest rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer)); // last value should be present - assertEquals(true, rehydrated.testDouble(randVal)); + Assert.assertTrue(rehydrated.testDouble(randVal)); // most likely this value should not exist - assertEquals(false, rehydrated.testDouble(-120.2d)); + Assert.assertFalse(rehydrated.testDouble(-120.2d)); - assertEquals(7808, rehydrated.sizeInBytes()); + Assert.assertEquals(7808, rehydrated.sizeInBytes()); } @Test @@ -407,31 +405,31 @@ public class BloomKFilterTest BloomKFilter.addString(buffer, 
val); BloomKFilter rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer)); buffer.position(0); - assertEquals(true, rehydrated.testString(val)); - assertEquals(false, rehydrated.testString(val1)); - assertEquals(false, rehydrated.testString(val2)); - assertEquals(false, rehydrated.testString(val3)); + Assert.assertTrue(rehydrated.testString(val)); + Assert.assertFalse(rehydrated.testString(val1)); + Assert.assertFalse(rehydrated.testString(val2)); + Assert.assertFalse(rehydrated.testString(val3)); BloomKFilter.addString(buffer, val1); rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer)); buffer.position(0); - assertEquals(true, rehydrated.testString(val)); - assertEquals(true, rehydrated.testString(val1)); - assertEquals(false, rehydrated.testString(val2)); - assertEquals(false, rehydrated.testString(val3)); + Assert.assertTrue(rehydrated.testString(val)); + Assert.assertTrue(rehydrated.testString(val1)); + Assert.assertFalse(rehydrated.testString(val2)); + Assert.assertFalse(rehydrated.testString(val3)); BloomKFilter.addString(buffer, val2); rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer)); buffer.position(0); - assertEquals(true, rehydrated.testString(val)); - assertEquals(true, rehydrated.testString(val1)); - assertEquals(true, rehydrated.testString(val2)); - assertEquals(false, rehydrated.testString(val3)); + Assert.assertTrue(rehydrated.testString(val)); + Assert.assertTrue(rehydrated.testString(val1)); + Assert.assertTrue(rehydrated.testString(val2)); + Assert.assertFalse(rehydrated.testString(val3)); BloomKFilter.addString(buffer, val3); rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer)); buffer.position(0); - assertEquals(true, rehydrated.testString(val)); - assertEquals(true, rehydrated.testString(val1)); - assertEquals(true, rehydrated.testString(val2)); - assertEquals(true, rehydrated.testString(val3)); + Assert.assertTrue(rehydrated.testString(val)); + Assert.assertTrue(rehydrated.testString(val1)); + Assert.assertTrue(rehydrated.testString(val2)); + Assert.assertTrue(rehydrated.testString(val3)); long randVal = 0; for (int i = 0; i < COUNT; i++) { @@ -440,11 +438,11 @@ public class BloomKFilterTest } rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer)); // last value should be present - assertEquals(true, rehydrated.testString(Long.toString(randVal))); + Assert.assertTrue(rehydrated.testString(Long.toString(randVal))); // most likely this value should not exist - assertEquals(false, rehydrated.testString(Long.toString(-120))); + Assert.assertFalse(rehydrated.testString(Long.toString(-120))); - assertEquals(77952, rehydrated.sizeInBytes()); + Assert.assertEquals(77952, rehydrated.sizeInBytes()); } @Test @@ -500,10 +498,10 @@ public class BloomKFilterTest BloomKFilter bfMerged = BloomKFilter.deserialize(bytesIn); // All values should pass test for (String val : inputs1) { - assert bfMerged.testString(val); + Assert.assertTrue(bfMerged.testString(val)); } for (String val : inputs2) { - assert bfMerged.testString(val); + Assert.assertTrue(bfMerged.testString(val)); } } diff --git a/extensions-core/google-extensions/src/test/java/org/apache/druid/storage/google/GoogleByteSourceTest.java b/extensions-core/google-extensions/src/test/java/org/apache/druid/storage/google/GoogleByteSourceTest.java index 4eb55bd2dc1..a65f4750d3b 100644 --- a/extensions-core/google-extensions/src/test/java/org/apache/druid/storage/google/GoogleByteSourceTest.java +++ 
b/extensions-core/google-extensions/src/test/java/org/apache/druid/storage/google/GoogleByteSourceTest.java @@ -19,14 +19,13 @@ package org.apache.druid.storage.google; +import org.easymock.EasyMock; import org.easymock.EasyMockSupport; import org.junit.Test; import java.io.IOException; import java.io.InputStream; -import static org.easymock.EasyMock.expect; - public class GoogleByteSourceTest extends EasyMockSupport { @Test @@ -37,7 +36,7 @@ public class GoogleByteSourceTest extends EasyMockSupport GoogleStorage storage = createMock(GoogleStorage.class); InputStream stream = createMock(InputStream.class); - expect(storage.get(bucket, path)).andReturn(stream); + EasyMock.expect(storage.get(bucket, path)).andReturn(stream); replayAll(); @@ -55,7 +54,7 @@ public class GoogleByteSourceTest extends EasyMockSupport final String path = "/path/to/file"; GoogleStorage storage = createMock(GoogleStorage.class); - expect(storage.get(bucket, path)).andThrow(new IOException("")); + EasyMock.expect(storage.get(bucket, path)).andThrow(new IOException("")); replayAll(); diff --git a/extensions-core/google-extensions/src/test/java/org/apache/druid/storage/google/GoogleDataSegmentKillerTest.java b/extensions-core/google-extensions/src/test/java/org/apache/druid/storage/google/GoogleDataSegmentKillerTest.java index 7b0812b37d3..bf99cf3e188 100644 --- a/extensions-core/google-extensions/src/test/java/org/apache/druid/storage/google/GoogleDataSegmentKillerTest.java +++ b/extensions-core/google-extensions/src/test/java/org/apache/druid/storage/google/GoogleDataSegmentKillerTest.java @@ -35,19 +35,17 @@ import org.junit.Test; import java.io.IOException; -import static org.easymock.EasyMock.expectLastCall; - public class GoogleDataSegmentKillerTest extends EasyMockSupport { - private static final String bucket = "bucket"; - private static final String indexPath = "test/2015-04-12T00:00:00.000Z_2015-04-13T00:00:00.000Z/1/0/index.zip"; - private static final String descriptorPath = DataSegmentKiller.descriptorPath(indexPath); + private static final String BUCKET = "bucket"; + private static final String INDEX_PATH = "test/2015-04-12T00:00:00.000Z_2015-04-13T00:00:00.000Z/1/0/index.zip"; + private static final String DESCRIPTOR_PATH = DataSegmentKiller.descriptorPath(INDEX_PATH); - private static final DataSegment dataSegment = new DataSegment( + private static final DataSegment DATA_SEGMENT = new DataSegment( "test", Intervals.of("2015-04-12/2015-04-13"), "1", - ImmutableMap.of("bucket", bucket, "path", indexPath), + ImmutableMap.of("bucket", BUCKET, "path", INDEX_PATH), null, null, NoneShardSpec.instance(), @@ -66,16 +64,16 @@ public class GoogleDataSegmentKillerTest extends EasyMockSupport @Test public void killTest() throws SegmentLoadingException, IOException { - storage.delete(EasyMock.eq(bucket), EasyMock.eq(indexPath)); - expectLastCall(); - storage.delete(EasyMock.eq(bucket), EasyMock.eq(descriptorPath)); - expectLastCall(); + storage.delete(EasyMock.eq(BUCKET), EasyMock.eq(INDEX_PATH)); + EasyMock.expectLastCall(); + storage.delete(EasyMock.eq(BUCKET), EasyMock.eq(DESCRIPTOR_PATH)); + EasyMock.expectLastCall(); replayAll(); GoogleDataSegmentKiller killer = new GoogleDataSegmentKiller(storage); - killer.kill(dataSegment); + killer.kill(DATA_SEGMENT); verifyAll(); } @@ -88,14 +86,14 @@ public class GoogleDataSegmentKillerTest extends EasyMockSupport 300, "test" ); - storage.delete(EasyMock.eq(bucket), EasyMock.eq(indexPath)); - expectLastCall().andThrow(exception); + storage.delete(EasyMock.eq(BUCKET), 
EasyMock.eq(INDEX_PATH)); + EasyMock.expectLastCall().andThrow(exception); replayAll(); GoogleDataSegmentKiller killer = new GoogleDataSegmentKiller(storage); - killer.kill(dataSegment); + killer.kill(DATA_SEGMENT); verifyAll(); } @@ -108,16 +106,16 @@ public class GoogleDataSegmentKillerTest extends EasyMockSupport 500, "test" ); - storage.delete(EasyMock.eq(bucket), EasyMock.eq(indexPath)); - expectLastCall().andThrow(exception).once().andVoid().once(); - storage.delete(EasyMock.eq(bucket), EasyMock.eq(descriptorPath)); - expectLastCall().andThrow(exception).once().andVoid().once(); + storage.delete(EasyMock.eq(BUCKET), EasyMock.eq(INDEX_PATH)); + EasyMock.expectLastCall().andThrow(exception).once().andVoid().once(); + storage.delete(EasyMock.eq(BUCKET), EasyMock.eq(DESCRIPTOR_PATH)); + EasyMock.expectLastCall().andThrow(exception).once().andVoid().once(); replayAll(); GoogleDataSegmentKiller killer = new GoogleDataSegmentKiller(storage); - killer.kill(dataSegment); + killer.kill(DATA_SEGMENT); verifyAll(); } diff --git a/extensions-core/google-extensions/src/test/java/org/apache/druid/storage/google/GoogleDataSegmentPullerTest.java b/extensions-core/google-extensions/src/test/java/org/apache/druid/storage/google/GoogleDataSegmentPullerTest.java index d6f1a0bb99b..9208f3d078a 100644 --- a/extensions-core/google-extensions/src/test/java/org/apache/druid/storage/google/GoogleDataSegmentPullerTest.java +++ b/extensions-core/google-extensions/src/test/java/org/apache/druid/storage/google/GoogleDataSegmentPullerTest.java @@ -26,19 +26,17 @@ import org.apache.commons.io.FileUtils; import org.apache.druid.segment.loading.SegmentLoadingException; import org.easymock.EasyMock; import org.easymock.EasyMockSupport; +import org.junit.Assert; import org.junit.Test; import java.io.File; import java.io.IOException; import java.nio.file.Files; -import static org.easymock.EasyMock.expect; -import static org.junit.Assert.assertFalse; - public class GoogleDataSegmentPullerTest extends EasyMockSupport { - private static final String bucket = "bucket"; - private static final String path = "/path/to/storage/index.zip"; + private static final String BUCKET = "bucket"; + private static final String PATH = "/path/to/storage/index.zip"; @Test(expected = SegmentLoadingException.class) public void testDeleteOutputDirectoryWhenErrorIsRaisedPullingSegmentFiles() @@ -52,14 +50,14 @@ public class GoogleDataSegmentPullerTest extends EasyMockSupport 300, "test" ); - expect(storage.get(EasyMock.eq(bucket), EasyMock.eq(path))).andThrow(exception); + EasyMock.expect(storage.get(EasyMock.eq(BUCKET), EasyMock.eq(PATH))).andThrow(exception); replayAll(); GoogleDataSegmentPuller puller = new GoogleDataSegmentPuller(storage); - puller.getSegmentFiles(bucket, path, outDir); + puller.getSegmentFiles(BUCKET, PATH, outDir); - assertFalse(outDir.exists()); + Assert.assertFalse(outDir.exists()); verifyAll(); } diff --git a/extensions-core/google-extensions/src/test/java/org/apache/druid/storage/google/GoogleDataSegmentPusherTest.java b/extensions-core/google-extensions/src/test/java/org/apache/druid/storage/google/GoogleDataSegmentPusherTest.java index c5ba422c660..2c2d8390645 100644 --- a/extensions-core/google-extensions/src/test/java/org/apache/druid/storage/google/GoogleDataSegmentPusherTest.java +++ b/extensions-core/google-extensions/src/test/java/org/apache/druid/storage/google/GoogleDataSegmentPusherTest.java @@ -36,15 +36,13 @@ import java.io.File; import java.util.ArrayList; import java.util.HashMap; -import static 
org.easymock.EasyMock.expectLastCall; - public class GoogleDataSegmentPusherTest extends EasyMockSupport { @Rule public final TemporaryFolder tempFolder = new TemporaryFolder(); - private static final String bucket = "bucket"; - private static final String prefix = "prefix"; + private static final String BUCKET = "bucket"; + private static final String PREFIX = "prefix"; private GoogleStorage storage; private GoogleAccountConfig googleAccountConfig; @@ -54,8 +52,8 @@ public class GoogleDataSegmentPusherTest extends EasyMockSupport { storage = createMock(GoogleStorage.class); googleAccountConfig = new GoogleAccountConfig(); - googleAccountConfig.setBucket(bucket); - googleAccountConfig.setPrefix(prefix); + googleAccountConfig.setBucket(BUCKET); + googleAccountConfig.setPrefix(PREFIX); } @Test @@ -86,14 +84,14 @@ public class GoogleDataSegmentPusherTest extends EasyMockSupport .createMock(); final String storageDir = pusher.getStorageDir(segmentToPush, false); - final String indexPath = prefix + "/" + storageDir + "/" + "index.zip"; + final String indexPath = PREFIX + "/" + storageDir + "/" + "index.zip"; pusher.insert( EasyMock.anyObject(File.class), EasyMock.eq("application/zip"), EasyMock.eq(indexPath) ); - expectLastCall(); + EasyMock.expectLastCall(); replayAll(); @@ -102,12 +100,9 @@ public class GoogleDataSegmentPusherTest extends EasyMockSupport Assert.assertEquals(segmentToPush.getSize(), segment.getSize()); Assert.assertEquals(segmentToPush, segment); Assert.assertEquals(ImmutableMap.of( - "type", - GoogleStorageDruidModule.SCHEME, - "bucket", - bucket, - "path", - indexPath + "type", GoogleStorageDruidModule.SCHEME, + "bucket", BUCKET, + "path", indexPath ), segment.getLoadSpec()); verifyAll(); diff --git a/extensions-core/google-extensions/src/test/java/org/apache/druid/storage/google/GoogleTaskLogsTest.java b/extensions-core/google-extensions/src/test/java/org/apache/druid/storage/google/GoogleTaskLogsTest.java index edb768ba76d..56687b6768d 100644 --- a/extensions-core/google-extensions/src/test/java/org/apache/druid/storage/google/GoogleTaskLogsTest.java +++ b/extensions-core/google-extensions/src/test/java/org/apache/druid/storage/google/GoogleTaskLogsTest.java @@ -38,14 +38,11 @@ import java.io.File; import java.io.StringWriter; import java.nio.charset.StandardCharsets; -import static org.easymock.EasyMock.expect; -import static org.easymock.EasyMock.expectLastCall; - public class GoogleTaskLogsTest extends EasyMockSupport { - private static final String bucket = "test"; - private static final String prefix = "test/log"; - private static final String taskid = "taskid"; + private static final String BUCKET = "test"; + private static final String PREFIX = "test/log"; + private static final String TASKID = "taskid"; private GoogleStorage storage; private GoogleTaskLogs googleTaskLogs; @@ -54,7 +51,7 @@ public class GoogleTaskLogsTest extends EasyMockSupport public void before() { storage = createMock(GoogleStorage.class); - GoogleTaskLogsConfig config = new GoogleTaskLogsConfig(bucket, prefix); + GoogleTaskLogsConfig config = new GoogleTaskLogsConfig(BUCKET, PREFIX); googleTaskLogs = new GoogleTaskLogs(config, storage); } @@ -69,12 +66,16 @@ public class GoogleTaskLogsTest extends EasyMockSupport output.write("test"); output.close(); - storage.insert(EasyMock.eq(bucket), EasyMock.eq(prefix + "/" + taskid), EasyMock.anyObject(InputStreamContent.class)); - expectLastCall(); + storage.insert( + EasyMock.eq(BUCKET), + EasyMock.eq(PREFIX + "/" + TASKID), + 
EasyMock.anyObject(InputStreamContent.class) + ); + EasyMock.expectLastCall(); replayAll(); - googleTaskLogs.pushTaskLog(taskid, logFile); + googleTaskLogs.pushTaskLog(TASKID, logFile); verifyAll(); } @@ -88,14 +89,14 @@ public class GoogleTaskLogsTest extends EasyMockSupport { final String testLog = "hello this is a log"; - final String logPath = prefix + "/" + taskid; - expect(storage.exists(bucket, logPath)).andReturn(true); - expect(storage.size(bucket, logPath)).andReturn((long) testLog.length()); - expect(storage.get(bucket, logPath)).andReturn(new ByteArrayInputStream(StringUtils.toUtf8(testLog))); + final String logPath = PREFIX + "/" + TASKID; + EasyMock.expect(storage.exists(BUCKET, logPath)).andReturn(true); + EasyMock.expect(storage.size(BUCKET, logPath)).andReturn((long) testLog.length()); + EasyMock.expect(storage.get(BUCKET, logPath)).andReturn(new ByteArrayInputStream(StringUtils.toUtf8(testLog))); replayAll(); - final Optional byteSource = googleTaskLogs.streamTaskLog(taskid, 0); + final Optional byteSource = googleTaskLogs.streamTaskLog(TASKID, 0); final StringWriter writer = new StringWriter(); IOUtils.copy(byteSource.get().openStream(), writer, "UTF-8"); @@ -109,14 +110,14 @@ public class GoogleTaskLogsTest extends EasyMockSupport { final String testLog = "hello this is a log"; - final String logPath = prefix + "/" + taskid; - expect(storage.exists(bucket, logPath)).andReturn(true); - expect(storage.size(bucket, logPath)).andReturn((long) testLog.length()); - expect(storage.get(bucket, logPath)).andReturn(new ByteArrayInputStream(StringUtils.toUtf8(testLog))); + final String logPath = PREFIX + "/" + TASKID; + EasyMock.expect(storage.exists(BUCKET, logPath)).andReturn(true); + EasyMock.expect(storage.size(BUCKET, logPath)).andReturn((long) testLog.length()); + EasyMock.expect(storage.get(BUCKET, logPath)).andReturn(new ByteArrayInputStream(StringUtils.toUtf8(testLog))); replayAll(); - final Optional byteSource = googleTaskLogs.streamTaskLog(taskid, 5); + final Optional byteSource = googleTaskLogs.streamTaskLog(TASKID, 5); final StringWriter writer = new StringWriter(); IOUtils.copy(byteSource.get().openStream(), writer, "UTF-8"); @@ -130,14 +131,14 @@ public class GoogleTaskLogsTest extends EasyMockSupport { final String testLog = "hello this is a log"; - final String logPath = prefix + "/" + taskid; - expect(storage.exists(bucket, logPath)).andReturn(true); - expect(storage.size(bucket, logPath)).andReturn((long) testLog.length()); - expect(storage.get(bucket, logPath)).andReturn(new ByteArrayInputStream(StringUtils.toUtf8(testLog))); + final String logPath = PREFIX + "/" + TASKID; + EasyMock.expect(storage.exists(BUCKET, logPath)).andReturn(true); + EasyMock.expect(storage.size(BUCKET, logPath)).andReturn((long) testLog.length()); + EasyMock.expect(storage.get(BUCKET, logPath)).andReturn(new ByteArrayInputStream(StringUtils.toUtf8(testLog))); replayAll(); - final Optional byteSource = googleTaskLogs.streamTaskLog(taskid, -3); + final Optional byteSource = googleTaskLogs.streamTaskLog(TASKID, -3); final StringWriter writer = new StringWriter(); IOUtils.copy(byteSource.get().openStream(), writer, "UTF-8"); diff --git a/extensions-core/kafka-extraction-namespace/src/test/java/org/apache/druid/query/lookup/KafkaLookupExtractorFactoryTest.java b/extensions-core/kafka-extraction-namespace/src/test/java/org/apache/druid/query/lookup/KafkaLookupExtractorFactoryTest.java index 9e4bea5e9e5..f50d65c54c8 100644 --- 
a/extensions-core/kafka-extraction-namespace/src/test/java/org/apache/druid/query/lookup/KafkaLookupExtractorFactoryTest.java +++ b/extensions-core/kafka-extraction-namespace/src/test/java/org/apache/druid/query/lookup/KafkaLookupExtractorFactoryTest.java @@ -57,8 +57,6 @@ import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.atomic.AtomicBoolean; import java.util.concurrent.atomic.AtomicLong; -import static org.apache.druid.query.lookup.KafkaLookupExtractorFactory.DEFAULT_STRING_DECODER; - @RunWith(PowerMockRunner.class) @PrepareForTest({ NamespaceExtractionCacheManager.class, @@ -80,7 +78,8 @@ public class KafkaLookupExtractorFactoryTest "some.property", "some.value" ); private final ObjectMapper mapper = new DefaultObjectMapper(); - private final NamespaceExtractionCacheManager cacheManager = PowerMock.createStrictMock(NamespaceExtractionCacheManager.class); + private final NamespaceExtractionCacheManager cacheManager = PowerMock.createStrictMock( + NamespaceExtractionCacheManager.class); private final CacheHandler cacheHandler = PowerMock.createStrictMock(CacheHandler.class); @@ -199,6 +198,7 @@ public class KafkaLookupExtractorFactoryTest DEFAULT_PROPERTIES ); factory1.getMapRef().set(ImmutableMap.of()); + //noinspection StringConcatenationMissingWhitespace final KafkaLookupExtractorFactory factory2 = new KafkaLookupExtractorFactory( cacheManager, TOPIC + "b", @@ -228,6 +228,7 @@ public class KafkaLookupExtractorFactoryTest DEFAULT_PROPERTIES ))); + //noinspection StringConcatenationMissingWhitespace Assert.assertTrue(factory.replaces(new KafkaLookupExtractorFactory( cacheManager, TOPIC + "b", @@ -283,29 +284,23 @@ public class KafkaLookupExtractorFactoryTest EasyMock.expect(consumerConnector.createMessageStreamsByFilter( EasyMock.anyObject(TopicFilter.class), EasyMock.anyInt(), - EasyMock.eq( - DEFAULT_STRING_DECODER), - EasyMock.eq(DEFAULT_STRING_DECODER) + EasyMock.eq(KafkaLookupExtractorFactory.DEFAULT_STRING_DECODER), + EasyMock.eq(KafkaLookupExtractorFactory.DEFAULT_STRING_DECODER) )).andReturn(ImmutableList.of(kafkaStream)).once(); EasyMock.expect(kafkaStream.iterator()).andReturn(consumerIterator).anyTimes(); EasyMock.expect(consumerIterator.hasNext()).andAnswer(getBlockingAnswer()).anyTimes(); EasyMock.expect(cacheManager.createCache()) .andReturn(cacheHandler) .once(); - EasyMock.expect(cacheHandler.getCache()).andReturn(new ConcurrentHashMap()).once(); + EasyMock.expect(cacheHandler.getCache()).andReturn(new ConcurrentHashMap<>()).once(); cacheHandler.close(); EasyMock.expectLastCall(); final AtomicBoolean threadWasInterrupted = new AtomicBoolean(false); consumerConnector.shutdown(); - EasyMock.expectLastCall().andAnswer(new IAnswer() - { - @Override - public Object answer() - { - threadWasInterrupted.set(Thread.currentThread().isInterrupted()); - return null; - } + EasyMock.expectLastCall().andAnswer(() -> { + threadWasInterrupted.set(Thread.currentThread().isInterrupted()); + return null; }).times(2); PowerMock.replay(cacheManager, cacheHandler, kafkaStream, consumerConnector, consumerIterator); @@ -379,16 +374,15 @@ public class KafkaLookupExtractorFactoryTest EasyMock.expect(consumerConnector.createMessageStreamsByFilter( EasyMock.anyObject(TopicFilter.class), EasyMock.anyInt(), - EasyMock.eq( - DEFAULT_STRING_DECODER), - EasyMock.eq(DEFAULT_STRING_DECODER) + EasyMock.eq(KafkaLookupExtractorFactory.DEFAULT_STRING_DECODER), + EasyMock.eq(KafkaLookupExtractorFactory.DEFAULT_STRING_DECODER) )).andReturn(ImmutableList.of(kafkaStream)).once(); 
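
The KafkaLookupExtractorFactoryTest hunks combine two of the patterns used throughout this patch: static imports such as DEFAULT_STRING_DECODER become class-qualified references, and anonymous IAnswer implementations are rewritten as lambdas (IAnswer declares a single abstract method, so a lambda is a drop-in replacement). A minimal sketch of that style, using a hypothetical Service interface that is not part of the patch:

    import org.easymock.EasyMock;
    import org.easymock.IAnswer;
    import org.junit.Assert;
    import org.junit.Test;

    public class QualifiedEasyMockLambdaExample
    {
      // Hypothetical collaborator, present only to illustrate the mocking style.
      interface Service
      {
        String fetch(String key);
      }

      @Test
      public void testAnswerAsLambda()
      {
        Service service = EasyMock.createMock(Service.class);

        // IAnswer has one abstract method, so the old anonymous class can become a lambda;
        // every EasyMock call stays fully qualified instead of statically imported.
        IAnswer<String> answer = () -> "value-for-" + EasyMock.getCurrentArguments()[0];
        EasyMock.expect(service.fetch(EasyMock.anyString())).andAnswer(answer).once();
        EasyMock.replay(service);

        Assert.assertEquals("value-for-k1", service.fetch("k1"));
        EasyMock.verify(service);
      }
    }

The same shape applies to the Runnable, Callable, and IAnswer conversions in the earlier hunks of this patch.
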
EasyMock.expect(kafkaStream.iterator()).andReturn(consumerIterator).anyTimes(); EasyMock.expect(consumerIterator.hasNext()).andAnswer(getBlockingAnswer()).anyTimes(); EasyMock.expect(cacheManager.createCache()) .andReturn(cacheHandler) .once(); - EasyMock.expect(cacheHandler.getCache()).andReturn(new ConcurrentHashMap()).once(); + EasyMock.expect(cacheHandler.getCache()).andReturn(new ConcurrentHashMap<>()).once(); cacheHandler.close(); EasyMock.expectLastCall().once(); consumerConnector.shutdown(); @@ -421,16 +415,15 @@ public class KafkaLookupExtractorFactoryTest EasyMock.expect(consumerConnector.createMessageStreamsByFilter( EasyMock.anyObject(TopicFilter.class), EasyMock.anyInt(), - EasyMock.eq( - DEFAULT_STRING_DECODER), - EasyMock.eq(DEFAULT_STRING_DECODER) + EasyMock.eq(KafkaLookupExtractorFactory.DEFAULT_STRING_DECODER), + EasyMock.eq(KafkaLookupExtractorFactory.DEFAULT_STRING_DECODER) )).andReturn(ImmutableList.of(kafkaStream)).once(); EasyMock.expect(kafkaStream.iterator()).andReturn(consumerIterator).anyTimes(); EasyMock.expect(consumerIterator.hasNext()).andAnswer(getBlockingAnswer()).anyTimes(); EasyMock.expect(cacheManager.createCache()) .andReturn(cacheHandler) .once(); - EasyMock.expect(cacheHandler.getCache()).andReturn(new ConcurrentHashMap()).once(); + EasyMock.expect(cacheHandler.getCache()).andReturn(new ConcurrentHashMap<>()).once(); cacheHandler.close(); EasyMock.expectLastCall().once(); consumerConnector.shutdown(); @@ -544,21 +537,16 @@ public class KafkaLookupExtractorFactoryTest public void testDefaultDecoder() { final String str = "some string"; - Assert.assertEquals(str, DEFAULT_STRING_DECODER.fromBytes(StringUtils.toUtf8(str))); + Assert.assertEquals(str, KafkaLookupExtractorFactory.DEFAULT_STRING_DECODER.fromBytes(StringUtils.toUtf8(str))); } private IAnswer getBlockingAnswer() { - return new IAnswer() - { - @Override - public Boolean answer() throws Throwable - { - Thread.sleep(60000); - Assert.fail("Test failed to complete within 60000ms"); + return () -> { + Thread.sleep(60000); + Assert.fail("Test failed to complete within 60000ms"); - return false; - } + return false; }; } } diff --git a/extensions-core/kafka-indexing-service/src/test/java/org/apache/druid/indexing/kafka/KafkaIndexTaskClientTest.java b/extensions-core/kafka-indexing-service/src/test/java/org/apache/druid/indexing/kafka/KafkaIndexTaskClientTest.java index f0178b8a09a..9a835f51c29 100644 --- a/extensions-core/kafka-indexing-service/src/test/java/org/apache/druid/indexing/kafka/KafkaIndexTaskClientTest.java +++ b/extensions-core/kafka-indexing-service/src/test/java/org/apache/druid/indexing/kafka/KafkaIndexTaskClientTest.java @@ -66,8 +66,6 @@ import java.util.Collections; import java.util.List; import java.util.Map; -import static org.easymock.EasyMock.expect; - @RunWith(Parameterized.class) public class KafkaIndexTaskClientTest extends EasyMockSupport { @@ -85,7 +83,7 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport private static final long TEST_NUM_RETRIES = 0; private static final String URL_FORMATTER = "http://%s:%d/druid/worker/v1/chat/%s/%s"; - private int numThreads; + private final int numThreads; private HttpClient httpClient; private TaskInfoProvider taskInfoProvider; private FullResponseHolder responseHolder; @@ -114,20 +112,20 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport headers = createMock(HttpHeaders.class); client = new TestableKafkaIndexTaskClient(httpClient, objectMapper, taskInfoProvider); - 
expect(taskInfoProvider.getTaskLocation(TEST_ID)) - .andReturn(new TaskLocation(TEST_HOST, TEST_PORT, TEST_TLS_PORT)) - .anyTimes(); - expect(taskInfoProvider.getTaskStatus(TEST_ID)) - .andReturn(Optional.of(TaskStatus.running(TEST_ID))) - .anyTimes(); + EasyMock.expect(taskInfoProvider.getTaskLocation(TEST_ID)) + .andReturn(new TaskLocation(TEST_HOST, TEST_PORT, TEST_TLS_PORT)) + .anyTimes(); + EasyMock.expect(taskInfoProvider.getTaskStatus(TEST_ID)) + .andReturn(Optional.of(TaskStatus.running(TEST_ID))) + .anyTimes(); for (String testId : TEST_IDS) { - expect(taskInfoProvider.getTaskLocation(testId)) - .andReturn(new TaskLocation(TEST_HOST, TEST_PORT, TEST_TLS_PORT)) - .anyTimes(); - expect(taskInfoProvider.getTaskStatus(testId)) - .andReturn(Optional.of(TaskStatus.running(testId))) - .anyTimes(); + EasyMock.expect(taskInfoProvider.getTaskLocation(testId)) + .andReturn(new TaskLocation(TEST_HOST, TEST_PORT, TEST_TLS_PORT)) + .anyTimes(); + EasyMock.expect(taskInfoProvider.getTaskStatus(testId)) + .andReturn(Optional.of(TaskStatus.running(testId))) + .anyTimes(); } } @@ -141,22 +139,22 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport public void testNoTaskLocation() throws IOException { EasyMock.reset(taskInfoProvider); - expect(taskInfoProvider.getTaskLocation(TEST_ID)).andReturn(TaskLocation.unknown()).anyTimes(); - expect(taskInfoProvider.getTaskStatus(TEST_ID)) - .andReturn(Optional.of(TaskStatus.running(TEST_ID))) - .anyTimes(); + EasyMock.expect(taskInfoProvider.getTaskLocation(TEST_ID)).andReturn(TaskLocation.unknown()).anyTimes(); + EasyMock.expect(taskInfoProvider.getTaskStatus(TEST_ID)) + .andReturn(Optional.of(TaskStatus.running(TEST_ID))) + .anyTimes(); replayAll(); - Assert.assertEquals(false, client.stop(TEST_ID, true)); - Assert.assertEquals(false, client.resume(TEST_ID)); + Assert.assertFalse(client.stop(TEST_ID, true)); + Assert.assertFalse(client.resume(TEST_ID)); Assert.assertEquals(ImmutableMap.of(), client.pause(TEST_ID)); Assert.assertEquals(ImmutableMap.of(), client.pause(TEST_ID)); Assert.assertEquals(Status.NOT_STARTED, client.getStatus(TEST_ID)); - Assert.assertEquals(null, client.getStartTime(TEST_ID)); + Assert.assertNull(client.getStartTime(TEST_ID)); Assert.assertEquals(ImmutableMap.of(), client.getCurrentOffsets(TEST_ID, true)); Assert.assertEquals(ImmutableMap.of(), client.getEndOffsets(TEST_ID)); - Assert.assertEquals(false, client.setEndOffsets(TEST_ID, Collections.emptyMap(), true)); - Assert.assertEquals(false, client.setEndOffsets(TEST_ID, Collections.emptyMap(), true)); + Assert.assertFalse(client.setEndOffsets(TEST_ID, Collections.emptyMap(), true)); + Assert.assertFalse(client.setEndOffsets(TEST_ID, Collections.emptyMap(), true)); verifyAll(); } @@ -168,12 +166,12 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport expectedException.expectMessage("Aborting request because task [test-id] is not runnable"); EasyMock.reset(taskInfoProvider); - expect(taskInfoProvider.getTaskLocation(TEST_ID)) - .andReturn(new TaskLocation(TEST_HOST, TEST_PORT, TEST_TLS_PORT)) - .anyTimes(); - expect(taskInfoProvider.getTaskStatus(TEST_ID)) - .andReturn(Optional.of(TaskStatus.failure(TEST_ID))) - .anyTimes(); + EasyMock.expect(taskInfoProvider.getTaskLocation(TEST_ID)) + .andReturn(new TaskLocation(TEST_HOST, TEST_PORT, TEST_TLS_PORT)) + .anyTimes(); + EasyMock.expect(taskInfoProvider.getTaskStatus(TEST_ID)) + .andReturn(Optional.of(TaskStatus.failure(TEST_ID))) + .anyTimes(); replayAll(); client.getCurrentOffsets(TEST_ID, true); @@ -186,9 
+184,9 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport expectedException.expect(RuntimeException.class); expectedException.expectMessage("org.apache.druid.java.util.common.IOE: Received status [500] and content []"); - expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.INTERNAL_SERVER_ERROR).times(2); - expect(responseHolder.getContent()).andReturn(""); - expect( + EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.INTERNAL_SERVER_ERROR).times(2); + EasyMock.expect(responseHolder.getContent()).andReturn(""); + EasyMock.expect( httpClient.go( EasyMock.anyObject(Request.class), EasyMock.anyObject(FullResponseHandler.class), @@ -209,9 +207,9 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport expectedException.expect(IAE.class); expectedException.expectMessage("Received 400 Bad Request with body:"); - expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.BAD_REQUEST).times(2); - expect(responseHolder.getContent()).andReturn(""); - expect( + EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.BAD_REQUEST).times(2); + EasyMock.expect(responseHolder.getContent()).andReturn(""); + EasyMock.expect( httpClient.go( EasyMock.anyObject(Request.class), EasyMock.anyObject(FullResponseHandler.class), @@ -229,14 +227,14 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport @Test public void testTaskLocationMismatch() { - expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.NOT_FOUND).times(3) - .andReturn(HttpResponseStatus.OK); - expect(responseHolder.getResponse()).andReturn(response); - expect(responseHolder.getContent()).andReturn("").times(2) - .andReturn("{}"); - expect(response.headers()).andReturn(headers); - expect(headers.get("X-Druid-Task-Id")).andReturn("a-different-task-id"); - expect( + EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.NOT_FOUND).times(3) + .andReturn(HttpResponseStatus.OK); + EasyMock.expect(responseHolder.getResponse()).andReturn(response); + EasyMock.expect(responseHolder.getContent()).andReturn("").times(2) + .andReturn("{}"); + EasyMock.expect(response.headers()).andReturn(headers); + EasyMock.expect(headers.get("X-Druid-Task-Id")).andReturn("a-different-task-id"); + EasyMock.expect( httpClient.go( EasyMock.anyObject(Request.class), EasyMock.anyObject(FullResponseHandler.class), @@ -257,9 +255,9 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport public void testGetCurrentOffsets() throws Exception { Capture captured = Capture.newInstance(); - expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK); - expect(responseHolder.getContent()).andReturn("{\"0\":1, \"1\":10}"); - expect(httpClient.go( + EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK); + EasyMock.expect(responseHolder.getContent()).andReturn("{\"0\":1, \"1\":10}"); + EasyMock.expect(httpClient.go( EasyMock.capture(captured), EasyMock.anyObject(FullResponseHandler.class), EasyMock.eq(TEST_HTTP_TIMEOUT) @@ -290,15 +288,15 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport client = new TestableKafkaIndexTaskClient(httpClient, objectMapper, taskInfoProvider, 3); Capture captured = Capture.newInstance(CaptureType.ALL); - expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.NOT_FOUND).times(6) - .andReturn(HttpResponseStatus.OK).times(1); - expect(responseHolder.getContent()).andReturn("").times(4) - .andReturn("{\"0\":1, \"1\":10}"); - expect(responseHolder.getResponse()).andReturn(response).times(2); 
- expect(response.headers()).andReturn(headers).times(2); - expect(headers.get("X-Druid-Task-Id")).andReturn(TEST_ID).times(2); + EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.NOT_FOUND).times(6) + .andReturn(HttpResponseStatus.OK).times(1); + EasyMock.expect(responseHolder.getContent()).andReturn("").times(4) + .andReturn("{\"0\":1, \"1\":10}"); + EasyMock.expect(responseHolder.getResponse()).andReturn(response).times(2); + EasyMock.expect(response.headers()).andReturn(headers).times(2); + EasyMock.expect(headers.get("X-Druid-Task-Id")).andReturn(TEST_ID).times(2); - expect(httpClient.go( + EasyMock.expect(httpClient.go( EasyMock.capture(captured), EasyMock.anyObject(FullResponseHandler.class), EasyMock.eq(TEST_HTTP_TIMEOUT) @@ -334,13 +332,13 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport client = new TestableKafkaIndexTaskClient(httpClient, objectMapper, taskInfoProvider, 2); - expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.NOT_FOUND).anyTimes(); - expect(responseHolder.getContent()).andReturn("").anyTimes(); - expect(responseHolder.getResponse()).andReturn(response).anyTimes(); - expect(response.headers()).andReturn(headers).anyTimes(); - expect(headers.get("X-Druid-Task-Id")).andReturn(TEST_ID).anyTimes(); + EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.NOT_FOUND).anyTimes(); + EasyMock.expect(responseHolder.getContent()).andReturn("").anyTimes(); + EasyMock.expect(responseHolder.getResponse()).andReturn(response).anyTimes(); + EasyMock.expect(response.headers()).andReturn(headers).anyTimes(); + EasyMock.expect(headers.get("X-Druid-Task-Id")).andReturn(TEST_ID).anyTimes(); - expect( + EasyMock.expect( httpClient.go( EasyMock.anyObject(Request.class), EasyMock.anyObject(FullResponseHandler.class), @@ -357,9 +355,9 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport public void testGetEndOffsets() throws Exception { Capture captured = Capture.newInstance(); - expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK); - expect(responseHolder.getContent()).andReturn("{\"0\":1, \"1\":10}"); - expect(httpClient.go( + EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK); + EasyMock.expect(responseHolder.getContent()).andReturn("{\"0\":1, \"1\":10}"); + EasyMock.expect(httpClient.go( EasyMock.capture(captured), EasyMock.anyObject(FullResponseHandler.class), EasyMock.eq(TEST_HTTP_TIMEOUT) @@ -391,13 +389,13 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport DateTime now = DateTimes.nowUtc(); Capture captured = Capture.newInstance(); - expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.NOT_FOUND).times(3) - .andReturn(HttpResponseStatus.OK); - expect(responseHolder.getResponse()).andReturn(response); - expect(response.headers()).andReturn(headers); - expect(headers.get("X-Druid-Task-Id")).andReturn(null); - expect(responseHolder.getContent()).andReturn(String.valueOf(now.getMillis())).anyTimes(); - expect(httpClient.go( + EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.NOT_FOUND).times(3) + .andReturn(HttpResponseStatus.OK); + EasyMock.expect(responseHolder.getResponse()).andReturn(response); + EasyMock.expect(response.headers()).andReturn(headers); + EasyMock.expect(headers.get("X-Druid-Task-Id")).andReturn(null); + EasyMock.expect(responseHolder.getContent()).andReturn(String.valueOf(now.getMillis())).anyTimes(); + EasyMock.expect(httpClient.go( EasyMock.capture(captured), 
EasyMock.anyObject(FullResponseHandler.class), EasyMock.eq(TEST_HTTP_TIMEOUT) @@ -426,9 +424,9 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport Status status = Status.READING; Capture captured = Capture.newInstance(); - expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK); - expect(responseHolder.getContent()).andReturn(StringUtils.format("\"%s\"", status.toString())).anyTimes(); - expect(httpClient.go( + EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK); + EasyMock.expect(responseHolder.getContent()).andReturn(StringUtils.format("\"%s\"", status.toString())).anyTimes(); + EasyMock.expect(httpClient.go( EasyMock.capture(captured), EasyMock.anyObject(FullResponseHandler.class), EasyMock.eq(TEST_HTTP_TIMEOUT) @@ -455,9 +453,9 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport public void testPause() throws Exception { Capture captured = Capture.newInstance(); - expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).times(2); - expect(responseHolder.getContent()).andReturn("{\"0\":1, \"1\":10}").anyTimes(); - expect(httpClient.go( + EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).times(2); + EasyMock.expect(responseHolder.getContent()).andReturn("{\"0\":1, \"1\":10}").anyTimes(); + EasyMock.expect(httpClient.go( EasyMock.capture(captured), EasyMock.anyObject(FullResponseHandler.class), EasyMock.eq(TEST_HTTP_TIMEOUT) @@ -489,25 +487,25 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport Capture captured2 = Capture.newInstance(); Capture captured3 = Capture.newInstance(); // one time in IndexTaskClient.submitRequest() and another in KafkaIndexTaskClient.pause() - expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.ACCEPTED).times(2) - .andReturn(HttpResponseStatus.OK).anyTimes(); - expect(responseHolder.getContent()).andReturn("\"PAUSED\"").times(2) - .andReturn("{\"0\":1, \"1\":10}").anyTimes(); - expect(httpClient.go( + EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.ACCEPTED).times(2) + .andReturn(HttpResponseStatus.OK).anyTimes(); + EasyMock.expect(responseHolder.getContent()).andReturn("\"PAUSED\"").times(2) + .andReturn("{\"0\":1, \"1\":10}").anyTimes(); + EasyMock.expect(httpClient.go( EasyMock.capture(captured), EasyMock.anyObject(FullResponseHandler.class), EasyMock.eq(TEST_HTTP_TIMEOUT) )).andReturn( Futures.immediateFuture(responseHolder) ); - expect(httpClient.go( + EasyMock.expect(httpClient.go( EasyMock.capture(captured2), EasyMock.anyObject(FullResponseHandler.class), EasyMock.eq(TEST_HTTP_TIMEOUT) )).andReturn( Futures.immediateFuture(responseHolder) ); - expect(httpClient.go( + EasyMock.expect(httpClient.go( EasyMock.capture(captured3), EasyMock.anyObject(FullResponseHandler.class), EasyMock.eq(TEST_HTTP_TIMEOUT) @@ -551,8 +549,8 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport public void testResume() throws Exception { Capture captured = Capture.newInstance(); - expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes(); - expect(httpClient.go( + EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes(); + EasyMock.expect(httpClient.go( EasyMock.capture(captured), EasyMock.anyObject(FullResponseHandler.class), EasyMock.eq(TEST_HTTP_TIMEOUT) @@ -579,8 +577,8 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport Map endOffsets = ImmutableMap.of(0, 15L, 1, 120L); Capture captured = Capture.newInstance(); - 
expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes(); - expect(httpClient.go( + EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes(); + EasyMock.expect(httpClient.go( EasyMock.capture(captured), EasyMock.anyObject(FullResponseHandler.class), EasyMock.eq(TEST_HTTP_TIMEOUT) @@ -608,8 +606,8 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport Map endOffsets = ImmutableMap.of(0, 15L, 1, 120L); Capture captured = Capture.newInstance(); - expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes(); - expect(httpClient.go( + EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes(); + EasyMock.expect(httpClient.go( EasyMock.capture(captured), EasyMock.anyObject(FullResponseHandler.class), EasyMock.eq(TEST_HTTP_TIMEOUT) @@ -635,8 +633,8 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport public void testStop() throws Exception { Capture captured = Capture.newInstance(); - expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes(); - expect(httpClient.go( + EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes(); + EasyMock.expect(httpClient.go( EasyMock.capture(captured), EasyMock.anyObject(FullResponseHandler.class), EasyMock.eq(TEST_HTTP_TIMEOUT) @@ -661,8 +659,8 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport public void testStopAndPublish() throws Exception { Capture captured = Capture.newInstance(); - expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes(); - expect(httpClient.go( + EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes(); + EasyMock.expect(httpClient.go( EasyMock.capture(captured), EasyMock.anyObject(FullResponseHandler.class), EasyMock.eq(TEST_HTTP_TIMEOUT) @@ -688,8 +686,8 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport { final int numRequests = TEST_IDS.size(); Capture captured = Capture.newInstance(CaptureType.ALL); - expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes(); - expect(httpClient.go( + EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes(); + EasyMock.expect(httpClient.go( EasyMock.capture(captured), EasyMock.anyObject(FullResponseHandler.class), EasyMock.eq(TEST_HTTP_TIMEOUT) @@ -724,8 +722,8 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport { final int numRequests = TEST_IDS.size(); Capture captured = Capture.newInstance(CaptureType.ALL); - expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes(); - expect(httpClient.go( + EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes(); + EasyMock.expect(httpClient.go( EasyMock.capture(captured), EasyMock.anyObject(FullResponseHandler.class), EasyMock.eq(TEST_HTTP_TIMEOUT) @@ -760,9 +758,9 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport { final int numRequests = TEST_IDS.size(); Capture captured = Capture.newInstance(CaptureType.ALL); - expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes(); - expect(responseHolder.getContent()).andReturn("{\"0\":\"1\"}").anyTimes(); - expect(httpClient.go( + EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes(); + EasyMock.expect(responseHolder.getContent()).andReturn("{\"0\":\"1\"}").anyTimes(); + EasyMock.expect(httpClient.go( EasyMock.capture(captured), EasyMock.anyObject(FullResponseHandler.class), 
EasyMock.eq(TEST_HTTP_TIMEOUT) @@ -797,9 +795,9 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport { final int numRequests = TEST_IDS.size(); Capture captured = Capture.newInstance(CaptureType.ALL); - expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes(); - expect(responseHolder.getContent()).andReturn("\"READING\"").anyTimes(); - expect(httpClient.go( + EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes(); + EasyMock.expect(responseHolder.getContent()).andReturn("\"READING\"").anyTimes(); + EasyMock.expect(httpClient.go( EasyMock.capture(captured), EasyMock.anyObject(FullResponseHandler.class), EasyMock.eq(TEST_HTTP_TIMEOUT) @@ -835,9 +833,9 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport final DateTime now = DateTimes.nowUtc(); final int numRequests = TEST_IDS.size(); Capture captured = Capture.newInstance(CaptureType.ALL); - expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes(); - expect(responseHolder.getContent()).andReturn(String.valueOf(now.getMillis())).anyTimes(); - expect(httpClient.go( + EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes(); + EasyMock.expect(responseHolder.getContent()).andReturn(String.valueOf(now.getMillis())).anyTimes(); + EasyMock.expect(httpClient.go( EasyMock.capture(captured), EasyMock.anyObject(FullResponseHandler.class), EasyMock.eq(TEST_HTTP_TIMEOUT) @@ -872,9 +870,9 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport { final int numRequests = TEST_IDS.size(); Capture captured = Capture.newInstance(CaptureType.ALL); - expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes(); - expect(responseHolder.getContent()).andReturn("{\"0\":\"1\"}").anyTimes(); - expect(httpClient.go( + EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes(); + EasyMock.expect(responseHolder.getContent()).andReturn("{\"0\":\"1\"}").anyTimes(); + EasyMock.expect(httpClient.go( EasyMock.capture(captured), EasyMock.anyObject(FullResponseHandler.class), EasyMock.eq(TEST_HTTP_TIMEOUT) @@ -909,9 +907,9 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport { final int numRequests = TEST_IDS.size(); Capture captured = Capture.newInstance(CaptureType.ALL); - expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes(); - expect(responseHolder.getContent()).andReturn("{\"0\":\"1\"}").anyTimes(); - expect(httpClient.go( + EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes(); + EasyMock.expect(responseHolder.getContent()).andReturn("{\"0\":\"1\"}").anyTimes(); + EasyMock.expect(httpClient.go( EasyMock.capture(captured), EasyMock.anyObject(FullResponseHandler.class), EasyMock.eq(TEST_HTTP_TIMEOUT) @@ -947,8 +945,8 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport final Map endOffsets = ImmutableMap.of(0, 15L, 1, 120L); final int numRequests = TEST_IDS.size(); Capture captured = Capture.newInstance(CaptureType.ALL); - expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes(); - expect(httpClient.go( + EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes(); + EasyMock.expect(httpClient.go( EasyMock.capture(captured), EasyMock.anyObject(FullResponseHandler.class), EasyMock.eq(TEST_HTTP_TIMEOUT) @@ -990,8 +988,8 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport final Map endOffsets = ImmutableMap.of(0, 15L, 1, 120L); final int 
numRequests = TEST_IDS.size(); Capture captured = Capture.newInstance(CaptureType.ALL); - expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes(); - expect(httpClient.go( + EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes(); + EasyMock.expect(httpClient.go( EasyMock.capture(captured), EasyMock.anyObject(FullResponseHandler.class), EasyMock.eq(TEST_HTTP_TIMEOUT) @@ -1033,7 +1031,7 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport private class TestableKafkaIndexTaskClient extends KafkaIndexTaskClient { - public TestableKafkaIndexTaskClient( + TestableKafkaIndexTaskClient( HttpClient httpClient, ObjectMapper jsonMapper, TaskInfoProvider taskInfoProvider @@ -1042,7 +1040,7 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport this(httpClient, jsonMapper, taskInfoProvider, TEST_NUM_RETRIES); } - public TestableKafkaIndexTaskClient( + TestableKafkaIndexTaskClient( HttpClient httpClient, ObjectMapper jsonMapper, TaskInfoProvider taskInfoProvider, diff --git a/extensions-core/kafka-indexing-service/src/test/java/org/apache/druid/indexing/kafka/KafkaIndexTaskTest.java b/extensions-core/kafka-indexing-service/src/test/java/org/apache/druid/indexing/kafka/KafkaIndexTaskTest.java index 6c42b3b8562..c9506d6ef20 100644 --- a/extensions-core/kafka-indexing-service/src/test/java/org/apache/druid/indexing/kafka/KafkaIndexTaskTest.java +++ b/extensions-core/kafka-indexing-service/src/test/java/org/apache/druid/indexing/kafka/KafkaIndexTaskTest.java @@ -23,7 +23,6 @@ import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.Module; import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.base.Predicate; import com.google.common.base.Predicates; import com.google.common.collect.FluentIterable; import com.google.common.collect.ImmutableList; @@ -106,11 +105,11 @@ import org.apache.druid.query.DefaultQueryRunnerFactoryConglomerate; import org.apache.druid.query.Druids; import org.apache.druid.query.IntervalChunkingQueryRunnerDecorator; import org.apache.druid.query.Query; +import org.apache.druid.query.QueryPlus; import org.apache.druid.query.QueryRunner; import org.apache.druid.query.QueryRunnerFactory; import org.apache.druid.query.QueryRunnerFactoryConglomerate; import org.apache.druid.query.QueryToolChest; -import org.apache.druid.query.QueryWatcher; import org.apache.druid.query.Result; import org.apache.druid.query.SegmentDescriptor; import org.apache.druid.query.aggregation.AggregatorFactory; @@ -185,8 +184,6 @@ import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; -import static org.apache.druid.query.QueryPlus.wrap; - public class KafkaIndexTaskTest { private static final Logger log = new Logger(KafkaIndexTaskTest.class); @@ -663,7 +660,7 @@ public class KafkaIndexTaskTest final Map nextOffsets = ImmutableMap.copyOf(task.getRunner().getCurrentOffsets()); - Assert.assertTrue(checkpoint2.getPartitionSequenceNumberMap().equals(nextOffsets)); + Assert.assertEquals(checkpoint2.getPartitionSequenceNumberMap(), nextOffsets); task.getRunner().setEndOffsets(nextOffsets, false); Assert.assertEquals(TaskState.SUCCESS, future.get().getStatusCode()); @@ -783,7 +780,7 @@ public class KafkaIndexTaskTest Thread.sleep(10); } final Map currentOffsets = ImmutableMap.copyOf(task.getRunner().getCurrentOffsets()); - 
Assert.assertTrue(checkpoint.getPartitionSequenceNumberMap().equals(currentOffsets)); + Assert.assertEquals(checkpoint.getPartitionSequenceNumberMap(), currentOffsets); task.getRunner().setEndOffsets(currentOffsets, false); Assert.assertEquals(TaskState.SUCCESS, future.get().getStatusCode()); @@ -1287,7 +1284,7 @@ public class KafkaIndexTaskTest // Wait for task to exit Assert.assertEquals(TaskState.SUCCESS, status.getStatusCode()); - Assert.assertEquals(null, status.getErrorMsg()); + Assert.assertNull(status.getErrorMsg()); // Check metrics Assert.assertEquals(4, task.getRunner().getRowIngestionMeters().getProcessed()); @@ -2079,9 +2076,9 @@ public class KafkaIndexTaskTest } for (int i = 0; i < 5; i++) { - Assert.assertEquals(task.getRunner().getStatus(), Status.READING); + Assert.assertEquals(Status.READING, task.getRunner().getStatus()); // Offset should not be reset - Assert.assertTrue(task.getRunner().getCurrentOffsets().get(0) == 200L); + Assert.assertEquals(200L, (long) task.getRunner().getCurrentOffsets().get(0)); } } @@ -2362,9 +2359,7 @@ public class KafkaIndexTaskTest { ScanQuery query = new Druids.ScanQueryBuilder().dataSource( DATA_SCHEMA.getDataSource()).intervals(spec).build(); - List results = - task.getQueryRunner(query).run(wrap(query), new HashMap<>()).toList(); - return results; + return task.getQueryRunner(query).run(QueryPlus.wrap(query), new HashMap<>()).toList(); } private void insertData() throws ExecutionException, InterruptedException @@ -2381,6 +2376,7 @@ public class KafkaIndexTaskTest private ListenableFuture runTask(final Task task) { + //noinspection CatchMayIgnoreException try { taskStorage.insert(task, TaskStatus.running(task.getId())); } @@ -2413,14 +2409,7 @@ public class KafkaIndexTaskTest { return Iterables.find( taskLockbox.findLocksForTask(task), - new Predicate() - { - @Override - public boolean apply(TaskLock lock) - { - return lock.getInterval().contains(interval); - } - } + lock -> lock.getInterval().contains(interval) ); } @@ -2535,13 +2524,8 @@ public class KafkaIndexTaskTest new TimeseriesQueryRunnerFactory( new TimeseriesQueryQueryToolChest(queryRunnerDecorator), new TimeseriesQueryEngine(), - new QueryWatcher() - { - @Override - public void registerQuery(Query query, ListenableFuture future) - { - // do nothing - } + (query, future) -> { + // do nothing } ) ) @@ -2773,7 +2757,7 @@ public class KafkaIndexTaskTest return values; } - public long countEvents(final Task task) + private long countEvents(final Task task) { // Do a query. TimeseriesQuery query = Druids.newTimeseriesQueryBuilder() @@ -2787,7 +2771,7 @@ public class KafkaIndexTaskTest .build(); List> results = - task.getQueryRunner(query).run(wrap(query), ImmutableMap.of()).toList(); + task.getQueryRunner(query).run(QueryPlus.wrap(query), ImmutableMap.of()).toList(); return results.isEmpty() ? 
0L : DimensionHandlerUtils.nullToZero(results.get(0).getValue().getLongMetric("rows")); } diff --git a/extensions-core/kafka-indexing-service/src/test/java/org/apache/druid/indexing/kafka/supervisor/KafkaSupervisorTest.java b/extensions-core/kafka-indexing-service/src/test/java/org/apache/druid/indexing/kafka/supervisor/KafkaSupervisorTest.java index b19fea04857..aff5639cd0e 100644 --- a/extensions-core/kafka-indexing-service/src/test/java/org/apache/druid/indexing/kafka/supervisor/KafkaSupervisorTest.java +++ b/extensions-core/kafka-indexing-service/src/test/java/org/apache/druid/indexing/kafka/supervisor/KafkaSupervisorTest.java @@ -19,7 +19,6 @@ package org.apache.druid.indexing.kafka.supervisor; -import com.fasterxml.jackson.core.JsonProcessingException; import com.fasterxml.jackson.databind.ObjectMapper; import com.google.common.base.Optional; import com.google.common.collect.ImmutableList; @@ -116,15 +115,7 @@ import java.util.List; import java.util.Map; import java.util.Properties; import java.util.TreeMap; -import java.util.concurrent.ExecutionException; import java.util.concurrent.Executor; -import java.util.concurrent.TimeoutException; - -import static org.easymock.EasyMock.anyObject; -import static org.easymock.EasyMock.capture; -import static org.easymock.EasyMock.expect; -import static org.easymock.EasyMock.expectLastCall; -import static org.easymock.EasyMock.replay; @RunWith(Parameterized.class) public class KafkaSupervisorTest extends EasyMockSupport @@ -162,6 +153,7 @@ public class KafkaSupervisorTest extends EasyMockSupport private static String getTopic() { + //noinspection StringConcatenationMissingWhitespace return TOPIC_PREFIX + topicPostfix++; } @@ -270,16 +262,16 @@ public class KafkaSupervisorTest extends EasyMockSupport addSomeEvents(1); Capture captured = Capture.newInstance(); - expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); - expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); - expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); - expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( + EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); + EasyMock.expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); + EasyMock.expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); + EasyMock.expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( new KafkaDataSourceMetadata( null ) ).anyTimes(); - expect(taskQueue.add(capture(captured))).andReturn(true); - taskRunner.registerListener(anyObject(TaskRunnerListener.class), anyObject(Executor.class)); + EasyMock.expect(taskQueue.add(EasyMock.capture(captured))).andReturn(true); + taskRunner.registerListener(EasyMock.anyObject(TaskRunnerListener.class), EasyMock.anyObject(Executor.class)); replayAll(); supervisor.start(); @@ -325,25 +317,21 @@ public class KafkaSupervisorTest extends EasyMockSupport addSomeEvents(1); Capture captured = Capture.newInstance(); - expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); - expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); - expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); - expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( + EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); + 
EasyMock.expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); + EasyMock.expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); + EasyMock.expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( new KafkaDataSourceMetadata( null ) ).anyTimes(); - expect(taskQueue.add(capture(captured))).andReturn(true); - taskRunner.registerListener(anyObject(TaskRunnerListener.class), anyObject(Executor.class)); + EasyMock.expect(taskQueue.add(EasyMock.capture(captured))).andReturn(true); + taskRunner.registerListener(EasyMock.anyObject(TaskRunnerListener.class), EasyMock.anyObject(Executor.class)); replayAll(); supervisor.start(); supervisor.runInternal(); verifyAll(); - - KafkaIndexTask task = captured.getValue(); - KafkaIndexTaskIOConfig taskConfig = task.getIOConfig(); - } @Test @@ -353,15 +341,15 @@ public class KafkaSupervisorTest extends EasyMockSupport addSomeEvents(1); Capture captured = Capture.newInstance(CaptureType.ALL); - expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); - expect(taskMaster.getTaskRunner()).andReturn(Optional.absent()).anyTimes(); - expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); - expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( + EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); + EasyMock.expect(taskMaster.getTaskRunner()).andReturn(Optional.absent()).anyTimes(); + EasyMock.expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); + EasyMock.expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( new KafkaDataSourceMetadata( null ) ).anyTimes(); - expect(taskQueue.add(capture(captured))).andReturn(true).times(2); + EasyMock.expect(taskQueue.add(EasyMock.capture(captured))).andReturn(true).times(2); replayAll(); supervisor.start(); @@ -408,15 +396,15 @@ public class KafkaSupervisorTest extends EasyMockSupport addSomeEvents(1); Capture captured = Capture.newInstance(CaptureType.ALL); - expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); - expect(taskMaster.getTaskRunner()).andReturn(Optional.absent()).anyTimes(); - expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); - expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( + EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); + EasyMock.expect(taskMaster.getTaskRunner()).andReturn(Optional.absent()).anyTimes(); + EasyMock.expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); + EasyMock.expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( new KafkaDataSourceMetadata( null ) ).anyTimes(); - expect(taskQueue.add(capture(captured))).andReturn(true).times(2); + EasyMock.expect(taskQueue.add(EasyMock.capture(captured))).andReturn(true).times(2); replayAll(); supervisor.start(); @@ -463,15 +451,15 @@ public class KafkaSupervisorTest extends EasyMockSupport addSomeEvents(1); Capture captured = Capture.newInstance(CaptureType.ALL); - expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); - expect(taskMaster.getTaskRunner()).andReturn(Optional.absent()).anyTimes(); - expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); - expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( + 
EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); + EasyMock.expect(taskMaster.getTaskRunner()).andReturn(Optional.absent()).anyTimes(); + EasyMock.expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); + EasyMock.expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( new KafkaDataSourceMetadata( null ) ).anyTimes(); - expect(taskQueue.add(capture(captured))).andReturn(true).times(2); + EasyMock.expect(taskQueue.add(EasyMock.capture(captured))).andReturn(true).times(2); replayAll(); supervisor.start(); @@ -502,15 +490,15 @@ public class KafkaSupervisorTest extends EasyMockSupport addSomeEvents(1); Capture captured = Capture.newInstance(CaptureType.ALL); - expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); - expect(taskMaster.getTaskRunner()).andReturn(Optional.absent()).anyTimes(); - expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); - expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( + EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); + EasyMock.expect(taskMaster.getTaskRunner()).andReturn(Optional.absent()).anyTimes(); + EasyMock.expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); + EasyMock.expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( new KafkaDataSourceMetadata( null ) ).anyTimes(); - expect(taskQueue.add(capture(captured))).andReturn(true).times(2); + EasyMock.expect(taskQueue.add(EasyMock.capture(captured))).andReturn(true).times(2); replayAll(); supervisor.start(); @@ -534,25 +522,25 @@ public class KafkaSupervisorTest extends EasyMockSupport ); } - @Test /** * Test generating the starting offsets from the partition high water marks in Kafka. */ + @Test public void testLatestOffset() throws Exception { supervisor = getTestableSupervisor(1, 1, false, "PT1H", null, null); addSomeEvents(1100); Capture captured = Capture.newInstance(); - expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); - expect(taskMaster.getTaskRunner()).andReturn(Optional.absent()).anyTimes(); - expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); - expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( + EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); + EasyMock.expect(taskMaster.getTaskRunner()).andReturn(Optional.absent()).anyTimes(); + EasyMock.expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); + EasyMock.expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( new KafkaDataSourceMetadata( null ) ).anyTimes(); - expect(taskQueue.add(capture(captured))).andReturn(true); + EasyMock.expect(taskQueue.add(EasyMock.capture(captured))).andReturn(true); replayAll(); supervisor.start(); @@ -574,21 +562,21 @@ public class KafkaSupervisorTest extends EasyMockSupport ); } - @Test /** * Test generating the starting offsets from the partition data stored in druid_dataSource which contains the * offsets of the last built segments. 
*/ + @Test public void testDatasourceMetadata() throws Exception { supervisor = getTestableSupervisor(1, 1, true, "PT1H", null, null); addSomeEvents(100); Capture captured = Capture.newInstance(); - expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); - expect(taskMaster.getTaskRunner()).andReturn(Optional.absent()).anyTimes(); - expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); - expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( + EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); + EasyMock.expect(taskMaster.getTaskRunner()).andReturn(Optional.absent()).anyTimes(); + EasyMock.expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); + EasyMock.expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( new KafkaDataSourceMetadata( new SeekableStreamStartSequenceNumbers<>( topic, @@ -597,7 +585,7 @@ public class KafkaSupervisorTest extends EasyMockSupport ) ) ).anyTimes(); - expect(taskQueue.add(capture(captured))).andReturn(true); + EasyMock.expect(taskQueue.add(EasyMock.capture(captured))).andReturn(true); replayAll(); supervisor.start(); @@ -627,9 +615,9 @@ public class KafkaSupervisorTest extends EasyMockSupport supervisor = getTestableSupervisor(1, 1, true, "PT1H", null, null); addSomeEvents(1); - expect(taskMaster.getTaskRunner()).andReturn(Optional.absent()).anyTimes(); - expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); - expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( + EasyMock.expect(taskMaster.getTaskRunner()).andReturn(Optional.absent()).anyTimes(); + EasyMock.expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); + EasyMock.expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( new KafkaDataSourceMetadata( new SeekableStreamStartSequenceNumbers<>( topic, @@ -680,25 +668,25 @@ public class KafkaSupervisorTest extends EasyMockSupport List existingTasks = ImmutableList.of(id1, id2); - expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); - expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); - expect(taskRunner.getRunningTasks()).andReturn(Collections.EMPTY_LIST).anyTimes(); - expect(taskStorage.getActiveTasks()).andReturn(existingTasks).anyTimes(); - expect(taskClient.getStatusAsync(EasyMock.anyString())) - .andReturn(Futures.immediateFuture(Status.NOT_STARTED)) - .anyTimes(); - expect(taskClient.getStartTimeAsync(EasyMock.anyString())) - .andReturn(Futures.immediateFuture(DateTimes.nowUtc())) - .anyTimes(); - expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( + EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); + EasyMock.expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); + EasyMock.expect(taskRunner.getRunningTasks()).andReturn(Collections.EMPTY_LIST).anyTimes(); + EasyMock.expect(taskStorage.getActiveTasks()).andReturn(existingTasks).anyTimes(); + EasyMock.expect(taskClient.getStatusAsync(EasyMock.anyString())) + .andReturn(Futures.immediateFuture(Status.NOT_STARTED)) + .anyTimes(); + EasyMock.expect(taskClient.getStartTimeAsync(EasyMock.anyString())) + .andReturn(Futures.immediateFuture(DateTimes.nowUtc())) + .anyTimes(); + EasyMock.expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( 
new KafkaDataSourceMetadata( null ) ).anyTimes(); - taskRunner.registerListener(anyObject(TaskRunnerListener.class), anyObject(Executor.class)); + taskRunner.registerListener(EasyMock.anyObject(TaskRunnerListener.class), EasyMock.anyObject(Executor.class)); - expect(taskQueue.add(anyObject(Task.class))).andReturn(true).anyTimes(); + EasyMock.expect(taskQueue.add(EasyMock.anyObject(Task.class))).andReturn(true).anyTimes(); replayAll(); @@ -764,47 +752,47 @@ public class KafkaSupervisorTest extends EasyMockSupport List existingTasks = ImmutableList.of(id1, id2, id3, id4, id5); - expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); - expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); - expect(taskRunner.getRunningTasks()).andReturn(Collections.emptyList()).anyTimes(); - expect(taskStorage.getActiveTasks()).andReturn(existingTasks).anyTimes(); - expect(taskStorage.getStatus("id1")).andReturn(Optional.of(TaskStatus.running("id1"))).anyTimes(); - expect(taskStorage.getStatus("id2")).andReturn(Optional.of(TaskStatus.running("id2"))).anyTimes(); - expect(taskStorage.getStatus("id3")).andReturn(Optional.of(TaskStatus.running("id3"))).anyTimes(); - expect(taskStorage.getStatus("id4")).andReturn(Optional.of(TaskStatus.running("id4"))).anyTimes(); - expect(taskStorage.getStatus("id5")).andReturn(Optional.of(TaskStatus.running("id5"))).anyTimes(); - expect(taskStorage.getTask("id1")).andReturn(Optional.of(id1)).anyTimes(); - expect(taskStorage.getTask("id2")).andReturn(Optional.of(id2)).anyTimes(); - expect(taskStorage.getTask("id3")).andReturn(Optional.of(id3)).anyTimes(); - expect(taskStorage.getTask("id4")).andReturn(Optional.of(id4)).anyTimes(); - expect(taskStorage.getTask("id5")).andReturn(Optional.of(id5)).anyTimes(); - expect(taskClient.getStatusAsync(EasyMock.anyString())) - .andReturn(Futures.immediateFuture(Status.NOT_STARTED)) - .anyTimes(); - expect(taskClient.getStartTimeAsync(EasyMock.anyString())) - .andReturn(Futures.immediateFuture(DateTimes.nowUtc())) - .anyTimes(); - expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( + EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); + EasyMock.expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); + EasyMock.expect(taskRunner.getRunningTasks()).andReturn(Collections.emptyList()).anyTimes(); + EasyMock.expect(taskStorage.getActiveTasks()).andReturn(existingTasks).anyTimes(); + EasyMock.expect(taskStorage.getStatus("id1")).andReturn(Optional.of(TaskStatus.running("id1"))).anyTimes(); + EasyMock.expect(taskStorage.getStatus("id2")).andReturn(Optional.of(TaskStatus.running("id2"))).anyTimes(); + EasyMock.expect(taskStorage.getStatus("id3")).andReturn(Optional.of(TaskStatus.running("id3"))).anyTimes(); + EasyMock.expect(taskStorage.getStatus("id4")).andReturn(Optional.of(TaskStatus.running("id4"))).anyTimes(); + EasyMock.expect(taskStorage.getStatus("id5")).andReturn(Optional.of(TaskStatus.running("id5"))).anyTimes(); + EasyMock.expect(taskStorage.getTask("id1")).andReturn(Optional.of(id1)).anyTimes(); + EasyMock.expect(taskStorage.getTask("id2")).andReturn(Optional.of(id2)).anyTimes(); + EasyMock.expect(taskStorage.getTask("id3")).andReturn(Optional.of(id3)).anyTimes(); + EasyMock.expect(taskStorage.getTask("id4")).andReturn(Optional.of(id4)).anyTimes(); + EasyMock.expect(taskStorage.getTask("id5")).andReturn(Optional.of(id5)).anyTimes(); + 
EasyMock.expect(taskClient.getStatusAsync(EasyMock.anyString())) + .andReturn(Futures.immediateFuture(Status.NOT_STARTED)) + .anyTimes(); + EasyMock.expect(taskClient.getStartTimeAsync(EasyMock.anyString())) + .andReturn(Futures.immediateFuture(DateTimes.nowUtc())) + .anyTimes(); + EasyMock.expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( new KafkaDataSourceMetadata( null ) ).anyTimes(); - expect(taskClient.stopAsync("id3", false)).andReturn(Futures.immediateFuture(true)); - expect(taskClient.stopAsync("id4", false)).andReturn(Futures.immediateFuture(false)); - expect(taskClient.stopAsync("id5", false)).andReturn(Futures.immediateFuture((Boolean) null)); + EasyMock.expect(taskClient.stopAsync("id3", false)).andReturn(Futures.immediateFuture(true)); + EasyMock.expect(taskClient.stopAsync("id4", false)).andReturn(Futures.immediateFuture(false)); + EasyMock.expect(taskClient.stopAsync("id5", false)).andReturn(Futures.immediateFuture(null)); TreeMap> checkpoints1 = new TreeMap<>(); checkpoints1.put(0, ImmutableMap.of(0, 0L, 2, 0L)); TreeMap> checkpoints2 = new TreeMap<>(); checkpoints2.put(0, ImmutableMap.of(1, 0L)); - expect(taskClient.getCheckpointsAsync(EasyMock.contains("id1"), EasyMock.anyBoolean())) - .andReturn(Futures.immediateFuture(checkpoints1)) - .times(1); - expect(taskClient.getCheckpointsAsync(EasyMock.contains("id2"), EasyMock.anyBoolean())) - .andReturn(Futures.immediateFuture(checkpoints2)) - .times(1); + EasyMock.expect(taskClient.getCheckpointsAsync(EasyMock.contains("id1"), EasyMock.anyBoolean())) + .andReturn(Futures.immediateFuture(checkpoints1)) + .times(1); + EasyMock.expect(taskClient.getCheckpointsAsync(EasyMock.contains("id2"), EasyMock.anyBoolean())) + .andReturn(Futures.immediateFuture(checkpoints2)) + .times(1); - taskRunner.registerListener(anyObject(TaskRunnerListener.class), anyObject(Executor.class)); + taskRunner.registerListener(EasyMock.anyObject(TaskRunnerListener.class), EasyMock.anyObject(Executor.class)); taskQueue.shutdown("id4", "Task [%s] failed to stop in a timely manner, killing task", "id4"); taskQueue.shutdown("id5", "Task [%s] failed to stop in a timely manner, killing task", "id5"); replayAll(); @@ -822,35 +810,35 @@ public class KafkaSupervisorTest extends EasyMockSupport addSomeEvents(1); Capture captured = Capture.newInstance(CaptureType.ALL); - expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); - expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); - expect(taskRunner.getRunningTasks()).andReturn(Collections.EMPTY_LIST).anyTimes(); - expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); - expect(taskClient.getStatusAsync(EasyMock.anyString())) - .andReturn(Futures.immediateFuture(Status.NOT_STARTED)) - .anyTimes(); - expect(taskClient.getStartTimeAsync(EasyMock.anyString())) - .andReturn(Futures.immediateFuture(DateTimes.nowUtc())) - .anyTimes(); - expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( + EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); + EasyMock.expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); + EasyMock.expect(taskRunner.getRunningTasks()).andReturn(Collections.EMPTY_LIST).anyTimes(); + EasyMock.expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); + EasyMock.expect(taskClient.getStatusAsync(EasyMock.anyString())) + .andReturn(Futures.immediateFuture(Status.NOT_STARTED)) + 
.anyTimes(); + EasyMock.expect(taskClient.getStartTimeAsync(EasyMock.anyString())) + .andReturn(Futures.immediateFuture(DateTimes.nowUtc())) + .anyTimes(); + EasyMock.expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( new KafkaDataSourceMetadata( null ) ).anyTimes(); - expect(taskQueue.add(capture(captured))).andReturn(true).times(4); + EasyMock.expect(taskQueue.add(EasyMock.capture(captured))).andReturn(true).times(4); TreeMap> checkpoints1 = new TreeMap<>(); checkpoints1.put(0, ImmutableMap.of(0, 0L, 2, 0L)); TreeMap> checkpoints2 = new TreeMap<>(); checkpoints2.put(0, ImmutableMap.of(1, 0L)); - expect(taskClient.getCheckpointsAsync(EasyMock.contains("sequenceName-0"), EasyMock.anyBoolean())) - .andReturn(Futures.immediateFuture(checkpoints1)) - .anyTimes(); - expect(taskClient.getCheckpointsAsync(EasyMock.contains("sequenceName-1"), EasyMock.anyBoolean())) - .andReturn(Futures.immediateFuture(checkpoints2)) - .anyTimes(); + EasyMock.expect(taskClient.getCheckpointsAsync(EasyMock.contains("sequenceName-0"), EasyMock.anyBoolean())) + .andReturn(Futures.immediateFuture(checkpoints1)) + .anyTimes(); + EasyMock.expect(taskClient.getCheckpointsAsync(EasyMock.contains("sequenceName-1"), EasyMock.anyBoolean())) + .andReturn(Futures.immediateFuture(checkpoints2)) + .anyTimes(); - taskRunner.registerListener(anyObject(TaskRunnerListener.class), anyObject(Executor.class)); + taskRunner.registerListener(EasyMock.anyObject(TaskRunnerListener.class), EasyMock.anyObject(Executor.class)); replayAll(); supervisor.start(); @@ -862,14 +850,14 @@ public class KafkaSupervisorTest extends EasyMockSupport // test that running the main loop again checks the status of the tasks that were created and does nothing if they // are all still running EasyMock.reset(taskStorage); - expect(taskStorage.getActiveTasks()).andReturn(tasks).anyTimes(); + EasyMock.expect(taskStorage.getActiveTasks()).andReturn(tasks).anyTimes(); for (Task task : tasks) { - expect(taskStorage.getStatus(task.getId())) - .andReturn(Optional.of(TaskStatus.running(task.getId()))) - .anyTimes(); - expect(taskStorage.getTask(task.getId())).andReturn(Optional.of(task)).anyTimes(); + EasyMock.expect(taskStorage.getStatus(task.getId())) + .andReturn(Optional.of(TaskStatus.running(task.getId()))) + .anyTimes(); + EasyMock.expect(taskStorage.getTask(task.getId())).andReturn(Optional.of(task)).anyTimes(); } - replay(taskStorage); + EasyMock.replay(taskStorage); supervisor.runInternal(); verifyAll(); @@ -880,19 +868,19 @@ public class KafkaSupervisorTest extends EasyMockSupport KafkaIndexTask iHaveFailed = (KafkaIndexTask) tasks.get(3); EasyMock.reset(taskStorage); EasyMock.reset(taskQueue); - expect(taskStorage.getActiveTasks()).andReturn(imStillAlive).anyTimes(); + EasyMock.expect(taskStorage.getActiveTasks()).andReturn(imStillAlive).anyTimes(); for (Task task : imStillAlive) { - expect(taskStorage.getStatus(task.getId())) - .andReturn(Optional.of(TaskStatus.running(task.getId()))) - .anyTimes(); - expect(taskStorage.getTask(task.getId())).andReturn(Optional.of(task)).anyTimes(); + EasyMock.expect(taskStorage.getStatus(task.getId())) + .andReturn(Optional.of(TaskStatus.running(task.getId()))) + .anyTimes(); + EasyMock.expect(taskStorage.getTask(task.getId())).andReturn(Optional.of(task)).anyTimes(); } - expect(taskStorage.getStatus(iHaveFailed.getId())) - .andReturn(Optional.of(TaskStatus.failure(iHaveFailed.getId()))); - expect(taskStorage.getTask(iHaveFailed.getId())).andReturn(Optional.of((Task) 
iHaveFailed)).anyTimes(); - expect(taskQueue.add(capture(aNewTaskCapture))).andReturn(true); - replay(taskStorage); - replay(taskQueue); + EasyMock.expect(taskStorage.getStatus(iHaveFailed.getId())) + .andReturn(Optional.of(TaskStatus.failure(iHaveFailed.getId()))); + EasyMock.expect(taskStorage.getTask(iHaveFailed.getId())).andReturn(Optional.of(iHaveFailed)).anyTimes(); + EasyMock.expect(taskQueue.add(EasyMock.capture(aNewTaskCapture))).andReturn(true); + EasyMock.replay(taskStorage); + EasyMock.replay(taskQueue); supervisor.runInternal(); verifyAll(); @@ -925,16 +913,16 @@ public class KafkaSupervisorTest extends EasyMockSupport List existingTasks = ImmutableList.of(id1); Capture captured = Capture.newInstance(); - expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); - expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); - expect(taskRunner.getRunningTasks()).andReturn(Collections.EMPTY_LIST).anyTimes(); - expect(taskStorage.getActiveTasks()).andReturn(existingTasks).anyTimes(); - expect(taskStorage.getStatus("id1")).andReturn(Optional.of(TaskStatus.running("id1"))).anyTimes(); - expect(taskStorage.getTask("id1")).andReturn(Optional.of(id1)).anyTimes(); - expect(taskClient.getStatusAsync("id1")).andReturn(Futures.immediateFuture(Status.READING)); - expect(taskClient.getStartTimeAsync("id1")).andReturn(Futures.immediateFuture(now)).anyTimes(); - expect(taskQueue.add(capture(captured))).andReturn(true); - expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( + EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); + EasyMock.expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); + EasyMock.expect(taskRunner.getRunningTasks()).andReturn(Collections.EMPTY_LIST).anyTimes(); + EasyMock.expect(taskStorage.getActiveTasks()).andReturn(existingTasks).anyTimes(); + EasyMock.expect(taskStorage.getStatus("id1")).andReturn(Optional.of(TaskStatus.running("id1"))).anyTimes(); + EasyMock.expect(taskStorage.getTask("id1")).andReturn(Optional.of(id1)).anyTimes(); + EasyMock.expect(taskClient.getStatusAsync("id1")).andReturn(Futures.immediateFuture(Status.READING)); + EasyMock.expect(taskClient.getStartTimeAsync("id1")).andReturn(Futures.immediateFuture(now)).anyTimes(); + EasyMock.expect(taskQueue.add(EasyMock.capture(captured))).andReturn(true); + EasyMock.expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( new KafkaDataSourceMetadata( null ) @@ -942,11 +930,11 @@ public class KafkaSupervisorTest extends EasyMockSupport TreeMap> checkpoints = new TreeMap<>(); checkpoints.put(0, ImmutableMap.of(0, 0L, 2, 0L)); - expect(taskClient.getCheckpointsAsync(EasyMock.contains("id1"), EasyMock.anyBoolean())) - .andReturn(Futures.immediateFuture(checkpoints)) - .times(2); + EasyMock.expect(taskClient.getCheckpointsAsync(EasyMock.contains("id1"), EasyMock.anyBoolean())) + .andReturn(Futures.immediateFuture(checkpoints)) + .times(2); - taskRunner.registerListener(anyObject(TaskRunnerListener.class), anyObject(Executor.class)); + taskRunner.registerListener(EasyMock.anyObject(TaskRunnerListener.class), EasyMock.anyObject(Executor.class)); replayAll(); supervisor.start(); @@ -965,27 +953,27 @@ public class KafkaSupervisorTest extends EasyMockSupport EasyMock.reset(taskClient); // for the newly created replica task - expect(taskClient.getCheckpointsAsync(EasyMock.contains("sequenceName-0"), EasyMock.anyBoolean())) - 
.andReturn(Futures.immediateFuture(checkpoints)) - .times(2); - expect(taskClient.getCheckpointsAsync(EasyMock.contains("id1"), EasyMock.anyBoolean())) - .andReturn(Futures.immediateFuture(checkpoints)) - .times(1); + EasyMock.expect(taskClient.getCheckpointsAsync(EasyMock.contains("sequenceName-0"), EasyMock.anyBoolean())) + .andReturn(Futures.immediateFuture(checkpoints)) + .times(2); + EasyMock.expect(taskClient.getCheckpointsAsync(EasyMock.contains("id1"), EasyMock.anyBoolean())) + .andReturn(Futures.immediateFuture(checkpoints)) + .times(1); - expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of(captured.getValue())).anyTimes(); - expect(taskStorage.getStatus(iHaveFailed.getId())) - .andReturn(Optional.of(TaskStatus.failure(iHaveFailed.getId()))); - expect(taskStorage.getStatus(runningTaskId)) - .andReturn(Optional.of(TaskStatus.running(runningTaskId))) - .anyTimes(); - expect(taskStorage.getTask(iHaveFailed.getId())).andReturn(Optional.of((Task) iHaveFailed)).anyTimes(); - expect(taskStorage.getTask(runningTaskId)).andReturn(Optional.of(captured.getValue())).anyTimes(); - expect(taskClient.getStatusAsync(runningTaskId)).andReturn(Futures.immediateFuture(Status.READING)); - expect(taskClient.getStartTimeAsync(runningTaskId)).andReturn(Futures.immediateFuture(now)).anyTimes(); - expect(taskQueue.add(capture(aNewTaskCapture))).andReturn(true); - replay(taskStorage); - replay(taskQueue); - replay(taskClient); + EasyMock.expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of(captured.getValue())).anyTimes(); + EasyMock.expect(taskStorage.getStatus(iHaveFailed.getId())) + .andReturn(Optional.of(TaskStatus.failure(iHaveFailed.getId()))); + EasyMock.expect(taskStorage.getStatus(runningTaskId)) + .andReturn(Optional.of(TaskStatus.running(runningTaskId))) + .anyTimes(); + EasyMock.expect(taskStorage.getTask(iHaveFailed.getId())).andReturn(Optional.of(iHaveFailed)).anyTimes(); + EasyMock.expect(taskStorage.getTask(runningTaskId)).andReturn(Optional.of(captured.getValue())).anyTimes(); + EasyMock.expect(taskClient.getStatusAsync(runningTaskId)).andReturn(Futures.immediateFuture(Status.READING)); + EasyMock.expect(taskClient.getStartTimeAsync(runningTaskId)).andReturn(Futures.immediateFuture(now)).anyTimes(); + EasyMock.expect(taskQueue.add(EasyMock.capture(aNewTaskCapture))).andReturn(true); + EasyMock.replay(taskStorage); + EasyMock.replay(taskQueue); + EasyMock.replay(taskClient); supervisor.runInternal(); verifyAll(); @@ -1012,23 +1000,23 @@ public class KafkaSupervisorTest extends EasyMockSupport addSomeEvents(1); Capture captured = Capture.newInstance(CaptureType.ALL); - expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); - expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); - expect(taskRunner.getRunningTasks()).andReturn(Collections.EMPTY_LIST).anyTimes(); - expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); - expect(taskClient.getStatusAsync(EasyMock.anyString())) - .andReturn(Futures.immediateFuture(Status.NOT_STARTED)) - .anyTimes(); - expect(taskClient.getStartTimeAsync(EasyMock.anyString())) - .andReturn(Futures.immediateFuture(DateTimes.nowUtc())) - .anyTimes(); - expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( + EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); + EasyMock.expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); + 
EasyMock.expect(taskRunner.getRunningTasks()).andReturn(Collections.EMPTY_LIST).anyTimes(); + EasyMock.expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); + EasyMock.expect(taskClient.getStatusAsync(EasyMock.anyString())) + .andReturn(Futures.immediateFuture(Status.NOT_STARTED)) + .anyTimes(); + EasyMock.expect(taskClient.getStartTimeAsync(EasyMock.anyString())) + .andReturn(Futures.immediateFuture(DateTimes.nowUtc())) + .anyTimes(); + EasyMock.expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( new KafkaDataSourceMetadata( null ) ).anyTimes(); - expect(taskQueue.add(capture(captured))).andReturn(true).times(4); - taskRunner.registerListener(anyObject(TaskRunnerListener.class), anyObject(Executor.class)); + EasyMock.expect(taskQueue.add(EasyMock.capture(captured))).andReturn(true).times(4); + taskRunner.registerListener(EasyMock.anyObject(TaskRunnerListener.class), EasyMock.anyObject(Executor.class)); replayAll(); @@ -1041,33 +1029,33 @@ public class KafkaSupervisorTest extends EasyMockSupport EasyMock.reset(taskStorage); EasyMock.reset(taskClient); - expect(taskClient.getStatusAsync(EasyMock.anyString())) - .andReturn(Futures.immediateFuture(Status.NOT_STARTED)) - .anyTimes(); - expect(taskClient.getStartTimeAsync(EasyMock.anyString())) - .andReturn(Futures.immediateFuture(DateTimes.nowUtc())) - .anyTimes(); + EasyMock.expect(taskClient.getStatusAsync(EasyMock.anyString())) + .andReturn(Futures.immediateFuture(Status.NOT_STARTED)) + .anyTimes(); + EasyMock.expect(taskClient.getStartTimeAsync(EasyMock.anyString())) + .andReturn(Futures.immediateFuture(DateTimes.nowUtc())) + .anyTimes(); TreeMap> checkpoints1 = new TreeMap<>(); checkpoints1.put(0, ImmutableMap.of(0, 0L, 2, 0L)); TreeMap> checkpoints2 = new TreeMap<>(); checkpoints2.put(0, ImmutableMap.of(1, 0L)); // there would be 4 tasks, 2 for each task group - expect(taskClient.getCheckpointsAsync(EasyMock.contains("sequenceName-0"), EasyMock.anyBoolean())) - .andReturn(Futures.immediateFuture(checkpoints1)) - .times(2); - expect(taskClient.getCheckpointsAsync(EasyMock.contains("sequenceName-1"), EasyMock.anyBoolean())) - .andReturn(Futures.immediateFuture(checkpoints2)) - .times(2); + EasyMock.expect(taskClient.getCheckpointsAsync(EasyMock.contains("sequenceName-0"), EasyMock.anyBoolean())) + .andReturn(Futures.immediateFuture(checkpoints1)) + .times(2); + EasyMock.expect(taskClient.getCheckpointsAsync(EasyMock.contains("sequenceName-1"), EasyMock.anyBoolean())) + .andReturn(Futures.immediateFuture(checkpoints2)) + .times(2); - expect(taskStorage.getActiveTasks()).andReturn(tasks).anyTimes(); + EasyMock.expect(taskStorage.getActiveTasks()).andReturn(tasks).anyTimes(); for (Task task : tasks) { - expect(taskStorage.getStatus(task.getId())) - .andReturn(Optional.of(TaskStatus.running(task.getId()))) - .anyTimes(); - expect(taskStorage.getTask(task.getId())).andReturn(Optional.of(task)).anyTimes(); + EasyMock.expect(taskStorage.getStatus(task.getId())) + .andReturn(Optional.of(TaskStatus.running(task.getId()))) + .anyTimes(); + EasyMock.expect(taskStorage.getTask(task.getId())).andReturn(Optional.of(task)).anyTimes(); } - replay(taskStorage); - replay(taskClient); + EasyMock.replay(taskStorage); + EasyMock.replay(taskClient); supervisor.runInternal(); verifyAll(); @@ -1081,22 +1069,22 @@ public class KafkaSupervisorTest extends EasyMockSupport EasyMock.reset(taskStorage); EasyMock.reset(taskQueue); EasyMock.reset(taskClient); - 
expect(taskStorage.getActiveTasks()).andReturn(imStillRunning).anyTimes(); + EasyMock.expect(taskStorage.getActiveTasks()).andReturn(imStillRunning).anyTimes(); for (Task task : imStillRunning) { - expect(taskStorage.getStatus(task.getId())) - .andReturn(Optional.of(TaskStatus.running(task.getId()))) - .anyTimes(); - expect(taskStorage.getTask(task.getId())).andReturn(Optional.of(task)).anyTimes(); + EasyMock.expect(taskStorage.getStatus(task.getId())) + .andReturn(Optional.of(TaskStatus.running(task.getId()))) + .anyTimes(); + EasyMock.expect(taskStorage.getTask(task.getId())).andReturn(Optional.of(task)).anyTimes(); } - expect(taskStorage.getStatus(iAmSuccess.getId())) - .andReturn(Optional.of(TaskStatus.success(iAmSuccess.getId()))); - expect(taskStorage.getTask(iAmSuccess.getId())).andReturn(Optional.of((Task) iAmSuccess)).anyTimes(); - expect(taskQueue.add(capture(newTasksCapture))).andReturn(true).times(2); - expect(taskClient.stopAsync(capture(shutdownTaskIdCapture), EasyMock.eq(false))) - .andReturn(Futures.immediateFuture(true)); - replay(taskStorage); - replay(taskQueue); - replay(taskClient); + EasyMock.expect(taskStorage.getStatus(iAmSuccess.getId())) + .andReturn(Optional.of(TaskStatus.success(iAmSuccess.getId()))); + EasyMock.expect(taskStorage.getTask(iAmSuccess.getId())).andReturn(Optional.of(iAmSuccess)).anyTimes(); + EasyMock.expect(taskQueue.add(EasyMock.capture(newTasksCapture))).andReturn(true).times(2); + EasyMock.expect(taskClient.stopAsync(EasyMock.capture(shutdownTaskIdCapture), EasyMock.eq(false))) + .andReturn(Futures.immediateFuture(true)); + EasyMock.replay(taskStorage); + EasyMock.replay(taskQueue); + EasyMock.replay(taskClient); supervisor.runInternal(); verifyAll(); @@ -1114,17 +1102,17 @@ public class KafkaSupervisorTest extends EasyMockSupport addSomeEvents(100); Capture captured = Capture.newInstance(CaptureType.ALL); - expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); - expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); - expect(taskRunner.getRunningTasks()).andReturn(Collections.EMPTY_LIST).anyTimes(); - expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); - expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( + EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); + EasyMock.expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); + EasyMock.expect(taskRunner.getRunningTasks()).andReturn(Collections.EMPTY_LIST).anyTimes(); + EasyMock.expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); + EasyMock.expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( new KafkaDataSourceMetadata( null ) ).anyTimes(); - expect(taskQueue.add(capture(captured))).andReturn(true).times(4); - taskRunner.registerListener(anyObject(TaskRunnerListener.class), anyObject(Executor.class)); + EasyMock.expect(taskQueue.add(EasyMock.capture(captured))).andReturn(true).times(4); + taskRunner.registerListener(EasyMock.anyObject(TaskRunnerListener.class), EasyMock.anyObject(Executor.class)); replayAll(); supervisor.start(); @@ -1139,47 +1127,47 @@ public class KafkaSupervisorTest extends EasyMockSupport EasyMock.reset(taskStorage, taskRunner, taskClient, taskQueue); captured = Capture.newInstance(CaptureType.ALL); - expect(taskStorage.getActiveTasks()).andReturn(tasks).anyTimes(); + 
EasyMock.expect(taskStorage.getActiveTasks()).andReturn(tasks).anyTimes(); for (Task task : tasks) { - expect(taskStorage.getStatus(task.getId())) - .andReturn(Optional.of(TaskStatus.running(task.getId()))) - .anyTimes(); - expect(taskStorage.getTask(task.getId())).andReturn(Optional.of(task)).anyTimes(); + EasyMock.expect(taskStorage.getStatus(task.getId())) + .andReturn(Optional.of(TaskStatus.running(task.getId()))) + .anyTimes(); + EasyMock.expect(taskStorage.getTask(task.getId())).andReturn(Optional.of(task)).anyTimes(); } - expect(taskRunner.getRunningTasks()).andReturn(workItems).anyTimes(); - expect(taskClient.getStatusAsync(EasyMock.anyString())) - .andReturn(Futures.immediateFuture(Status.READING)) - .anyTimes(); - expect(taskClient.getStartTimeAsync(EasyMock.contains("sequenceName-0"))) - .andReturn(Futures.immediateFuture(DateTimes.nowUtc().minusMinutes(2))) - .andReturn(Futures.immediateFuture(DateTimes.nowUtc())); - expect(taskClient.getStartTimeAsync(EasyMock.contains("sequenceName-1"))) - .andReturn(Futures.immediateFuture(DateTimes.nowUtc())) - .times(2); - expect(taskClient.pauseAsync(EasyMock.contains("sequenceName-0"))) - .andReturn(Futures.immediateFuture((Map) ImmutableMap.of(0, 10L, 1, 20L, 2, 30L))) - .andReturn(Futures.immediateFuture((Map) ImmutableMap.of(0, 10L, 1, 15L, 2, 35L))); - expect( + EasyMock.expect(taskRunner.getRunningTasks()).andReturn(workItems).anyTimes(); + EasyMock.expect(taskClient.getStatusAsync(EasyMock.anyString())) + .andReturn(Futures.immediateFuture(Status.READING)) + .anyTimes(); + EasyMock.expect(taskClient.getStartTimeAsync(EasyMock.contains("sequenceName-0"))) + .andReturn(Futures.immediateFuture(DateTimes.nowUtc().minusMinutes(2))) + .andReturn(Futures.immediateFuture(DateTimes.nowUtc())); + EasyMock.expect(taskClient.getStartTimeAsync(EasyMock.contains("sequenceName-1"))) + .andReturn(Futures.immediateFuture(DateTimes.nowUtc())) + .times(2); + EasyMock.expect(taskClient.pauseAsync(EasyMock.contains("sequenceName-0"))) + .andReturn(Futures.immediateFuture(ImmutableMap.of(0, 10L, 1, 20L, 2, 30L))) + .andReturn(Futures.immediateFuture(ImmutableMap.of(0, 10L, 1, 15L, 2, 35L))); + EasyMock.expect( taskClient.setEndOffsetsAsync( EasyMock.contains("sequenceName-0"), EasyMock.eq(ImmutableMap.of(0, 10L, 1, 20L, 2, 35L)), EasyMock.eq(true) ) ).andReturn(Futures.immediateFuture(true)).times(2); - expect(taskQueue.add(capture(captured))).andReturn(true).times(2); + EasyMock.expect(taskQueue.add(EasyMock.capture(captured))).andReturn(true).times(2); TreeMap> checkpoints1 = new TreeMap<>(); checkpoints1.put(0, ImmutableMap.of(0, 0L, 2, 0L)); TreeMap> checkpoints2 = new TreeMap<>(); checkpoints2.put(0, ImmutableMap.of(1, 0L)); - expect(taskClient.getCheckpointsAsync(EasyMock.contains("sequenceName-0"), EasyMock.anyBoolean())) - .andReturn(Futures.immediateFuture(checkpoints1)) - .times(2); - expect(taskClient.getCheckpointsAsync(EasyMock.contains("sequenceName-1"), EasyMock.anyBoolean())) - .andReturn(Futures.immediateFuture(checkpoints2)) - .times(2); + EasyMock.expect(taskClient.getCheckpointsAsync(EasyMock.contains("sequenceName-0"), EasyMock.anyBoolean())) + .andReturn(Futures.immediateFuture(checkpoints1)) + .times(2); + EasyMock.expect(taskClient.getCheckpointsAsync(EasyMock.contains("sequenceName-1"), EasyMock.anyBoolean())) + .andReturn(Futures.immediateFuture(checkpoints2)) + .times(2); - replay(taskStorage, taskRunner, taskClient, taskQueue); + EasyMock.replay(taskStorage, taskRunner, taskClient, taskQueue); supervisor.runInternal(); 
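// A minimal sketch of the record/replay/verify cycle that the expectations above follow, written
// with fully qualified EasyMock and JUnit calls rather than static imports. TaskStore, fetchActive()
// and add() are hypothetical stand-ins, not Druid classes; the real test wires TaskStorage,
// TaskQueue and the supervisor through the same phases before calling verifyAll().

import com.google.common.collect.ImmutableList;
import java.util.List;
import org.easymock.Capture;
import org.easymock.EasyMock;
import org.junit.Assert;

public class RecordReplayVerifySketch
{
  // Hypothetical collaborator standing in for the mocked TaskStorage/TaskQueue style dependencies.
  interface TaskStore
  {
    List<String> fetchActive();

    boolean add(String taskId);
  }

  public static void main(String[] args)
  {
    final TaskStore store = EasyMock.createMock(TaskStore.class);
    final Capture<String> added = Capture.newInstance();

    // Record: declare what each mocked call returns and how often it may happen.
    EasyMock.expect(store.fetchActive()).andReturn(ImmutableList.of("task-0")).anyTimes();
    EasyMock.expect(store.add(EasyMock.capture(added))).andReturn(true).times(1);
    EasyMock.replay(store);

    // Exercise: the code under test would call the mock here.
    Assert.assertEquals(ImmutableList.of("task-0"), store.fetchActive());
    Assert.assertTrue(store.add("task-1"));

    // Verify: every recorded expectation must have been satisfied exactly as declared.
    EasyMock.verify(store);
    Assert.assertEquals("task-1", added.getValue());
  }
}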
verifyAll(); @@ -1225,30 +1213,30 @@ public class KafkaSupervisorTest extends EasyMockSupport workItems.add(new TestTaskRunnerWorkItem(task, null, location)); Capture captured = Capture.newInstance(); - expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); - expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); - expect(taskRunner.getRunningTasks()).andReturn(workItems).anyTimes(); - expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of(task)).anyTimes(); - expect(taskStorage.getStatus("id1")).andReturn(Optional.of(TaskStatus.running("id1"))).anyTimes(); - expect(taskStorage.getTask("id1")).andReturn(Optional.of(task)).anyTimes(); - expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( + EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); + EasyMock.expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); + EasyMock.expect(taskRunner.getRunningTasks()).andReturn(workItems).anyTimes(); + EasyMock.expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of(task)).anyTimes(); + EasyMock.expect(taskStorage.getStatus("id1")).andReturn(Optional.of(TaskStatus.running("id1"))).anyTimes(); + EasyMock.expect(taskStorage.getTask("id1")).andReturn(Optional.of(task)).anyTimes(); + EasyMock.expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( new KafkaDataSourceMetadata( null ) ).anyTimes(); - expect(taskClient.getStatusAsync("id1")).andReturn(Futures.immediateFuture(Status.PUBLISHING)); - expect(taskClient.getCurrentOffsetsAsync("id1", false)) - .andReturn(Futures.immediateFuture((Map) ImmutableMap.of(0, 10L, 1, 20L, 2, 30L))); - expect(taskClient.getEndOffsets("id1")).andReturn(ImmutableMap.of(0, 10L, 1, 20L, 2, 30L)); - expect(taskQueue.add(capture(captured))).andReturn(true); + EasyMock.expect(taskClient.getStatusAsync("id1")).andReturn(Futures.immediateFuture(Status.PUBLISHING)); + EasyMock.expect(taskClient.getCurrentOffsetsAsync("id1", false)) + .andReturn(Futures.immediateFuture(ImmutableMap.of(0, 10L, 1, 20L, 2, 30L))); + EasyMock.expect(taskClient.getEndOffsets("id1")).andReturn(ImmutableMap.of(0, 10L, 1, 20L, 2, 30L)); + EasyMock.expect(taskQueue.add(EasyMock.capture(captured))).andReturn(true); TreeMap> checkpoints = new TreeMap<>(); checkpoints.put(0, ImmutableMap.of(0, 0L, 1, 0L, 2, 0L)); - expect(taskClient.getCheckpoints(EasyMock.anyString(), EasyMock.anyBoolean())) - .andReturn(checkpoints) - .anyTimes(); + EasyMock.expect(taskClient.getCheckpoints(EasyMock.anyString(), EasyMock.anyBoolean())) + .andReturn(checkpoints) + .anyTimes(); - taskRunner.registerListener(anyObject(TaskRunnerListener.class), anyObject(Executor.class)); + taskRunner.registerListener(EasyMock.anyObject(TaskRunnerListener.class), EasyMock.anyObject(Executor.class)); replayAll(); supervisor.start(); @@ -1339,24 +1327,24 @@ public class KafkaSupervisorTest extends EasyMockSupport workItems.add(new TestTaskRunnerWorkItem(task, null, location)); Capture captured = Capture.newInstance(); - expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); - expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); - expect(taskRunner.getRunningTasks()).andReturn(workItems).anyTimes(); - expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of(task)).anyTimes(); - expect(taskStorage.getStatus("id1")).andReturn(Optional.of(TaskStatus.running("id1"))).anyTimes(); - 
expect(taskStorage.getTask("id1")).andReturn(Optional.of(task)).anyTimes(); - expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( + EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); + EasyMock.expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); + EasyMock.expect(taskRunner.getRunningTasks()).andReturn(workItems).anyTimes(); + EasyMock.expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of(task)).anyTimes(); + EasyMock.expect(taskStorage.getStatus("id1")).andReturn(Optional.of(TaskStatus.running("id1"))).anyTimes(); + EasyMock.expect(taskStorage.getTask("id1")).andReturn(Optional.of(task)).anyTimes(); + EasyMock.expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( new KafkaDataSourceMetadata( null ) ).anyTimes(); - expect(taskClient.getStatusAsync("id1")).andReturn(Futures.immediateFuture(Status.PUBLISHING)); - expect(taskClient.getCurrentOffsetsAsync("id1", false)) - .andReturn(Futures.immediateFuture((Map) ImmutableMap.of(0, 10L, 2, 30L))); - expect(taskClient.getEndOffsets("id1")).andReturn(ImmutableMap.of(0, 10L, 2, 30L)); - expect(taskQueue.add(capture(captured))).andReturn(true); + EasyMock.expect(taskClient.getStatusAsync("id1")).andReturn(Futures.immediateFuture(Status.PUBLISHING)); + EasyMock.expect(taskClient.getCurrentOffsetsAsync("id1", false)) + .andReturn(Futures.immediateFuture(ImmutableMap.of(0, 10L, 2, 30L))); + EasyMock.expect(taskClient.getEndOffsets("id1")).andReturn(ImmutableMap.of(0, 10L, 2, 30L)); + EasyMock.expect(taskQueue.add(EasyMock.capture(captured))).andReturn(true); - taskRunner.registerListener(anyObject(TaskRunnerListener.class), anyObject(Executor.class)); + taskRunner.registerListener(EasyMock.anyObject(TaskRunnerListener.class), EasyMock.anyObject(Executor.class)); replayAll(); supervisor.start(); @@ -1465,36 +1453,36 @@ public class KafkaSupervisorTest extends EasyMockSupport workItems.add(new TestTaskRunnerWorkItem(id1, null, location1)); workItems.add(new TestTaskRunnerWorkItem(id2, null, location2)); - expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); - expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); - expect(taskRunner.getRunningTasks()).andReturn(workItems).anyTimes(); - expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of(id1, id2)).anyTimes(); - expect(taskStorage.getStatus("id1")).andReturn(Optional.of(TaskStatus.running("id1"))).anyTimes(); - expect(taskStorage.getStatus("id2")).andReturn(Optional.of(TaskStatus.running("id2"))).anyTimes(); - expect(taskStorage.getTask("id1")).andReturn(Optional.of(id1)).anyTimes(); - expect(taskStorage.getTask("id2")).andReturn(Optional.of(id2)).anyTimes(); - expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( + EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); + EasyMock.expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); + EasyMock.expect(taskRunner.getRunningTasks()).andReturn(workItems).anyTimes(); + EasyMock.expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of(id1, id2)).anyTimes(); + EasyMock.expect(taskStorage.getStatus("id1")).andReturn(Optional.of(TaskStatus.running("id1"))).anyTimes(); + EasyMock.expect(taskStorage.getStatus("id2")).andReturn(Optional.of(TaskStatus.running("id2"))).anyTimes(); + 
EasyMock.expect(taskStorage.getTask("id1")).andReturn(Optional.of(id1)).anyTimes(); + EasyMock.expect(taskStorage.getTask("id2")).andReturn(Optional.of(id2)).anyTimes(); + EasyMock.expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( new KafkaDataSourceMetadata( null ) ).anyTimes(); - expect(taskClient.getStatusAsync("id1")).andReturn(Futures.immediateFuture(Status.PUBLISHING)); - expect(taskClient.getStatusAsync("id2")).andReturn(Futures.immediateFuture(Status.READING)); - expect(taskClient.getStartTimeAsync("id2")).andReturn(Futures.immediateFuture(startTime)); - expect(taskClient.getCurrentOffsetsAsync("id1", false)) - .andReturn(Futures.immediateFuture((Map) ImmutableMap.of(0, 1L, 1, 2L, 2, 3L))); - expect(taskClient.getEndOffsets("id1")).andReturn(ImmutableMap.of(0, 1L, 1, 2L, 2, 3L)); - expect(taskClient.getCurrentOffsetsAsync("id2", false)) - .andReturn(Futures.immediateFuture((Map) ImmutableMap.of(0, 4L, 1, 5L, 2, 6L))); + EasyMock.expect(taskClient.getStatusAsync("id1")).andReturn(Futures.immediateFuture(Status.PUBLISHING)); + EasyMock.expect(taskClient.getStatusAsync("id2")).andReturn(Futures.immediateFuture(Status.READING)); + EasyMock.expect(taskClient.getStartTimeAsync("id2")).andReturn(Futures.immediateFuture(startTime)); + EasyMock.expect(taskClient.getCurrentOffsetsAsync("id1", false)) + .andReturn(Futures.immediateFuture(ImmutableMap.of(0, 1L, 1, 2L, 2, 3L))); + EasyMock.expect(taskClient.getEndOffsets("id1")).andReturn(ImmutableMap.of(0, 1L, 1, 2L, 2, 3L)); + EasyMock.expect(taskClient.getCurrentOffsetsAsync("id2", false)) + .andReturn(Futures.immediateFuture(ImmutableMap.of(0, 4L, 1, 5L, 2, 6L))); - taskRunner.registerListener(anyObject(TaskRunnerListener.class), anyObject(Executor.class)); + taskRunner.registerListener(EasyMock.anyObject(TaskRunnerListener.class), EasyMock.anyObject(Executor.class)); // since id1 is publishing, so getCheckpoints wouldn't be called for it TreeMap> checkpoints = new TreeMap<>(); checkpoints.put(0, ImmutableMap.of(0, 1L, 1, 2L, 2, 3L)); - expect(taskClient.getCheckpointsAsync(EasyMock.contains("id2"), EasyMock.anyBoolean())) - .andReturn(Futures.immediateFuture(checkpoints)) - .times(1); + EasyMock.expect(taskClient.getCheckpointsAsync(EasyMock.contains("id2"), EasyMock.anyBoolean())) + .andReturn(Futures.immediateFuture(checkpoints)) + .times(1); replayAll(); @@ -1530,7 +1518,7 @@ public class KafkaSupervisorTest extends EasyMockSupport Assert.assertEquals("id1", publishingReport.getId()); Assert.assertEquals(ImmutableMap.of(0, 0L, 1, 0L, 2, 0L), publishingReport.getStartingOffsets()); Assert.assertEquals(ImmutableMap.of(0, 1L, 1, 2L, 2, 3L), publishingReport.getCurrentOffsets()); - Assert.assertEquals(null, publishingReport.getLag()); + Assert.assertNull(publishingReport.getLag()); Assert.assertEquals(ImmutableMap.of(0, 7L, 1, 7L, 2, 7L), payload.getLatestOffsets()); Assert.assertEquals(ImmutableMap.of(0, 3L, 1, 2L, 2, 1L), payload.getMinimumLag()); @@ -1545,17 +1533,17 @@ public class KafkaSupervisorTest extends EasyMockSupport addSomeEvents(1); Capture captured = Capture.newInstance(CaptureType.ALL); - expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); - expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); - expect(taskRunner.getRunningTasks()).andReturn(Collections.EMPTY_LIST).anyTimes(); - expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); - 
expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( + EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); + EasyMock.expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); + EasyMock.expect(taskRunner.getRunningTasks()).andReturn(Collections.EMPTY_LIST).anyTimes(); + EasyMock.expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); + EasyMock.expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( new KafkaDataSourceMetadata( null ) ).anyTimes(); - expect(taskQueue.add(capture(captured))).andReturn(true).times(4); - taskRunner.registerListener(anyObject(TaskRunnerListener.class), anyObject(Executor.class)); + EasyMock.expect(taskQueue.add(EasyMock.capture(captured))).andReturn(true).times(4); + taskRunner.registerListener(EasyMock.anyObject(TaskRunnerListener.class), EasyMock.anyObject(Executor.class)); replayAll(); supervisor.start(); @@ -1570,26 +1558,26 @@ public class KafkaSupervisorTest extends EasyMockSupport checkpoints1.put(0, ImmutableMap.of(0, 0L, 2, 0L)); TreeMap> checkpoints2 = new TreeMap<>(); checkpoints2.put(0, ImmutableMap.of(1, 0L)); - expect(taskClient.getCheckpointsAsync(EasyMock.contains("sequenceName-0"), EasyMock.anyBoolean())) - .andReturn(Futures.immediateFuture(checkpoints1)) - .times(2); - expect(taskClient.getCheckpointsAsync(EasyMock.contains("sequenceName-1"), EasyMock.anyBoolean())) - .andReturn(Futures.immediateFuture(checkpoints2)) - .times(2); + EasyMock.expect(taskClient.getCheckpointsAsync(EasyMock.contains("sequenceName-0"), EasyMock.anyBoolean())) + .andReturn(Futures.immediateFuture(checkpoints1)) + .times(2); + EasyMock.expect(taskClient.getCheckpointsAsync(EasyMock.contains("sequenceName-1"), EasyMock.anyBoolean())) + .andReturn(Futures.immediateFuture(checkpoints2)) + .times(2); - expect(taskStorage.getActiveTasks()).andReturn(tasks).anyTimes(); + EasyMock.expect(taskStorage.getActiveTasks()).andReturn(tasks).anyTimes(); for (Task task : tasks) { - expect(taskStorage.getStatus(task.getId())) - .andReturn(Optional.of(TaskStatus.running(task.getId()))) - .anyTimes(); - expect(taskStorage.getTask(task.getId())).andReturn(Optional.of(task)).anyTimes(); - expect(taskClient.getStatusAsync(task.getId())) - .andReturn(Futures.immediateFuture(Status.NOT_STARTED)); - expect(taskClient.getStartTimeAsync(task.getId())) - .andReturn(Futures.immediateFailedFuture(new RuntimeException())); + EasyMock.expect(taskStorage.getStatus(task.getId())) + .andReturn(Optional.of(TaskStatus.running(task.getId()))) + .anyTimes(); + EasyMock.expect(taskStorage.getTask(task.getId())).andReturn(Optional.of(task)).anyTimes(); + EasyMock.expect(taskClient.getStatusAsync(task.getId())) + .andReturn(Futures.immediateFuture(Status.NOT_STARTED)); + EasyMock.expect(taskClient.getStartTimeAsync(task.getId())) + .andReturn(Futures.immediateFailedFuture(new RuntimeException())); taskQueue.shutdown(task.getId(), "Task [%s] failed to return start time, killing task", task.getId()); } - replay(taskStorage, taskClient, taskQueue); + EasyMock.replay(taskStorage, taskClient, taskQueue); supervisor.runInternal(); verifyAll(); @@ -1604,17 +1592,17 @@ public class KafkaSupervisorTest extends EasyMockSupport addSomeEvents(100); Capture captured = Capture.newInstance(CaptureType.ALL); - expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); - expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); - 
expect(taskRunner.getRunningTasks()).andReturn(Collections.EMPTY_LIST).anyTimes(); - expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); - expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( + EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); + EasyMock.expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); + EasyMock.expect(taskRunner.getRunningTasks()).andReturn(Collections.EMPTY_LIST).anyTimes(); + EasyMock.expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); + EasyMock.expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( new KafkaDataSourceMetadata( null ) ).anyTimes(); - expect(taskQueue.add(capture(captured))).andReturn(true).times(4); - taskRunner.registerListener(anyObject(TaskRunnerListener.class), anyObject(Executor.class)); + EasyMock.expect(taskQueue.add(EasyMock.capture(captured))).andReturn(true).times(4); + taskRunner.registerListener(EasyMock.anyObject(TaskRunnerListener.class), EasyMock.anyObject(Executor.class)); replayAll(); supervisor.start(); @@ -1633,43 +1621,43 @@ public class KafkaSupervisorTest extends EasyMockSupport checkpoints1.put(0, ImmutableMap.of(0, 0L, 2, 0L)); TreeMap> checkpoints2 = new TreeMap<>(); checkpoints2.put(0, ImmutableMap.of(1, 0L)); - expect(taskClient.getCheckpointsAsync(EasyMock.contains("sequenceName-0"), EasyMock.anyBoolean())) - .andReturn(Futures.immediateFuture(checkpoints1)) - .times(2); - expect(taskClient.getCheckpointsAsync(EasyMock.contains("sequenceName-1"), EasyMock.anyBoolean())) - .andReturn(Futures.immediateFuture(checkpoints2)) - .times(2); + EasyMock.expect(taskClient.getCheckpointsAsync(EasyMock.contains("sequenceName-0"), EasyMock.anyBoolean())) + .andReturn(Futures.immediateFuture(checkpoints1)) + .times(2); + EasyMock.expect(taskClient.getCheckpointsAsync(EasyMock.contains("sequenceName-1"), EasyMock.anyBoolean())) + .andReturn(Futures.immediateFuture(checkpoints2)) + .times(2); captured = Capture.newInstance(CaptureType.ALL); - expect(taskStorage.getActiveTasks()).andReturn(tasks).anyTimes(); + EasyMock.expect(taskStorage.getActiveTasks()).andReturn(tasks).anyTimes(); for (Task task : tasks) { - expect(taskStorage.getStatus(task.getId())) - .andReturn(Optional.of(TaskStatus.running(task.getId()))) - .anyTimes(); - expect(taskStorage.getTask(task.getId())).andReturn(Optional.of(task)).anyTimes(); + EasyMock.expect(taskStorage.getStatus(task.getId())) + .andReturn(Optional.of(TaskStatus.running(task.getId()))) + .anyTimes(); + EasyMock.expect(taskStorage.getTask(task.getId())).andReturn(Optional.of(task)).anyTimes(); } - expect(taskRunner.getRunningTasks()).andReturn(workItems).anyTimes(); - expect(taskClient.getStatusAsync(EasyMock.anyString())) - .andReturn(Futures.immediateFuture(Status.READING)) - .anyTimes(); - expect(taskClient.getStartTimeAsync(EasyMock.contains("sequenceName-0"))) - .andReturn(Futures.immediateFuture(DateTimes.nowUtc().minusMinutes(2))) - .andReturn(Futures.immediateFuture(DateTimes.nowUtc())); - expect(taskClient.getStartTimeAsync(EasyMock.contains("sequenceName-1"))) - .andReturn(Futures.immediateFuture(DateTimes.nowUtc())) - .times(2); - expect(taskClient.pauseAsync(EasyMock.contains("sequenceName-0"))) - .andReturn(Futures.immediateFailedFuture(new RuntimeException())).times(2); + EasyMock.expect(taskRunner.getRunningTasks()).andReturn(workItems).anyTimes(); + 
EasyMock.expect(taskClient.getStatusAsync(EasyMock.anyString())) + .andReturn(Futures.immediateFuture(Status.READING)) + .anyTimes(); + EasyMock.expect(taskClient.getStartTimeAsync(EasyMock.contains("sequenceName-0"))) + .andReturn(Futures.immediateFuture(DateTimes.nowUtc().minusMinutes(2))) + .andReturn(Futures.immediateFuture(DateTimes.nowUtc())); + EasyMock.expect(taskClient.getStartTimeAsync(EasyMock.contains("sequenceName-1"))) + .andReturn(Futures.immediateFuture(DateTimes.nowUtc())) + .times(2); + EasyMock.expect(taskClient.pauseAsync(EasyMock.contains("sequenceName-0"))) + .andReturn(Futures.immediateFailedFuture(new RuntimeException())).times(2); taskQueue.shutdown( EasyMock.contains("sequenceName-0"), EasyMock.eq("An exception occured while waiting for task [%s] to pause: [%s]"), EasyMock.contains("sequenceName-0"), EasyMock.anyString() ); - expectLastCall().times(2); - expect(taskQueue.add(capture(captured))).andReturn(true).times(2); + EasyMock.expectLastCall().times(2); + EasyMock.expect(taskQueue.add(EasyMock.capture(captured))).andReturn(true).times(2); - replay(taskStorage, taskRunner, taskClient, taskQueue); + EasyMock.replay(taskStorage, taskRunner, taskClient, taskQueue); supervisor.runInternal(); verifyAll(); @@ -1690,17 +1678,17 @@ public class KafkaSupervisorTest extends EasyMockSupport addSomeEvents(100); Capture captured = Capture.newInstance(CaptureType.ALL); - expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); - expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); - expect(taskRunner.getRunningTasks()).andReturn(Collections.EMPTY_LIST).anyTimes(); - expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); - expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( + EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); + EasyMock.expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); + EasyMock.expect(taskRunner.getRunningTasks()).andReturn(Collections.EMPTY_LIST).anyTimes(); + EasyMock.expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); + EasyMock.expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( new KafkaDataSourceMetadata( null ) ).anyTimes(); - expect(taskQueue.add(capture(captured))).andReturn(true).times(4); - taskRunner.registerListener(anyObject(TaskRunnerListener.class), anyObject(Executor.class)); + EasyMock.expect(taskQueue.add(EasyMock.capture(captured))).andReturn(true).times(4); + taskRunner.registerListener(EasyMock.anyObject(TaskRunnerListener.class), EasyMock.anyObject(Executor.class)); replayAll(); supervisor.start(); @@ -1719,35 +1707,35 @@ public class KafkaSupervisorTest extends EasyMockSupport checkpoints1.put(0, ImmutableMap.of(0, 0L, 2, 0L)); TreeMap> checkpoints2 = new TreeMap<>(); checkpoints2.put(0, ImmutableMap.of(1, 0L)); - expect(taskClient.getCheckpointsAsync(EasyMock.contains("sequenceName-0"), EasyMock.anyBoolean())) - .andReturn(Futures.immediateFuture(checkpoints1)) - .times(2); - expect(taskClient.getCheckpointsAsync(EasyMock.contains("sequenceName-1"), EasyMock.anyBoolean())) - .andReturn(Futures.immediateFuture(checkpoints2)) - .times(2); + EasyMock.expect(taskClient.getCheckpointsAsync(EasyMock.contains("sequenceName-0"), EasyMock.anyBoolean())) + .andReturn(Futures.immediateFuture(checkpoints1)) + .times(2); + EasyMock.expect(taskClient.getCheckpointsAsync(EasyMock.contains("sequenceName-1"), 
EasyMock.anyBoolean())) + .andReturn(Futures.immediateFuture(checkpoints2)) + .times(2); captured = Capture.newInstance(CaptureType.ALL); - expect(taskStorage.getActiveTasks()).andReturn(tasks).anyTimes(); + EasyMock.expect(taskStorage.getActiveTasks()).andReturn(tasks).anyTimes(); for (Task task : tasks) { - expect(taskStorage.getStatus(task.getId())) - .andReturn(Optional.of(TaskStatus.running(task.getId()))) - .anyTimes(); - expect(taskStorage.getTask(task.getId())).andReturn(Optional.of(task)).anyTimes(); + EasyMock.expect(taskStorage.getStatus(task.getId())) + .andReturn(Optional.of(TaskStatus.running(task.getId()))) + .anyTimes(); + EasyMock.expect(taskStorage.getTask(task.getId())).andReturn(Optional.of(task)).anyTimes(); } - expect(taskRunner.getRunningTasks()).andReturn(workItems).anyTimes(); - expect(taskClient.getStatusAsync(EasyMock.anyString())) - .andReturn(Futures.immediateFuture(Status.READING)) - .anyTimes(); - expect(taskClient.getStartTimeAsync(EasyMock.contains("sequenceName-0"))) - .andReturn(Futures.immediateFuture(DateTimes.nowUtc().minusMinutes(2))) - .andReturn(Futures.immediateFuture(DateTimes.nowUtc())); - expect(taskClient.getStartTimeAsync(EasyMock.contains("sequenceName-1"))) - .andReturn(Futures.immediateFuture(DateTimes.nowUtc())) - .times(2); - expect(taskClient.pauseAsync(EasyMock.contains("sequenceName-0"))) - .andReturn(Futures.immediateFuture((Map) ImmutableMap.of(0, 10L, 1, 20L, 2, 30L))) - .andReturn(Futures.immediateFuture((Map) ImmutableMap.of(0, 10L, 1, 15L, 2, 35L))); - expect( + EasyMock.expect(taskRunner.getRunningTasks()).andReturn(workItems).anyTimes(); + EasyMock.expect(taskClient.getStatusAsync(EasyMock.anyString())) + .andReturn(Futures.immediateFuture(Status.READING)) + .anyTimes(); + EasyMock.expect(taskClient.getStartTimeAsync(EasyMock.contains("sequenceName-0"))) + .andReturn(Futures.immediateFuture(DateTimes.nowUtc().minusMinutes(2))) + .andReturn(Futures.immediateFuture(DateTimes.nowUtc())); + EasyMock.expect(taskClient.getStartTimeAsync(EasyMock.contains("sequenceName-1"))) + .andReturn(Futures.immediateFuture(DateTimes.nowUtc())) + .times(2); + EasyMock.expect(taskClient.pauseAsync(EasyMock.contains("sequenceName-0"))) + .andReturn(Futures.immediateFuture(ImmutableMap.of(0, 10L, 1, 20L, 2, 30L))) + .andReturn(Futures.immediateFuture(ImmutableMap.of(0, 10L, 1, 15L, 2, 35L))); + EasyMock.expect( taskClient.setEndOffsetsAsync( EasyMock.contains("sequenceName-0"), EasyMock.eq(ImmutableMap.of(0, 10L, 1, 20L, 2, 35L)), @@ -1759,10 +1747,10 @@ public class KafkaSupervisorTest extends EasyMockSupport EasyMock.eq("Task [%s] failed to respond to [set end offsets] in a timely manner, killing task"), EasyMock.contains("sequenceName-0") ); - expectLastCall().times(2); - expect(taskQueue.add(capture(captured))).andReturn(true).times(2); + EasyMock.expectLastCall().times(2); + EasyMock.expect(taskQueue.add(EasyMock.capture(captured))).andReturn(true).times(2); - replay(taskStorage, taskRunner, taskClient, taskQueue); + EasyMock.replay(taskStorage, taskRunner, taskClient, taskQueue); supervisor.runInternal(); verifyAll(); @@ -1784,7 +1772,7 @@ public class KafkaSupervisorTest extends EasyMockSupport @Test public void testStop() { - expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); + EasyMock.expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); taskClient.close(); taskRunner.unregisterListener(StringUtils.format("KafkaSupervisor-%s", DATASOURCE)); replayAll(); @@ -1849,39 +1837,39 @@ 
public class KafkaSupervisorTest extends EasyMockSupport workItems.add(new TestTaskRunnerWorkItem(id1, null, location1)); workItems.add(new TestTaskRunnerWorkItem(id2, null, location2)); - expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); - expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); - expect(taskRunner.getRunningTasks()).andReturn(workItems).anyTimes(); - expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of(id1, id2, id3)).anyTimes(); - expect(taskStorage.getStatus("id1")).andReturn(Optional.of(TaskStatus.running("id1"))).anyTimes(); - expect(taskStorage.getStatus("id2")).andReturn(Optional.of(TaskStatus.running("id2"))).anyTimes(); - expect(taskStorage.getStatus("id3")).andReturn(Optional.of(TaskStatus.running("id3"))).anyTimes(); - expect(taskStorage.getTask("id1")).andReturn(Optional.of(id1)).anyTimes(); - expect(taskStorage.getTask("id2")).andReturn(Optional.of(id2)).anyTimes(); - expect(taskStorage.getTask("id3")).andReturn(Optional.of(id3)).anyTimes(); - expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( + EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); + EasyMock.expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); + EasyMock.expect(taskRunner.getRunningTasks()).andReturn(workItems).anyTimes(); + EasyMock.expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of(id1, id2, id3)).anyTimes(); + EasyMock.expect(taskStorage.getStatus("id1")).andReturn(Optional.of(TaskStatus.running("id1"))).anyTimes(); + EasyMock.expect(taskStorage.getStatus("id2")).andReturn(Optional.of(TaskStatus.running("id2"))).anyTimes(); + EasyMock.expect(taskStorage.getStatus("id3")).andReturn(Optional.of(TaskStatus.running("id3"))).anyTimes(); + EasyMock.expect(taskStorage.getTask("id1")).andReturn(Optional.of(id1)).anyTimes(); + EasyMock.expect(taskStorage.getTask("id2")).andReturn(Optional.of(id2)).anyTimes(); + EasyMock.expect(taskStorage.getTask("id3")).andReturn(Optional.of(id3)).anyTimes(); + EasyMock.expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( new KafkaDataSourceMetadata( null ) ).anyTimes(); - expect(taskClient.getStatusAsync("id1")).andReturn(Futures.immediateFuture(Status.PUBLISHING)); - expect(taskClient.getStatusAsync("id2")).andReturn(Futures.immediateFuture(Status.READING)); - expect(taskClient.getStatusAsync("id3")).andReturn(Futures.immediateFuture(Status.READING)); - expect(taskClient.getStartTimeAsync("id2")).andReturn(Futures.immediateFuture(startTime)); - expect(taskClient.getStartTimeAsync("id3")).andReturn(Futures.immediateFuture(startTime)); - expect(taskClient.getEndOffsets("id1")).andReturn(ImmutableMap.of(0, 10L, 1, 20L, 2, 30L)); + EasyMock.expect(taskClient.getStatusAsync("id1")).andReturn(Futures.immediateFuture(Status.PUBLISHING)); + EasyMock.expect(taskClient.getStatusAsync("id2")).andReturn(Futures.immediateFuture(Status.READING)); + EasyMock.expect(taskClient.getStatusAsync("id3")).andReturn(Futures.immediateFuture(Status.READING)); + EasyMock.expect(taskClient.getStartTimeAsync("id2")).andReturn(Futures.immediateFuture(startTime)); + EasyMock.expect(taskClient.getStartTimeAsync("id3")).andReturn(Futures.immediateFuture(startTime)); + EasyMock.expect(taskClient.getEndOffsets("id1")).andReturn(ImmutableMap.of(0, 10L, 1, 20L, 2, 30L)); // getCheckpoints will not be called for id1 as it is in publishing state TreeMap> checkpoints = new TreeMap<>(); 
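// A minimal sketch, assuming Java 8 target typing, of why the raw (Map) casts can be dropped when
// stubbing async client calls with Guava's Futures.immediateFuture, as the nearby pauseAsync and
// getCurrentOffsetsAsync expectations do. OffsetClient and its method signature are hypothetical
// stand-ins for the indexing task client interface, not the actual Druid API.

import com.google.common.collect.ImmutableMap;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import java.util.Map;
import org.easymock.EasyMock;
import org.junit.Assert;

public class ImmediateFutureStubSketch
{
  // Hypothetical stand-in for the async task client mocked in these tests.
  interface OffsetClient
  {
    ListenableFuture<Map<Integer, Long>> getCurrentOffsetsAsync(String taskId, boolean retry);
  }

  public static void main(String[] args) throws Exception
  {
    final OffsetClient client = EasyMock.createMock(OffsetClient.class);

    // ImmutableMap.of(...) already satisfies Map<Integer, Long>, so no raw cast is required.
    EasyMock.expect(client.getCurrentOffsetsAsync("id1", false))
            .andReturn(Futures.immediateFuture(ImmutableMap.of(0, 10L, 1, 20L, 2, 30L)));
    EasyMock.replay(client);

    final Map<Integer, Long> offsets = client.getCurrentOffsetsAsync("id1", false).get();
    Assert.assertEquals(ImmutableMap.of(0, 10L, 1, 20L, 2, 30L), offsets);
    EasyMock.verify(client);
  }
}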
checkpoints.put(0, ImmutableMap.of(0, 10L, 1, 20L, 2, 30L)); - expect(taskClient.getCheckpointsAsync(EasyMock.contains("id2"), EasyMock.anyBoolean())) - .andReturn(Futures.immediateFuture(checkpoints)) - .times(1); - expect(taskClient.getCheckpointsAsync(EasyMock.contains("id3"), EasyMock.anyBoolean())) - .andReturn(Futures.immediateFuture(checkpoints)) - .times(1); + EasyMock.expect(taskClient.getCheckpointsAsync(EasyMock.contains("id2"), EasyMock.anyBoolean())) + .andReturn(Futures.immediateFuture(checkpoints)) + .times(1); + EasyMock.expect(taskClient.getCheckpointsAsync(EasyMock.contains("id3"), EasyMock.anyBoolean())) + .andReturn(Futures.immediateFuture(checkpoints)) + .times(1); - taskRunner.registerListener(anyObject(TaskRunnerListener.class), anyObject(Executor.class)); + taskRunner.registerListener(EasyMock.anyObject(TaskRunnerListener.class), EasyMock.anyObject(Executor.class)); replayAll(); supervisor.start(); @@ -1889,30 +1877,30 @@ public class KafkaSupervisorTest extends EasyMockSupport verifyAll(); EasyMock.reset(taskRunner, taskClient, taskQueue); - expect(taskRunner.getRunningTasks()).andReturn(workItems).anyTimes(); - expect(taskClient.pauseAsync("id2")) - .andReturn(Futures.immediateFuture((Map) ImmutableMap.of(0, 15L, 1, 25L, 2, 30L))); - expect(taskClient.setEndOffsetsAsync("id2", ImmutableMap.of(0, 15L, 1, 25L, 2, 30L), true)) - .andReturn(Futures.immediateFuture(true)); + EasyMock.expect(taskRunner.getRunningTasks()).andReturn(workItems).anyTimes(); + EasyMock.expect(taskClient.pauseAsync("id2")) + .andReturn(Futures.immediateFuture(ImmutableMap.of(0, 15L, 1, 25L, 2, 30L))); + EasyMock.expect(taskClient.setEndOffsetsAsync("id2", ImmutableMap.of(0, 15L, 1, 25L, 2, 30L), true)) + .andReturn(Futures.immediateFuture(true)); taskQueue.shutdown("id3", "Killing task for graceful shutdown"); - expectLastCall().times(1); + EasyMock.expectLastCall().times(1); taskQueue.shutdown("id3", "Killing task [%s] which hasn't been assigned to a worker", "id3"); - expectLastCall().times(1); + EasyMock.expectLastCall().times(1); - replay(taskRunner, taskClient, taskQueue); + EasyMock.replay(taskRunner, taskClient, taskQueue); supervisor.gracefulShutdownInternal(); verifyAll(); } @Test - public void testResetNoTasks() throws Exception + public void testResetNoTasks() { - expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); - expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); - expect(taskRunner.getRunningTasks()).andReturn(Collections.EMPTY_LIST).anyTimes(); - expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); - taskRunner.registerListener(anyObject(TaskRunnerListener.class), anyObject(Executor.class)); + EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); + EasyMock.expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); + EasyMock.expect(taskRunner.getRunningTasks()).andReturn(Collections.EMPTY_LIST).anyTimes(); + EasyMock.expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); + taskRunner.registerListener(EasyMock.anyObject(TaskRunnerListener.class), EasyMock.anyObject(Executor.class)); replayAll(); supervisor = getTestableSupervisor(1, 1, true, "PT1H", null, null); @@ -1921,8 +1909,8 @@ public class KafkaSupervisorTest extends EasyMockSupport verifyAll(); EasyMock.reset(indexerMetadataStorageCoordinator); - expect(indexerMetadataStorageCoordinator.deleteDataSourceMetadata(DATASOURCE)).andReturn(true); - 
replay(indexerMetadataStorageCoordinator); + EasyMock.expect(indexerMetadataStorageCoordinator.deleteDataSourceMetadata(DATASOURCE)).andReturn(true); + EasyMock.replay(indexerMetadataStorageCoordinator); supervisor.resetInternal(null); verifyAll(); @@ -1933,11 +1921,11 @@ public class KafkaSupervisorTest extends EasyMockSupport public void testResetDataSourceMetadata() throws Exception { supervisor = getTestableSupervisor(1, 1, true, "PT1H", null, null); - expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); - expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); - expect(taskRunner.getRunningTasks()).andReturn(Collections.EMPTY_LIST).anyTimes(); - expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); - taskRunner.registerListener(anyObject(TaskRunnerListener.class), anyObject(Executor.class)); + EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); + EasyMock.expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); + EasyMock.expect(taskRunner.getRunningTasks()).andReturn(Collections.EMPTY_LIST).anyTimes(); + EasyMock.expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); + taskRunner.registerListener(EasyMock.anyObject(TaskRunnerListener.class), EasyMock.anyObject(Executor.class)); replayAll(); supervisor.start(); @@ -1963,13 +1951,13 @@ public class KafkaSupervisorTest extends EasyMockSupport new SeekableStreamStartSequenceNumbers<>(topic, ImmutableMap.of(0, 1000L), ImmutableSet.of())); EasyMock.reset(indexerMetadataStorageCoordinator); - expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)) - .andReturn(kafkaDataSourceMetadata); - expect(indexerMetadataStorageCoordinator.resetDataSourceMetadata( - capture(captureDataSource), - capture(captureDataSourceMetadata) + EasyMock.expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)) + .andReturn(kafkaDataSourceMetadata); + EasyMock.expect(indexerMetadataStorageCoordinator.resetDataSourceMetadata( + EasyMock.capture(captureDataSource), + EasyMock.capture(captureDataSourceMetadata) )).andReturn(true); - replay(indexerMetadataStorageCoordinator); + EasyMock.replay(indexerMetadataStorageCoordinator); try { supervisor.resetInternal(resetMetadata); @@ -1977,23 +1965,23 @@ public class KafkaSupervisorTest extends EasyMockSupport catch (NullPointerException npe) { // Expected as there will be an attempt to EasyMock.reset partitionGroups offsets to NOT_SET // however there would be no entries in the map as we have not put nay data in kafka - Assert.assertTrue(npe.getCause() == null); + Assert.assertNull(npe.getCause()); } verifyAll(); - Assert.assertEquals(captureDataSource.getValue(), DATASOURCE); - Assert.assertEquals(captureDataSourceMetadata.getValue(), expectedMetadata); + Assert.assertEquals(DATASOURCE, captureDataSource.getValue()); + Assert.assertEquals(expectedMetadata, captureDataSourceMetadata.getValue()); } @Test - public void testResetNoDataSourceMetadata() throws Exception + public void testResetNoDataSourceMetadata() { supervisor = getTestableSupervisor(1, 1, true, "PT1H", null, null); - expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); - expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); - expect(taskRunner.getRunningTasks()).andReturn(Collections.EMPTY_LIST).anyTimes(); - expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); - 
taskRunner.registerListener(anyObject(TaskRunnerListener.class), anyObject(Executor.class)); + EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); + EasyMock.expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); + EasyMock.expect(taskRunner.getRunningTasks()).andReturn(Collections.EMPTY_LIST).anyTimes(); + EasyMock.expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); + taskRunner.registerListener(EasyMock.anyObject(TaskRunnerListener.class), EasyMock.anyObject(Executor.class)); replayAll(); supervisor.start(); @@ -2010,8 +1998,8 @@ public class KafkaSupervisorTest extends EasyMockSupport EasyMock.reset(indexerMetadataStorageCoordinator); // no DataSourceMetadata in metadata store - expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn(null); - replay(indexerMetadataStorageCoordinator); + EasyMock.expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn(null); + EasyMock.replay(indexerMetadataStorageCoordinator); supervisor.resetInternal(resetMetadata); verifyAll(); @@ -2070,38 +2058,38 @@ public class KafkaSupervisorTest extends EasyMockSupport workItems.add(new TestTaskRunnerWorkItem(id1, null, location1)); workItems.add(new TestTaskRunnerWorkItem(id2, null, location2)); - expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); - expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); - expect(taskRunner.getRunningTasks()).andReturn(workItems).anyTimes(); - expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of(id1, id2, id3)).anyTimes(); - expect(taskStorage.getStatus("id1")).andReturn(Optional.of(TaskStatus.running("id1"))).anyTimes(); - expect(taskStorage.getStatus("id2")).andReturn(Optional.of(TaskStatus.running("id2"))).anyTimes(); - expect(taskStorage.getStatus("id3")).andReturn(Optional.of(TaskStatus.running("id3"))).anyTimes(); - expect(taskStorage.getTask("id1")).andReturn(Optional.of(id1)).anyTimes(); - expect(taskStorage.getTask("id2")).andReturn(Optional.of(id2)).anyTimes(); - expect(taskStorage.getTask("id3")).andReturn(Optional.of(id3)).anyTimes(); - expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( + EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); + EasyMock.expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); + EasyMock.expect(taskRunner.getRunningTasks()).andReturn(workItems).anyTimes(); + EasyMock.expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of(id1, id2, id3)).anyTimes(); + EasyMock.expect(taskStorage.getStatus("id1")).andReturn(Optional.of(TaskStatus.running("id1"))).anyTimes(); + EasyMock.expect(taskStorage.getStatus("id2")).andReturn(Optional.of(TaskStatus.running("id2"))).anyTimes(); + EasyMock.expect(taskStorage.getStatus("id3")).andReturn(Optional.of(TaskStatus.running("id3"))).anyTimes(); + EasyMock.expect(taskStorage.getTask("id1")).andReturn(Optional.of(id1)).anyTimes(); + EasyMock.expect(taskStorage.getTask("id2")).andReturn(Optional.of(id2)).anyTimes(); + EasyMock.expect(taskStorage.getTask("id3")).andReturn(Optional.of(id3)).anyTimes(); + EasyMock.expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( new KafkaDataSourceMetadata( null ) ).anyTimes(); - expect(taskClient.getStatusAsync("id1")).andReturn(Futures.immediateFuture(Status.PUBLISHING)); - 
expect(taskClient.getStatusAsync("id2")).andReturn(Futures.immediateFuture(Status.READING)); - expect(taskClient.getStatusAsync("id3")).andReturn(Futures.immediateFuture(Status.READING)); - expect(taskClient.getStartTimeAsync("id2")).andReturn(Futures.immediateFuture(startTime)); - expect(taskClient.getStartTimeAsync("id3")).andReturn(Futures.immediateFuture(startTime)); - expect(taskClient.getEndOffsets("id1")).andReturn(ImmutableMap.of(0, 10L, 1, 20L, 2, 30L)); + EasyMock.expect(taskClient.getStatusAsync("id1")).andReturn(Futures.immediateFuture(Status.PUBLISHING)); + EasyMock.expect(taskClient.getStatusAsync("id2")).andReturn(Futures.immediateFuture(Status.READING)); + EasyMock.expect(taskClient.getStatusAsync("id3")).andReturn(Futures.immediateFuture(Status.READING)); + EasyMock.expect(taskClient.getStartTimeAsync("id2")).andReturn(Futures.immediateFuture(startTime)); + EasyMock.expect(taskClient.getStartTimeAsync("id3")).andReturn(Futures.immediateFuture(startTime)); + EasyMock.expect(taskClient.getEndOffsets("id1")).andReturn(ImmutableMap.of(0, 10L, 1, 20L, 2, 30L)); TreeMap> checkpoints = new TreeMap<>(); checkpoints.put(0, ImmutableMap.of(0, 10L, 1, 20L, 2, 30L)); - expect(taskClient.getCheckpointsAsync(EasyMock.contains("id2"), EasyMock.anyBoolean())) - .andReturn(Futures.immediateFuture(checkpoints)) - .times(1); - expect(taskClient.getCheckpointsAsync(EasyMock.contains("id3"), EasyMock.anyBoolean())) - .andReturn(Futures.immediateFuture(checkpoints)) - .times(1); + EasyMock.expect(taskClient.getCheckpointsAsync(EasyMock.contains("id2"), EasyMock.anyBoolean())) + .andReturn(Futures.immediateFuture(checkpoints)) + .times(1); + EasyMock.expect(taskClient.getCheckpointsAsync(EasyMock.contains("id3"), EasyMock.anyBoolean())) + .andReturn(Futures.immediateFuture(checkpoints)) + .times(1); - taskRunner.registerListener(anyObject(TaskRunnerListener.class), anyObject(Executor.class)); + taskRunner.registerListener(EasyMock.anyObject(TaskRunnerListener.class), EasyMock.anyObject(Executor.class)); replayAll(); supervisor.start(); @@ -2109,17 +2097,17 @@ public class KafkaSupervisorTest extends EasyMockSupport verifyAll(); EasyMock.reset(taskQueue, indexerMetadataStorageCoordinator); - expect(indexerMetadataStorageCoordinator.deleteDataSourceMetadata(DATASOURCE)).andReturn(true); + EasyMock.expect(indexerMetadataStorageCoordinator.deleteDataSourceMetadata(DATASOURCE)).andReturn(true); taskQueue.shutdown("id2", "DataSourceMetadata is not found while reset"); taskQueue.shutdown("id3", "DataSourceMetadata is not found while reset"); - replay(taskQueue, indexerMetadataStorageCoordinator); + EasyMock.replay(taskQueue, indexerMetadataStorageCoordinator); supervisor.resetInternal(null); verifyAll(); } @Test - public void testNoDataIngestionTasks() throws Exception + public void testNoDataIngestionTasks() { final DateTime startTime = DateTimes.nowUtc(); supervisor = getTestableSupervisor(2, 1, true, "PT1S", null, null); @@ -2165,40 +2153,40 @@ public class KafkaSupervisorTest extends EasyMockSupport null ); - expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); - expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); - expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of(id1, id2, id3)).anyTimes(); - expect(taskStorage.getStatus("id1")).andReturn(Optional.of(TaskStatus.running("id1"))).anyTimes(); - expect(taskStorage.getStatus("id2")).andReturn(Optional.of(TaskStatus.running("id2"))).anyTimes(); - 
expect(taskStorage.getStatus("id3")).andReturn(Optional.of(TaskStatus.running("id3"))).anyTimes(); - expect(taskStorage.getTask("id1")).andReturn(Optional.of(id1)).anyTimes(); - expect(taskStorage.getTask("id2")).andReturn(Optional.of(id2)).anyTimes(); - expect(taskStorage.getTask("id3")).andReturn(Optional.of(id3)).anyTimes(); - expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( + EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); + EasyMock.expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); + EasyMock.expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of(id1, id2, id3)).anyTimes(); + EasyMock.expect(taskStorage.getStatus("id1")).andReturn(Optional.of(TaskStatus.running("id1"))).anyTimes(); + EasyMock.expect(taskStorage.getStatus("id2")).andReturn(Optional.of(TaskStatus.running("id2"))).anyTimes(); + EasyMock.expect(taskStorage.getStatus("id3")).andReturn(Optional.of(TaskStatus.running("id3"))).anyTimes(); + EasyMock.expect(taskStorage.getTask("id1")).andReturn(Optional.of(id1)).anyTimes(); + EasyMock.expect(taskStorage.getTask("id2")).andReturn(Optional.of(id2)).anyTimes(); + EasyMock.expect(taskStorage.getTask("id3")).andReturn(Optional.of(id3)).anyTimes(); + EasyMock.expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( new KafkaDataSourceMetadata( null ) ).anyTimes(); - expect(taskClient.getStatusAsync("id1")).andReturn(Futures.immediateFuture(Status.READING)); - expect(taskClient.getStatusAsync("id2")).andReturn(Futures.immediateFuture(Status.READING)); - expect(taskClient.getStatusAsync("id3")).andReturn(Futures.immediateFuture(Status.READING)); - expect(taskClient.getStartTimeAsync("id1")).andReturn(Futures.immediateFuture(startTime)); - expect(taskClient.getStartTimeAsync("id2")).andReturn(Futures.immediateFuture(startTime)); - expect(taskClient.getStartTimeAsync("id3")).andReturn(Futures.immediateFuture(startTime)); + EasyMock.expect(taskClient.getStatusAsync("id1")).andReturn(Futures.immediateFuture(Status.READING)); + EasyMock.expect(taskClient.getStatusAsync("id2")).andReturn(Futures.immediateFuture(Status.READING)); + EasyMock.expect(taskClient.getStatusAsync("id3")).andReturn(Futures.immediateFuture(Status.READING)); + EasyMock.expect(taskClient.getStartTimeAsync("id1")).andReturn(Futures.immediateFuture(startTime)); + EasyMock.expect(taskClient.getStartTimeAsync("id2")).andReturn(Futures.immediateFuture(startTime)); + EasyMock.expect(taskClient.getStartTimeAsync("id3")).andReturn(Futures.immediateFuture(startTime)); TreeMap> checkpoints = new TreeMap<>(); checkpoints.put(0, ImmutableMap.of(0, 10L, 1, 20L, 2, 30L)); - expect(taskClient.getCheckpointsAsync(EasyMock.contains("id1"), EasyMock.anyBoolean())) - .andReturn(Futures.immediateFuture(checkpoints)) - .times(1); - expect(taskClient.getCheckpointsAsync(EasyMock.contains("id2"), EasyMock.anyBoolean())) - .andReturn(Futures.immediateFuture(checkpoints)) - .times(1); - expect(taskClient.getCheckpointsAsync(EasyMock.contains("id3"), EasyMock.anyBoolean())) - .andReturn(Futures.immediateFuture(checkpoints)) - .times(1); + EasyMock.expect(taskClient.getCheckpointsAsync(EasyMock.contains("id1"), EasyMock.anyBoolean())) + .andReturn(Futures.immediateFuture(checkpoints)) + .times(1); + EasyMock.expect(taskClient.getCheckpointsAsync(EasyMock.contains("id2"), EasyMock.anyBoolean())) + .andReturn(Futures.immediateFuture(checkpoints)) + .times(1); + 
EasyMock.expect(taskClient.getCheckpointsAsync(EasyMock.contains("id3"), EasyMock.anyBoolean())) + .andReturn(Futures.immediateFuture(checkpoints)) + .times(1); - taskRunner.registerListener(anyObject(TaskRunnerListener.class), anyObject(Executor.class)); + taskRunner.registerListener(EasyMock.anyObject(TaskRunnerListener.class), EasyMock.anyObject(Executor.class)); replayAll(); supervisor.start(); @@ -2206,11 +2194,11 @@ public class KafkaSupervisorTest extends EasyMockSupport verifyAll(); EasyMock.reset(taskQueue, indexerMetadataStorageCoordinator); - expect(indexerMetadataStorageCoordinator.deleteDataSourceMetadata(DATASOURCE)).andReturn(true); + EasyMock.expect(indexerMetadataStorageCoordinator.deleteDataSourceMetadata(DATASOURCE)).andReturn(true); taskQueue.shutdown("id1", "DataSourceMetadata is not found while reset"); taskQueue.shutdown("id2", "DataSourceMetadata is not found while reset"); taskQueue.shutdown("id3", "DataSourceMetadata is not found while reset"); - replay(taskQueue, indexerMetadataStorageCoordinator); + EasyMock.replay(taskQueue, indexerMetadataStorageCoordinator); supervisor.resetInternal(null); verifyAll(); @@ -2218,13 +2206,13 @@ public class KafkaSupervisorTest extends EasyMockSupport @Test(timeout = 60_000L) public void testCheckpointForInactiveTaskGroup() - throws InterruptedException, ExecutionException, TimeoutException, JsonProcessingException + throws InterruptedException { supervisor = getTestableSupervisor(2, 1, true, "PT1S", null, null); supervisor.getStateManager().markRunFinished(); //not adding any events - final Task id1 = createKafkaIndexTask( + final KafkaIndexTask id1 = createKafkaIndexTask( "id1", DATASOURCE, 0, @@ -2270,41 +2258,41 @@ public class KafkaSupervisorTest extends EasyMockSupport workItems.add(new TestTaskRunnerWorkItem(id2, null, location2)); workItems.add(new TestTaskRunnerWorkItem(id2, null, location2)); - expect(taskRunner.getRunningTasks()).andReturn(workItems).anyTimes(); - expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); - expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); - expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of(id1, id2, id3)).anyTimes(); - expect(taskStorage.getStatus("id1")).andReturn(Optional.of(TaskStatus.running("id1"))).anyTimes(); - expect(taskStorage.getStatus("id2")).andReturn(Optional.of(TaskStatus.running("id2"))).anyTimes(); - expect(taskStorage.getStatus("id3")).andReturn(Optional.of(TaskStatus.running("id3"))).anyTimes(); - expect(taskStorage.getTask("id1")).andReturn(Optional.of(id1)).anyTimes(); - expect(taskStorage.getTask("id2")).andReturn(Optional.of(id2)).anyTimes(); - expect(taskStorage.getTask("id3")).andReturn(Optional.of(id3)).anyTimes(); - expect( + EasyMock.expect(taskRunner.getRunningTasks()).andReturn(workItems).anyTimes(); + EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); + EasyMock.expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); + EasyMock.expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of(id1, id2, id3)).anyTimes(); + EasyMock.expect(taskStorage.getStatus("id1")).andReturn(Optional.of(TaskStatus.running("id1"))).anyTimes(); + EasyMock.expect(taskStorage.getStatus("id2")).andReturn(Optional.of(TaskStatus.running("id2"))).anyTimes(); + EasyMock.expect(taskStorage.getStatus("id3")).andReturn(Optional.of(TaskStatus.running("id3"))).anyTimes(); + 
EasyMock.expect(taskStorage.getTask("id1")).andReturn(Optional.of(id1)).anyTimes(); + EasyMock.expect(taskStorage.getTask("id2")).andReturn(Optional.of(id2)).anyTimes(); + EasyMock.expect(taskStorage.getTask("id3")).andReturn(Optional.of(id3)).anyTimes(); + EasyMock.expect( indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn(new KafkaDataSourceMetadata(null) ).anyTimes(); - expect(taskClient.getStatusAsync("id1")).andReturn(Futures.immediateFuture(Status.READING)); - expect(taskClient.getStatusAsync("id2")).andReturn(Futures.immediateFuture(Status.READING)); - expect(taskClient.getStatusAsync("id3")).andReturn(Futures.immediateFuture(Status.READING)); + EasyMock.expect(taskClient.getStatusAsync("id1")).andReturn(Futures.immediateFuture(Status.READING)); + EasyMock.expect(taskClient.getStatusAsync("id2")).andReturn(Futures.immediateFuture(Status.READING)); + EasyMock.expect(taskClient.getStatusAsync("id3")).andReturn(Futures.immediateFuture(Status.READING)); final DateTime startTime = DateTimes.nowUtc(); - expect(taskClient.getStartTimeAsync("id1")).andReturn(Futures.immediateFuture(startTime)); - expect(taskClient.getStartTimeAsync("id2")).andReturn(Futures.immediateFuture(startTime)); - expect(taskClient.getStartTimeAsync("id3")).andReturn(Futures.immediateFuture(startTime)); + EasyMock.expect(taskClient.getStartTimeAsync("id1")).andReturn(Futures.immediateFuture(startTime)); + EasyMock.expect(taskClient.getStartTimeAsync("id2")).andReturn(Futures.immediateFuture(startTime)); + EasyMock.expect(taskClient.getStartTimeAsync("id3")).andReturn(Futures.immediateFuture(startTime)); final TreeMap> checkpoints = new TreeMap<>(); checkpoints.put(0, ImmutableMap.of(0, 10L, 1, 20L, 2, 30L)); - expect(taskClient.getCheckpointsAsync(EasyMock.contains("id1"), EasyMock.anyBoolean())) - .andReturn(Futures.immediateFuture(checkpoints)) - .times(1); - expect(taskClient.getCheckpointsAsync(EasyMock.contains("id2"), EasyMock.anyBoolean())) - .andReturn(Futures.immediateFuture(checkpoints)) - .times(1); - expect(taskClient.getCheckpointsAsync(EasyMock.contains("id3"), EasyMock.anyBoolean())) - .andReturn(Futures.immediateFuture(checkpoints)) - .times(1); + EasyMock.expect(taskClient.getCheckpointsAsync(EasyMock.contains("id1"), EasyMock.anyBoolean())) + .andReturn(Futures.immediateFuture(checkpoints)) + .times(1); + EasyMock.expect(taskClient.getCheckpointsAsync(EasyMock.contains("id2"), EasyMock.anyBoolean())) + .andReturn(Futures.immediateFuture(checkpoints)) + .times(1); + EasyMock.expect(taskClient.getCheckpointsAsync(EasyMock.contains("id3"), EasyMock.anyBoolean())) + .andReturn(Futures.immediateFuture(checkpoints)) + .times(1); - taskRunner.registerListener(anyObject(TaskRunnerListener.class), anyObject(Executor.class)); + taskRunner.registerListener(EasyMock.anyObject(TaskRunnerListener.class), EasyMock.anyObject(Executor.class)); replayAll(); supervisor.start(); @@ -2314,7 +2302,7 @@ public class KafkaSupervisorTest extends EasyMockSupport supervisor.moveTaskGroupToPendingCompletion(0); supervisor.checkpoint( 0, - ((KafkaIndexTask) id1).getIOConfig().getBaseSequenceName(), + id1.getIOConfig().getBaseSequenceName(), new KafkaDataSourceMetadata(new SeekableStreamStartSequenceNumbers<>( topic, checkpoints.get(0), @@ -2344,7 +2332,7 @@ public class KafkaSupervisorTest extends EasyMockSupport { supervisor = getTestableSupervisor(2, 1, true, "PT1S", null, null); //not adding any events - final Task id1 = createKafkaIndexTask( + final KafkaIndexTask id1 = createKafkaIndexTask( 
"id1", DATASOURCE, 0, @@ -2383,16 +2371,16 @@ public class KafkaSupervisorTest extends EasyMockSupport null ); - expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); - expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); - expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of(id1, id2, id3)).anyTimes(); - expect(taskStorage.getStatus("id1")).andReturn(Optional.of(TaskStatus.running("id1"))).anyTimes(); - expect(taskStorage.getStatus("id2")).andReturn(Optional.of(TaskStatus.running("id2"))).anyTimes(); - expect(taskStorage.getStatus("id3")).andReturn(Optional.of(TaskStatus.running("id3"))).anyTimes(); - expect(taskStorage.getTask("id1")).andReturn(Optional.of(id1)).anyTimes(); - expect(taskStorage.getTask("id2")).andReturn(Optional.of(id2)).anyTimes(); - expect(taskStorage.getTask("id3")).andReturn(Optional.of(id3)).anyTimes(); - expect( + EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); + EasyMock.expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); + EasyMock.expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of(id1, id2, id3)).anyTimes(); + EasyMock.expect(taskStorage.getStatus("id1")).andReturn(Optional.of(TaskStatus.running("id1"))).anyTimes(); + EasyMock.expect(taskStorage.getStatus("id2")).andReturn(Optional.of(TaskStatus.running("id2"))).anyTimes(); + EasyMock.expect(taskStorage.getStatus("id3")).andReturn(Optional.of(TaskStatus.running("id3"))).anyTimes(); + EasyMock.expect(taskStorage.getTask("id1")).andReturn(Optional.of(id1)).anyTimes(); + EasyMock.expect(taskStorage.getTask("id2")).andReturn(Optional.of(id2)).anyTimes(); + EasyMock.expect(taskStorage.getTask("id3")).andReturn(Optional.of(id3)).anyTimes(); + EasyMock.expect( indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn(new KafkaDataSourceMetadata(null) ).anyTimes(); @@ -2402,7 +2390,7 @@ public class KafkaSupervisorTest extends EasyMockSupport supervisor.checkpoint( 0, - ((KafkaIndexTask) id1).getIOConfig().getBaseSequenceName(), + id1.getIOConfig().getBaseSequenceName(), new KafkaDataSourceMetadata(new SeekableStreamStartSequenceNumbers<>( topic, Collections.emptyMap(), @@ -2437,13 +2425,13 @@ public class KafkaSupervisorTest extends EasyMockSupport @Test(timeout = 60_000L) public void testCheckpointWithNullTaskGroupId() - throws InterruptedException, ExecutionException, TimeoutException, JsonProcessingException + throws InterruptedException { supervisor = getTestableSupervisor(1, 3, true, "PT1S", null, null); supervisor.getStateManager().markRunFinished(); //not adding any events - final Task id1 = createKafkaIndexTask( + final KafkaIndexTask id1 = createKafkaIndexTask( "id1", DATASOURCE, 0, @@ -2473,40 +2461,40 @@ public class KafkaSupervisorTest extends EasyMockSupport null ); - expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); - expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); - expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of(id1, id2, id3)).anyTimes(); - expect(taskStorage.getStatus("id1")).andReturn(Optional.of(TaskStatus.running("id1"))).anyTimes(); - expect(taskStorage.getStatus("id2")).andReturn(Optional.of(TaskStatus.running("id2"))).anyTimes(); - expect(taskStorage.getStatus("id3")).andReturn(Optional.of(TaskStatus.running("id3"))).anyTimes(); - expect(taskStorage.getTask("id1")).andReturn(Optional.of(id1)).anyTimes(); - 
expect(taskStorage.getTask("id2")).andReturn(Optional.of(id2)).anyTimes(); - expect(taskStorage.getTask("id3")).andReturn(Optional.of(id3)).anyTimes(); - expect( + EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); + EasyMock.expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); + EasyMock.expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of(id1, id2, id3)).anyTimes(); + EasyMock.expect(taskStorage.getStatus("id1")).andReturn(Optional.of(TaskStatus.running("id1"))).anyTimes(); + EasyMock.expect(taskStorage.getStatus("id2")).andReturn(Optional.of(TaskStatus.running("id2"))).anyTimes(); + EasyMock.expect(taskStorage.getStatus("id3")).andReturn(Optional.of(TaskStatus.running("id3"))).anyTimes(); + EasyMock.expect(taskStorage.getTask("id1")).andReturn(Optional.of(id1)).anyTimes(); + EasyMock.expect(taskStorage.getTask("id2")).andReturn(Optional.of(id2)).anyTimes(); + EasyMock.expect(taskStorage.getTask("id3")).andReturn(Optional.of(id3)).anyTimes(); + EasyMock.expect( indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn(new KafkaDataSourceMetadata(null) ).anyTimes(); - taskRunner.registerListener(anyObject(TaskRunnerListener.class), anyObject(Executor.class)); - expect(taskClient.getStatusAsync(EasyMock.anyString())) - .andReturn(Futures.immediateFuture(Status.READING)) - .anyTimes(); + taskRunner.registerListener(EasyMock.anyObject(TaskRunnerListener.class), EasyMock.anyObject(Executor.class)); + EasyMock.expect(taskClient.getStatusAsync(EasyMock.anyString())) + .andReturn(Futures.immediateFuture(Status.READING)) + .anyTimes(); final TreeMap> checkpoints = new TreeMap<>(); checkpoints.put(0, ImmutableMap.of(0, 0L)); - expect(taskClient.getCheckpointsAsync(EasyMock.anyString(), EasyMock.anyBoolean())) - .andReturn(Futures.immediateFuture(checkpoints)) - .times(3); - expect(taskClient.getStartTimeAsync(EasyMock.anyString())) - .andReturn(Futures.immediateFuture(DateTimes.nowUtc())) - .anyTimes(); - expect(taskClient.pauseAsync(EasyMock.anyString())) - .andReturn(Futures.immediateFuture(ImmutableMap.of(0, 10L))) - .anyTimes(); - expect(taskClient.setEndOffsetsAsync( + EasyMock.expect(taskClient.getCheckpointsAsync(EasyMock.anyString(), EasyMock.anyBoolean())) + .andReturn(Futures.immediateFuture(checkpoints)) + .times(3); + EasyMock.expect(taskClient.getStartTimeAsync(EasyMock.anyString())) + .andReturn(Futures.immediateFuture(DateTimes.nowUtc())) + .anyTimes(); + EasyMock.expect(taskClient.pauseAsync(EasyMock.anyString())) + .andReturn(Futures.immediateFuture(ImmutableMap.of(0, 10L))) + .anyTimes(); + EasyMock.expect(taskClient.setEndOffsetsAsync( EasyMock.anyString(), EasyMock.eq(ImmutableMap.of(0, 10L)), EasyMock.anyBoolean() )) - .andReturn(Futures.immediateFuture(true)) - .anyTimes(); + .andReturn(Futures.immediateFuture(true)) + .anyTimes(); replayAll(); @@ -2518,7 +2506,7 @@ public class KafkaSupervisorTest extends EasyMockSupport newCheckpoints.put(0, ImmutableMap.of(0, 10L)); supervisor.checkpoint( null, - ((KafkaIndexTask) id1).getIOConfig().getBaseSequenceName(), + id1.getIOConfig().getBaseSequenceName(), new KafkaDataSourceMetadata(new SeekableStreamStartSequenceNumbers<>( topic, checkpoints.get(0), @@ -2542,20 +2530,20 @@ public class KafkaSupervisorTest extends EasyMockSupport supervisor = getTestableSupervisor(1, 1, true, "PT1H", null, null, true, kafkaHost); addSomeEvents(1); - expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); - 
expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); - expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); - expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( + EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); + EasyMock.expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); + EasyMock.expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); + EasyMock.expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( new KafkaDataSourceMetadata( null ) ).anyTimes(); // this asserts that taskQueue.add does not in fact get called because supervisor should be suspended - expect(taskQueue.add(anyObject())).andAnswer((IAnswer) () -> { + EasyMock.expect(taskQueue.add(EasyMock.anyObject())).andAnswer((IAnswer) () -> { Assert.fail(); return null; }).anyTimes(); - taskRunner.registerListener(anyObject(TaskRunnerListener.class), anyObject(Executor.class)); + taskRunner.registerListener(EasyMock.anyObject(TaskRunnerListener.class), EasyMock.anyObject(Executor.class)); replayAll(); supervisor.start(); @@ -2618,51 +2606,51 @@ public class KafkaSupervisorTest extends EasyMockSupport workItems.add(new TestTaskRunnerWorkItem(id1, null, location1)); workItems.add(new TestTaskRunnerWorkItem(id2, null, location2)); - expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); - expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); - expect(taskRunner.getRunningTasks()).andReturn(workItems).anyTimes(); - expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of(id1, id2, id3)).anyTimes(); - expect(taskStorage.getStatus("id1")).andReturn(Optional.of(TaskStatus.running("id1"))).anyTimes(); - expect(taskStorage.getStatus("id2")).andReturn(Optional.of(TaskStatus.running("id2"))).anyTimes(); - expect(taskStorage.getStatus("id3")).andReturn(Optional.of(TaskStatus.running("id3"))).anyTimes(); - expect(taskStorage.getTask("id1")).andReturn(Optional.of(id1)).anyTimes(); - expect(taskStorage.getTask("id2")).andReturn(Optional.of(id2)).anyTimes(); - expect(taskStorage.getTask("id3")).andReturn(Optional.of(id3)).anyTimes(); - expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( + EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); + EasyMock.expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); + EasyMock.expect(taskRunner.getRunningTasks()).andReturn(workItems).anyTimes(); + EasyMock.expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of(id1, id2, id3)).anyTimes(); + EasyMock.expect(taskStorage.getStatus("id1")).andReturn(Optional.of(TaskStatus.running("id1"))).anyTimes(); + EasyMock.expect(taskStorage.getStatus("id2")).andReturn(Optional.of(TaskStatus.running("id2"))).anyTimes(); + EasyMock.expect(taskStorage.getStatus("id3")).andReturn(Optional.of(TaskStatus.running("id3"))).anyTimes(); + EasyMock.expect(taskStorage.getTask("id1")).andReturn(Optional.of(id1)).anyTimes(); + EasyMock.expect(taskStorage.getTask("id2")).andReturn(Optional.of(id2)).anyTimes(); + EasyMock.expect(taskStorage.getTask("id3")).andReturn(Optional.of(id3)).anyTimes(); + EasyMock.expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( new KafkaDataSourceMetadata( null ) ).anyTimes(); - expect(taskClient.getStatusAsync("id1")) - 
.andReturn(Futures.immediateFuture(Status.PUBLISHING)); - expect(taskClient.getStatusAsync("id2")) - .andReturn(Futures.immediateFuture(Status.READING)); - expect(taskClient.getStatusAsync("id3")) - .andReturn(Futures.immediateFuture(Status.READING)); - expect(taskClient.getStartTimeAsync("id2")).andReturn(Futures.immediateFuture(startTime)); - expect(taskClient.getStartTimeAsync("id3")).andReturn(Futures.immediateFuture(startTime)); - expect(taskClient.getEndOffsets("id1")).andReturn(ImmutableMap.of(0, 10L, 1, 20L, 2, 30L)); + EasyMock.expect(taskClient.getStatusAsync("id1")) + .andReturn(Futures.immediateFuture(Status.PUBLISHING)); + EasyMock.expect(taskClient.getStatusAsync("id2")) + .andReturn(Futures.immediateFuture(Status.READING)); + EasyMock.expect(taskClient.getStatusAsync("id3")) + .andReturn(Futures.immediateFuture(Status.READING)); + EasyMock.expect(taskClient.getStartTimeAsync("id2")).andReturn(Futures.immediateFuture(startTime)); + EasyMock.expect(taskClient.getStartTimeAsync("id3")).andReturn(Futures.immediateFuture(startTime)); + EasyMock.expect(taskClient.getEndOffsets("id1")).andReturn(ImmutableMap.of(0, 10L, 1, 20L, 2, 30L)); // getCheckpoints will not be called for id1 as it is in publishing state TreeMap> checkpoints = new TreeMap<>(); checkpoints.put(0, ImmutableMap.of(0, 10L, 1, 20L, 2, 30L)); - expect(taskClient.getCheckpointsAsync(EasyMock.contains("id2"), EasyMock.anyBoolean())) - .andReturn(Futures.immediateFuture(checkpoints)) - .times(1); - expect(taskClient.getCheckpointsAsync(EasyMock.contains("id3"), EasyMock.anyBoolean())) - .andReturn(Futures.immediateFuture(checkpoints)) - .times(1); + EasyMock.expect(taskClient.getCheckpointsAsync(EasyMock.contains("id2"), EasyMock.anyBoolean())) + .andReturn(Futures.immediateFuture(checkpoints)) + .times(1); + EasyMock.expect(taskClient.getCheckpointsAsync(EasyMock.contains("id3"), EasyMock.anyBoolean())) + .andReturn(Futures.immediateFuture(checkpoints)) + .times(1); - taskRunner.registerListener(anyObject(TaskRunnerListener.class), anyObject(Executor.class)); + taskRunner.registerListener(EasyMock.anyObject(TaskRunnerListener.class), EasyMock.anyObject(Executor.class)); - expect(taskClient.pauseAsync("id2")) - .andReturn(Futures.immediateFuture(ImmutableMap.of(0, 15L, 1, 25L, 2, 30L))); - expect(taskClient.setEndOffsetsAsync("id2", ImmutableMap.of(0, 15L, 1, 25L, 2, 30L), true)) - .andReturn(Futures.immediateFuture(true)); + EasyMock.expect(taskClient.pauseAsync("id2")) + .andReturn(Futures.immediateFuture(ImmutableMap.of(0, 15L, 1, 25L, 2, 30L))); + EasyMock.expect(taskClient.setEndOffsetsAsync("id2", ImmutableMap.of(0, 15L, 1, 25L, 2, 30L), true)) + .andReturn(Futures.immediateFuture(true)); taskQueue.shutdown("id3", "Killing task for graceful shutdown"); - expectLastCall().times(1); + EasyMock.expectLastCall().times(1); taskQueue.shutdown("id3", "Killing task [%s] which hasn't been assigned to a worker", "id3"); - expectLastCall().times(1); + EasyMock.expectLastCall().times(1); replayAll(); supervisor.start(); @@ -2671,13 +2659,13 @@ public class KafkaSupervisorTest extends EasyMockSupport } @Test - public void testResetSuspended() throws Exception + public void testResetSuspended() { - expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); - expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); - expect(taskRunner.getRunningTasks()).andReturn(Collections.EMPTY_LIST).anyTimes(); - expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); - 
taskRunner.registerListener(anyObject(TaskRunnerListener.class), anyObject(Executor.class)); + EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); + EasyMock.expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); + EasyMock.expect(taskRunner.getRunningTasks()).andReturn(Collections.EMPTY_LIST).anyTimes(); + EasyMock.expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); + taskRunner.registerListener(EasyMock.anyObject(TaskRunnerListener.class), EasyMock.anyObject(Executor.class)); replayAll(); supervisor = getTestableSupervisor(1, 1, true, "PT1H", null, null, true, kafkaHost); @@ -2686,8 +2674,8 @@ public class KafkaSupervisorTest extends EasyMockSupport verifyAll(); EasyMock.reset(indexerMetadataStorageCoordinator); - expect(indexerMetadataStorageCoordinator.deleteDataSourceMetadata(DATASOURCE)).andReturn(true); - replay(indexerMetadataStorageCoordinator); + EasyMock.expect(indexerMetadataStorageCoordinator.deleteDataSourceMetadata(DATASOURCE)).andReturn(true); + EasyMock.replay(indexerMetadataStorageCoordinator); supervisor.resetInternal(null); verifyAll(); @@ -2709,10 +2697,10 @@ public class KafkaSupervisorTest extends EasyMockSupport ); addSomeEvents(1); - expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); - expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); - expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); - expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( + EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); + EasyMock.expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); + EasyMock.expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); + EasyMock.expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( new KafkaDataSourceMetadata( null ) @@ -2735,16 +2723,16 @@ public class KafkaSupervisorTest extends EasyMockSupport resetAll(); Capture captured = Capture.newInstance(); - expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); - expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); - expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); - expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( + EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); + EasyMock.expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); + EasyMock.expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); + EasyMock.expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( new KafkaDataSourceMetadata( null ) ).anyTimes(); - expect(taskQueue.add(capture(captured))).andReturn(true); - taskRunner.registerListener(anyObject(TaskRunnerListener.class), anyObject(Executor.class)); + EasyMock.expect(taskQueue.add(EasyMock.capture(captured))).andReturn(true); + taskRunner.registerListener(EasyMock.anyObject(TaskRunnerListener.class), EasyMock.anyObject(Executor.class)); replayAll(); // Fix the bad hostname during the initialization retries and finish the supervisor start. 
@@ -2822,12 +2810,12 @@ public class KafkaSupervisorTest extends EasyMockSupport ImmutableSet.of() ); - expect(taskClient.getMovingAveragesAsync("task1")).andReturn(Futures.immediateFuture(ImmutableMap.of( + EasyMock.expect(taskClient.getMovingAveragesAsync("task1")).andReturn(Futures.immediateFuture(ImmutableMap.of( "prop1", "val1" ))).times(1); - expect(taskClient.getMovingAveragesAsync("task2")).andReturn(Futures.immediateFuture(ImmutableMap.of( + EasyMock.expect(taskClient.getMovingAveragesAsync("task2")).andReturn(Futures.immediateFuture(ImmutableMap.of( "prop2", "val2" ))).times(1); @@ -3528,7 +3516,7 @@ public class KafkaSupervisorTest extends EasyMockSupport private final TaskLocation location; private final String dataSource; - public TestTaskRunnerWorkItem(Task task, ListenableFuture result, TaskLocation location) + TestTaskRunnerWorkItem(Task task, ListenableFuture result, TaskLocation location) { super(task.getId(), result); this.taskType = task.getType(); @@ -3599,7 +3587,7 @@ public class KafkaSupervisorTest extends EasyMockSupport private static class TestableKafkaSupervisorWithCustomIsTaskCurrent extends TestableKafkaSupervisor { - private boolean isTaskCurrentReturn; + private final boolean isTaskCurrentReturn; public TestableKafkaSupervisorWithCustomIsTaskCurrent( TaskStorage taskStorage, diff --git a/extensions-core/kinesis-indexing-service/src/test/java/org/apache/druid/indexing/kinesis/KinesisIndexTaskClientTest.java b/extensions-core/kinesis-indexing-service/src/test/java/org/apache/druid/indexing/kinesis/KinesisIndexTaskClientTest.java index 9ac25f5de6e..cc488f40104 100644 --- a/extensions-core/kinesis-indexing-service/src/test/java/org/apache/druid/indexing/kinesis/KinesisIndexTaskClientTest.java +++ b/extensions-core/kinesis-indexing-service/src/test/java/org/apache/druid/indexing/kinesis/KinesisIndexTaskClientTest.java @@ -66,8 +66,6 @@ import java.util.Collections; import java.util.List; import java.util.Map; -import static org.easymock.EasyMock.expect; - @RunWith(Parameterized.class) public class KinesisIndexTaskClientTest extends EasyMockSupport @@ -86,7 +84,7 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport private static final long TEST_NUM_RETRIES = 0; private static final String URL_FORMATTER = "http://%s:%d/druid/worker/v1/chat/%s/%s"; - private int numThreads; + private final int numThreads; private HttpClient httpClient; private TaskInfoProvider taskInfoProvider; private FullResponseHolder responseHolder; @@ -115,20 +113,20 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport headers = createMock(HttpHeaders.class); client = new TestableKinesisIndexTaskClient(httpClient, objectMapper, taskInfoProvider); - expect(taskInfoProvider.getTaskLocation(TEST_ID)) - .andReturn(new TaskLocation(TEST_HOST, TEST_PORT, TEST_TLS_PORT)) - .anyTimes(); - expect(taskInfoProvider.getTaskStatus(TEST_ID)) - .andReturn(Optional.of(TaskStatus.running(TEST_ID))) - .anyTimes(); + EasyMock.expect(taskInfoProvider.getTaskLocation(TEST_ID)) + .andReturn(new TaskLocation(TEST_HOST, TEST_PORT, TEST_TLS_PORT)) + .anyTimes(); + EasyMock.expect(taskInfoProvider.getTaskStatus(TEST_ID)) + .andReturn(Optional.of(TaskStatus.running(TEST_ID))) + .anyTimes(); for (String testId : TEST_IDS) { - expect(taskInfoProvider.getTaskLocation(testId)) - .andReturn(new TaskLocation(TEST_HOST, TEST_PORT, TEST_TLS_PORT)) - .anyTimes(); - expect(taskInfoProvider.getTaskStatus(testId)) - .andReturn(Optional.of(TaskStatus.running(testId))) - .anyTimes(); + 
EasyMock.expect(taskInfoProvider.getTaskLocation(testId)) + .andReturn(new TaskLocation(TEST_HOST, TEST_PORT, TEST_TLS_PORT)) + .anyTimes(); + EasyMock.expect(taskInfoProvider.getTaskStatus(testId)) + .andReturn(Optional.of(TaskStatus.running(testId))) + .anyTimes(); } } @@ -142,10 +140,10 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport public void testNoTaskLocation() throws IOException { EasyMock.reset(taskInfoProvider); - expect(taskInfoProvider.getTaskLocation(TEST_ID)).andReturn(TaskLocation.unknown()).anyTimes(); - expect(taskInfoProvider.getTaskStatus(TEST_ID)) - .andReturn(Optional.of(TaskStatus.running(TEST_ID))) - .anyTimes(); + EasyMock.expect(taskInfoProvider.getTaskLocation(TEST_ID)).andReturn(TaskLocation.unknown()).anyTimes(); + EasyMock.expect(taskInfoProvider.getTaskStatus(TEST_ID)) + .andReturn(Optional.of(TaskStatus.running(TEST_ID))) + .anyTimes(); replayAll(); Assert.assertFalse(client.stop(TEST_ID, true)); @@ -169,12 +167,12 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport expectedException.expectMessage("Aborting request because task [test-id] is not runnable"); EasyMock.reset(taskInfoProvider); - expect(taskInfoProvider.getTaskLocation(TEST_ID)) - .andReturn(new TaskLocation(TEST_HOST, TEST_PORT, TEST_TLS_PORT)) - .anyTimes(); - expect(taskInfoProvider.getTaskStatus(TEST_ID)) - .andReturn(Optional.of(TaskStatus.failure(TEST_ID))) - .anyTimes(); + EasyMock.expect(taskInfoProvider.getTaskLocation(TEST_ID)) + .andReturn(new TaskLocation(TEST_HOST, TEST_PORT, TEST_TLS_PORT)) + .anyTimes(); + EasyMock.expect(taskInfoProvider.getTaskStatus(TEST_ID)) + .andReturn(Optional.of(TaskStatus.failure(TEST_ID))) + .anyTimes(); replayAll(); client.getCurrentOffsets(TEST_ID, true); @@ -187,9 +185,9 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport expectedException.expect(RuntimeException.class); expectedException.expectMessage("org.apache.druid.java.util.common.IOE: Received status [500] and content []"); - expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.INTERNAL_SERVER_ERROR).times(2); - expect(responseHolder.getContent()).andReturn(""); - expect( + EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.INTERNAL_SERVER_ERROR).times(2); + EasyMock.expect(responseHolder.getContent()).andReturn(""); + EasyMock.expect( httpClient.go( EasyMock.anyObject(Request.class), EasyMock.anyObject(FullResponseHandler.class), @@ -210,9 +208,9 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport expectedException.expect(IAE.class); expectedException.expectMessage("Received 400 Bad Request with body:"); - expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.BAD_REQUEST).times(2); - expect(responseHolder.getContent()).andReturn(""); - expect( + EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.BAD_REQUEST).times(2); + EasyMock.expect(responseHolder.getContent()).andReturn(""); + EasyMock.expect( httpClient.go( EasyMock.anyObject(Request.class), EasyMock.anyObject(FullResponseHandler.class), @@ -230,14 +228,14 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport @Test public void testTaskLocationMismatch() { - expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.NOT_FOUND).times(3) - .andReturn(HttpResponseStatus.OK); - expect(responseHolder.getResponse()).andReturn(response); - expect(responseHolder.getContent()).andReturn("").times(2) - .andReturn("{}"); - expect(response.headers()).andReturn(headers); - 
expect(headers.get("X-Druid-Task-Id")).andReturn("a-different-task-id"); - expect( + EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.NOT_FOUND).times(3) + .andReturn(HttpResponseStatus.OK); + EasyMock.expect(responseHolder.getResponse()).andReturn(response); + EasyMock.expect(responseHolder.getContent()).andReturn("").times(2) + .andReturn("{}"); + EasyMock.expect(response.headers()).andReturn(headers); + EasyMock.expect(headers.get("X-Druid-Task-Id")).andReturn("a-different-task-id"); + EasyMock.expect( httpClient.go( EasyMock.anyObject(Request.class), EasyMock.anyObject(FullResponseHandler.class), @@ -258,9 +256,9 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport public void testGetCurrentOffsets() throws Exception { Capture captured = Capture.newInstance(); - expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK); - expect(responseHolder.getContent()).andReturn("{\"0\":1, \"1\":10}"); - expect(httpClient.go( + EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK); + EasyMock.expect(responseHolder.getContent()).andReturn("{\"0\":1, \"1\":10}"); + EasyMock.expect(httpClient.go( EasyMock.capture(captured), EasyMock.anyObject(FullResponseHandler.class), EasyMock.eq(TEST_HTTP_TIMEOUT) @@ -291,15 +289,15 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport client = new TestableKinesisIndexTaskClient(httpClient, objectMapper, taskInfoProvider, 3); Capture captured = Capture.newInstance(CaptureType.ALL); - expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.NOT_FOUND).times(6) - .andReturn(HttpResponseStatus.OK).times(1); - expect(responseHolder.getContent()).andReturn("").times(4) - .andReturn("{\"0\":1, \"1\":10}"); - expect(responseHolder.getResponse()).andReturn(response).times(2); - expect(response.headers()).andReturn(headers).times(2); - expect(headers.get("X-Druid-Task-Id")).andReturn(TEST_ID).times(2); + EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.NOT_FOUND).times(6) + .andReturn(HttpResponseStatus.OK).times(1); + EasyMock.expect(responseHolder.getContent()).andReturn("").times(4) + .andReturn("{\"0\":1, \"1\":10}"); + EasyMock.expect(responseHolder.getResponse()).andReturn(response).times(2); + EasyMock.expect(response.headers()).andReturn(headers).times(2); + EasyMock.expect(headers.get("X-Druid-Task-Id")).andReturn(TEST_ID).times(2); - expect(httpClient.go( + EasyMock.expect(httpClient.go( EasyMock.capture(captured), EasyMock.anyObject(FullResponseHandler.class), EasyMock.eq(TEST_HTTP_TIMEOUT) @@ -335,13 +333,13 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport client = new TestableKinesisIndexTaskClient(httpClient, objectMapper, taskInfoProvider, 2); - expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.NOT_FOUND).anyTimes(); - expect(responseHolder.getContent()).andReturn("").anyTimes(); - expect(responseHolder.getResponse()).andReturn(response).anyTimes(); - expect(response.headers()).andReturn(headers).anyTimes(); - expect(headers.get("X-Druid-Task-Id")).andReturn(TEST_ID).anyTimes(); + EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.NOT_FOUND).anyTimes(); + EasyMock.expect(responseHolder.getContent()).andReturn("").anyTimes(); + EasyMock.expect(responseHolder.getResponse()).andReturn(response).anyTimes(); + EasyMock.expect(response.headers()).andReturn(headers).anyTimes(); + EasyMock.expect(headers.get("X-Druid-Task-Id")).andReturn(TEST_ID).anyTimes(); - expect( + EasyMock.expect( 
httpClient.go( EasyMock.anyObject(Request.class), EasyMock.anyObject(FullResponseHandler.class), @@ -358,9 +356,9 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport public void testGetEndOffsets() throws Exception { Capture captured = Capture.newInstance(); - expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK); - expect(responseHolder.getContent()).andReturn("{\"0\":1, \"1\":10}"); - expect(httpClient.go( + EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK); + EasyMock.expect(responseHolder.getContent()).andReturn("{\"0\":1, \"1\":10}"); + EasyMock.expect(httpClient.go( EasyMock.capture(captured), EasyMock.anyObject(FullResponseHandler.class), EasyMock.eq(TEST_HTTP_TIMEOUT) @@ -392,13 +390,13 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport DateTime now = DateTimes.nowUtc(); Capture captured = Capture.newInstance(); - expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.NOT_FOUND).times(3) - .andReturn(HttpResponseStatus.OK); - expect(responseHolder.getResponse()).andReturn(response); - expect(response.headers()).andReturn(headers); - expect(headers.get("X-Druid-Task-Id")).andReturn(null); - expect(responseHolder.getContent()).andReturn(String.valueOf(now.getMillis())).anyTimes(); - expect(httpClient.go( + EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.NOT_FOUND).times(3) + .andReturn(HttpResponseStatus.OK); + EasyMock.expect(responseHolder.getResponse()).andReturn(response); + EasyMock.expect(response.headers()).andReturn(headers); + EasyMock.expect(headers.get("X-Druid-Task-Id")).andReturn(null); + EasyMock.expect(responseHolder.getContent()).andReturn(String.valueOf(now.getMillis())).anyTimes(); + EasyMock.expect(httpClient.go( EasyMock.capture(captured), EasyMock.anyObject(FullResponseHandler.class), EasyMock.eq(TEST_HTTP_TIMEOUT) @@ -427,9 +425,9 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport Status status = Status.READING; Capture captured = Capture.newInstance(); - expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK); - expect(responseHolder.getContent()).andReturn(StringUtils.format("\"%s\"", status.toString())).anyTimes(); - expect(httpClient.go( + EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK); + EasyMock.expect(responseHolder.getContent()).andReturn(StringUtils.format("\"%s\"", status.toString())).anyTimes(); + EasyMock.expect(httpClient.go( EasyMock.capture(captured), EasyMock.anyObject(FullResponseHandler.class), EasyMock.eq(TEST_HTTP_TIMEOUT) @@ -456,9 +454,9 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport public void testPause() throws Exception { Capture captured = Capture.newInstance(); - expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).times(2); - expect(responseHolder.getContent()).andReturn("{\"0\":1, \"1\":10}").anyTimes(); - expect(httpClient.go( + EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).times(2); + EasyMock.expect(responseHolder.getContent()).andReturn("{\"0\":1, \"1\":10}").anyTimes(); + EasyMock.expect(httpClient.go( EasyMock.capture(captured), EasyMock.anyObject(FullResponseHandler.class), EasyMock.eq(TEST_HTTP_TIMEOUT) @@ -489,25 +487,25 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport Capture captured = Capture.newInstance(); Capture captured2 = Capture.newInstance(); Capture captured3 = Capture.newInstance(); - expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.ACCEPTED).times(2) - 
.andReturn(HttpResponseStatus.OK).anyTimes(); - expect(responseHolder.getContent()).andReturn("\"PAUSED\"").times(2) - .andReturn("{\"0\":1, \"1\":10}").anyTimes(); - expect(httpClient.go( + EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.ACCEPTED).times(2) + .andReturn(HttpResponseStatus.OK).anyTimes(); + EasyMock.expect(responseHolder.getContent()).andReturn("\"PAUSED\"").times(2) + .andReturn("{\"0\":1, \"1\":10}").anyTimes(); + EasyMock.expect(httpClient.go( EasyMock.capture(captured), EasyMock.anyObject(FullResponseHandler.class), EasyMock.eq(TEST_HTTP_TIMEOUT) )).andReturn( Futures.immediateFuture(responseHolder) ); - expect(httpClient.go( + EasyMock.expect(httpClient.go( EasyMock.capture(captured2), EasyMock.anyObject(FullResponseHandler.class), EasyMock.eq(TEST_HTTP_TIMEOUT) )).andReturn( Futures.immediateFuture(responseHolder) ); - expect(httpClient.go( + EasyMock.expect(httpClient.go( EasyMock.capture(captured3), EasyMock.anyObject(FullResponseHandler.class), EasyMock.eq(TEST_HTTP_TIMEOUT) @@ -551,8 +549,8 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport public void testResume() throws Exception { Capture captured = Capture.newInstance(); - expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes(); - expect(httpClient.go( + EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes(); + EasyMock.expect(httpClient.go( EasyMock.capture(captured), EasyMock.anyObject(FullResponseHandler.class), EasyMock.eq(TEST_HTTP_TIMEOUT) @@ -579,8 +577,8 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport Map endOffsets = ImmutableMap.of("0", "15", "1", "120"); Capture captured = Capture.newInstance(); - expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes(); - expect(httpClient.go( + EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes(); + EasyMock.expect(httpClient.go( EasyMock.capture(captured), EasyMock.anyObject(FullResponseHandler.class), EasyMock.eq(TEST_HTTP_TIMEOUT) @@ -608,8 +606,8 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport Map endOffsets = ImmutableMap.of("0", "15", "1", "120"); Capture captured = Capture.newInstance(); - expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes(); - expect(httpClient.go( + EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes(); + EasyMock.expect(httpClient.go( EasyMock.capture(captured), EasyMock.anyObject(FullResponseHandler.class), EasyMock.eq(TEST_HTTP_TIMEOUT) @@ -635,8 +633,8 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport public void testStop() throws Exception { Capture captured = Capture.newInstance(); - expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes(); - expect(httpClient.go( + EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes(); + EasyMock.expect(httpClient.go( EasyMock.capture(captured), EasyMock.anyObject(FullResponseHandler.class), EasyMock.eq(TEST_HTTP_TIMEOUT) @@ -661,8 +659,8 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport public void testStopAndPublish() throws Exception { Capture captured = Capture.newInstance(); - expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes(); - expect(httpClient.go( + EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes(); + EasyMock.expect(httpClient.go( EasyMock.capture(captured), 
EasyMock.anyObject(FullResponseHandler.class), EasyMock.eq(TEST_HTTP_TIMEOUT) @@ -688,8 +686,8 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport { final int numRequests = TEST_IDS.size(); Capture captured = Capture.newInstance(CaptureType.ALL); - expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes(); - expect(httpClient.go( + EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes(); + EasyMock.expect(httpClient.go( EasyMock.capture(captured), EasyMock.anyObject(FullResponseHandler.class), EasyMock.eq(TEST_HTTP_TIMEOUT) @@ -724,8 +722,8 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport { final int numRequests = TEST_IDS.size(); Capture captured = Capture.newInstance(CaptureType.ALL); - expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes(); - expect(httpClient.go( + EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes(); + EasyMock.expect(httpClient.go( EasyMock.capture(captured), EasyMock.anyObject(FullResponseHandler.class), EasyMock.eq(TEST_HTTP_TIMEOUT) @@ -760,9 +758,9 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport { final int numRequests = TEST_IDS.size(); Capture captured = Capture.newInstance(CaptureType.ALL); - expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes(); - expect(responseHolder.getContent()).andReturn("{\"0\":\"1\"}").anyTimes(); - expect(httpClient.go( + EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes(); + EasyMock.expect(responseHolder.getContent()).andReturn("{\"0\":\"1\"}").anyTimes(); + EasyMock.expect(httpClient.go( EasyMock.capture(captured), EasyMock.anyObject(FullResponseHandler.class), EasyMock.eq(TEST_HTTP_TIMEOUT) @@ -797,9 +795,9 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport { final int numRequests = TEST_IDS.size(); Capture captured = Capture.newInstance(CaptureType.ALL); - expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes(); - expect(responseHolder.getContent()).andReturn("\"READING\"").anyTimes(); - expect(httpClient.go( + EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes(); + EasyMock.expect(responseHolder.getContent()).andReturn("\"READING\"").anyTimes(); + EasyMock.expect(httpClient.go( EasyMock.capture(captured), EasyMock.anyObject(FullResponseHandler.class), EasyMock.eq(TEST_HTTP_TIMEOUT) @@ -835,9 +833,9 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport final DateTime now = DateTimes.nowUtc(); final int numRequests = TEST_IDS.size(); Capture captured = Capture.newInstance(CaptureType.ALL); - expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes(); - expect(responseHolder.getContent()).andReturn(String.valueOf(now.getMillis())).anyTimes(); - expect(httpClient.go( + EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes(); + EasyMock.expect(responseHolder.getContent()).andReturn(String.valueOf(now.getMillis())).anyTimes(); + EasyMock.expect(httpClient.go( EasyMock.capture(captured), EasyMock.anyObject(FullResponseHandler.class), EasyMock.eq(TEST_HTTP_TIMEOUT) @@ -872,9 +870,9 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport { final int numRequests = TEST_IDS.size(); Capture captured = Capture.newInstance(CaptureType.ALL); - expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes(); - 
expect(responseHolder.getContent()).andReturn("{\"0\":\"1\"}").anyTimes(); - expect(httpClient.go( + EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes(); + EasyMock.expect(responseHolder.getContent()).andReturn("{\"0\":\"1\"}").anyTimes(); + EasyMock.expect(httpClient.go( EasyMock.capture(captured), EasyMock.anyObject(FullResponseHandler.class), EasyMock.eq(TEST_HTTP_TIMEOUT) @@ -909,9 +907,9 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport { final int numRequests = TEST_IDS.size(); Capture captured = Capture.newInstance(CaptureType.ALL); - expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes(); - expect(responseHolder.getContent()).andReturn("{\"0\":\"1\"}").anyTimes(); - expect(httpClient.go( + EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes(); + EasyMock.expect(responseHolder.getContent()).andReturn("{\"0\":\"1\"}").anyTimes(); + EasyMock.expect(httpClient.go( EasyMock.capture(captured), EasyMock.anyObject(FullResponseHandler.class), EasyMock.eq(TEST_HTTP_TIMEOUT) @@ -947,8 +945,8 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport Map endOffsets = ImmutableMap.of("0", "15L", "1", "120L"); final int numRequests = TEST_IDS.size(); Capture captured = Capture.newInstance(CaptureType.ALL); - expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes(); - expect(httpClient.go( + EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes(); + EasyMock.expect(httpClient.go( EasyMock.capture(captured), EasyMock.anyObject(FullResponseHandler.class), EasyMock.eq(TEST_HTTP_TIMEOUT) @@ -990,8 +988,8 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport Map endOffsets = ImmutableMap.of("0", "15L", "1", "120L"); final int numRequests = TEST_IDS.size(); Capture captured = Capture.newInstance(CaptureType.ALL); - expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes(); - expect(httpClient.go( + EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes(); + EasyMock.expect(httpClient.go( EasyMock.capture(captured), EasyMock.anyObject(FullResponseHandler.class), EasyMock.eq(TEST_HTTP_TIMEOUT) diff --git a/extensions-core/kinesis-indexing-service/src/test/java/org/apache/druid/indexing/kinesis/KinesisIndexTaskTest.java b/extensions-core/kinesis-indexing-service/src/test/java/org/apache/druid/indexing/kinesis/KinesisIndexTaskTest.java index ef5c254a6c2..d8bf83ce2a3 100644 --- a/extensions-core/kinesis-indexing-service/src/test/java/org/apache/druid/indexing/kinesis/KinesisIndexTaskTest.java +++ b/extensions-core/kinesis-indexing-service/src/test/java/org/apache/druid/indexing/kinesis/KinesisIndexTaskTest.java @@ -28,7 +28,6 @@ import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.InjectableValues; import com.fasterxml.jackson.databind.Module; import com.fasterxml.jackson.databind.ObjectMapper; -import com.google.common.base.Predicate; import com.google.common.base.Predicates; import com.google.common.base.Throwables; import com.google.common.collect.FluentIterable; @@ -187,21 +186,14 @@ import java.util.concurrent.Executors; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; -import static org.apache.druid.indexing.seekablestream.SeekableStreamIndexTaskRunner.Status; -import static org.easymock.EasyMock.anyLong; -import static org.easymock.EasyMock.anyObject; -import static 
org.easymock.EasyMock.anyString; -import static org.easymock.EasyMock.expect; -import static org.easymock.EasyMock.expectLastCall; -import static org.easymock.EasyMock.reset; public class KinesisIndexTaskTest extends EasyMockSupport { - private static final Logger log = new Logger(KinesisIndexTaskTest.class); - private static final ObjectMapper objectMapper = TestHelper.makeJsonMapper(); - private static String stream = "stream"; - private static String shardId1 = "1"; - private static String shardId0 = "0"; + private static final Logger LOG = new Logger(KinesisIndexTaskTest.class); + private static final ObjectMapper OBJECT_MAPPER = TestHelper.makeJsonMapper(); + private static final String STREAM = "stream"; + private static final String SHARD_ID1 = "1"; + private static final String SHARD_ID0 = "0"; private static KinesisRecordSupplier recordSupplier; private static List> records; @@ -215,14 +207,11 @@ public class KinesisIndexTaskTest extends EasyMockSupport private boolean logParseExceptions = true; private Integer maxParseExceptions = null; private Integer maxSavedParseExceptions = null; - private boolean resetOffsetAutomatically = false; private boolean doHandoff = true; - private int maxRowsInMemory = 1000; private Integer maxRowsPerSegment = null; private Long maxTotalRows = null; - private Period intermediateHandoffPeriod = null; + private final Period intermediateHandoffPeriod = null; private int maxRecordsPerPoll; - private boolean skipAvailabilityCheck = false; private TaskToolboxFactory toolboxFactory; private IndexerMetadataStorageCoordinator metadataStorageCoordinator; @@ -235,7 +224,7 @@ public class KinesisIndexTaskTest extends EasyMockSupport private static final DataSchema DATA_SCHEMA = new DataSchema( "test_ds", - objectMapper.convertValue( + OBJECT_MAPPER.convertValue( new StringInputRowParser( new JSONParseSpec( new TimestampSpec("timestamp", "iso", null), @@ -263,7 +252,7 @@ public class KinesisIndexTaskTest extends EasyMockSupport }, new UniformGranularitySpec(Granularities.DAY, Granularities.NONE, null), null, - objectMapper + OBJECT_MAPPER ); @Rule @@ -297,9 +286,8 @@ public class KinesisIndexTaskTest extends EasyMockSupport logParseExceptions = true; maxParseExceptions = null; maxSavedParseExceptions = null; - skipAvailabilityCheck = false; doHandoff = true; - records = generateRecords(stream); + records = generateRecords(STREAM); reportsFile = File.createTempFile("KinesisIndexTaskTestReports-" + System.currentTimeMillis(), "json"); maxRecordsPerPoll = 1; @@ -346,8 +334,18 @@ public class KinesisIndexTaskTest extends EasyMockSupport "5", jb("246140482-04-24T15:36:27.903Z", "x", "z", "10", "20.0", "1.0") ), - new OrderedPartitionableRecord<>(stream, "1", "6", Collections.singletonList(StringUtils.toUtf8("unparseable"))), - new OrderedPartitionableRecord<>(stream, "1", "7", Collections.singletonList(StringUtils.toUtf8("unparseable2"))), + new OrderedPartitionableRecord<>( + stream, + "1", + "6", + Collections.singletonList(StringUtils.toUtf8("unparseable")) + ), + new OrderedPartitionableRecord<>( + stream, + "1", + "7", + Collections.singletonList(StringUtils.toUtf8("unparseable2")) + ), new OrderedPartitionableRecord<>(stream, "1", "8", Collections.singletonList(StringUtils.toUtf8("{}"))), new OrderedPartitionableRecord<>(stream, "1", "9", jb("2013", "f", "y", "10", "20.0", "1.0")), new OrderedPartitionableRecord<>(stream, "1", "10", jb("2049", "f", "y", "notanumber", "20.0", "1.0")), @@ -378,21 +376,22 @@ public class KinesisIndexTaskTest extends 
EasyMockSupport new OrderedPartitionableRecord<>(stream, "1", "14", jb("2013", "e", "y", "10", "20.0", "1.0")) ); } + @Test(timeout = 120_000L) public void testRunAfterDataInserted() throws Exception { - recordSupplier.assign(anyObject()); - expectLastCall().anyTimes(); + recordSupplier.assign(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); - expect(recordSupplier.getEarliestSequenceNumber(anyObject())).andReturn("0").anyTimes(); + EasyMock.expect(recordSupplier.getEarliestSequenceNumber(EasyMock.anyObject())).andReturn("0").anyTimes(); - recordSupplier.seek(anyObject(), anyString()); - expectLastCall().anyTimes(); + recordSupplier.seek(EasyMock.anyObject(), EasyMock.anyString()); + EasyMock.expectLastCall().anyTimes(); - expect(recordSupplier.poll(anyLong())).andReturn(records.subList(2, 5)).once(); + EasyMock.expect(recordSupplier.poll(EasyMock.anyLong())).andReturn(records.subList(2, 5)).once(); recordSupplier.close(); - expectLastCall().once(); + EasyMock.expectLastCall().once(); replayAll(); @@ -401,8 +400,8 @@ public class KinesisIndexTaskTest extends EasyMockSupport new KinesisIndexTaskIOConfig( 0, "sequence0", - new SeekableStreamStartSequenceNumbers<>(stream, ImmutableMap.of(shardId1, "2"), ImmutableSet.of()), - new SeekableStreamEndSequenceNumbers<>(stream, ImmutableMap.of(shardId1, "4")), + new SeekableStreamStartSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "2"), ImmutableSet.of()), + new SeekableStreamEndSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "4")), true, null, null, @@ -434,7 +433,7 @@ public class KinesisIndexTaskTest extends EasyMockSupport Assert.assertEquals(ImmutableSet.of(desc1, desc2), publishedDescriptors()); Assert.assertEquals( new KinesisDataSourceMetadata( - new SeekableStreamEndSequenceNumbers<>(stream, ImmutableMap.of(shardId1, "4")) + new SeekableStreamEndSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "4")) ), metadataStorageCoordinator.getDataSourceMetadata(DATA_SCHEMA.getDataSource()) ); @@ -448,21 +447,21 @@ public class KinesisIndexTaskTest extends EasyMockSupport public void testRunBeforeDataInserted() throws Exception { - recordSupplier.assign(anyObject()); - expectLastCall().anyTimes(); + recordSupplier.assign(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); - expect(recordSupplier.getEarliestSequenceNumber(anyObject())).andReturn("0").anyTimes(); + EasyMock.expect(recordSupplier.getEarliestSequenceNumber(EasyMock.anyObject())).andReturn("0").anyTimes(); - recordSupplier.seek(anyObject(), anyString()); - expectLastCall().anyTimes(); + recordSupplier.seek(EasyMock.anyObject(), EasyMock.anyString()); + EasyMock.expectLastCall().anyTimes(); - expect(recordSupplier.poll(anyLong())).andReturn(Collections.emptyList()) - .times(5) - .andReturn(records.subList(13, 15)) - .once(); + EasyMock.expect(recordSupplier.poll(EasyMock.anyLong())).andReturn(Collections.emptyList()) + .times(5) + .andReturn(records.subList(13, 15)) + .once(); recordSupplier.close(); - expectLastCall().once(); + EasyMock.expectLastCall().once(); replayAll(); @@ -471,8 +470,8 @@ public class KinesisIndexTaskTest extends EasyMockSupport new KinesisIndexTaskIOConfig( 0, "sequence0", - new SeekableStreamStartSequenceNumbers<>(stream, ImmutableMap.of(shardId0, "0"), ImmutableSet.of()), - new SeekableStreamEndSequenceNumbers<>(stream, ImmutableMap.of(shardId0, "1")), + new SeekableStreamStartSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID0, "0"), ImmutableSet.of()), + new SeekableStreamEndSequenceNumbers<>(STREAM, 
ImmutableMap.of(SHARD_ID0, "1")), true, null, null, @@ -503,7 +502,7 @@ public class KinesisIndexTaskTest extends EasyMockSupport Assert.assertEquals(ImmutableSet.of(desc1, desc2), publishedDescriptors()); Assert.assertEquals( new KinesisDataSourceMetadata( - new SeekableStreamEndSequenceNumbers<>(stream, ImmutableMap.of(shardId0, "1")) + new SeekableStreamEndSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID0, "1")) ), metadataStorageCoordinator.getDataSourceMetadata(DATA_SCHEMA.getDataSource()) ); @@ -521,38 +520,38 @@ public class KinesisIndexTaskTest extends EasyMockSupport maxRowsPerSegment = 2; maxRecordsPerPoll = 1; - recordSupplier.assign(anyObject()); - expectLastCall().anyTimes(); + recordSupplier.assign(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); - expect(recordSupplier.getEarliestSequenceNumber(anyObject())).andReturn("0").anyTimes(); + EasyMock.expect(recordSupplier.getEarliestSequenceNumber(EasyMock.anyObject())).andReturn("0").anyTimes(); - recordSupplier.seek(anyObject(), anyString()); - expectLastCall().anyTimes(); + recordSupplier.seek(EasyMock.anyObject(), EasyMock.anyString()); + EasyMock.expectLastCall().anyTimes(); - expect(recordSupplier.poll(anyLong())).andReturn(records.subList(0, 5)) - .once() - .andReturn(records.subList(4, records.size())) - .once(); + EasyMock.expect(recordSupplier.poll(EasyMock.anyLong())).andReturn(records.subList(0, 5)) + .once() + .andReturn(records.subList(4, records.size())) + .once(); recordSupplier.close(); - expectLastCall().once(); + EasyMock.expectLastCall().once(); replayAll(); final SeekableStreamStartSequenceNumbers startPartitions = new SeekableStreamStartSequenceNumbers<>( - stream, - ImmutableMap.of(shardId1, "0", shardId0, "0"), + STREAM, + ImmutableMap.of(SHARD_ID1, "0", SHARD_ID0, "0"), ImmutableSet.of() ); final SeekableStreamEndSequenceNumbers checkpoint1 = new SeekableStreamEndSequenceNumbers<>( - stream, - ImmutableMap.of(shardId1, "4", shardId0, "0") + STREAM, + ImmutableMap.of(SHARD_ID1, "4", SHARD_ID0, "0") ); final SeekableStreamEndSequenceNumbers endPartitions = new SeekableStreamEndSequenceNumbers<>( - stream, - ImmutableMap.of(shardId1, "9", shardId0, "1") + STREAM, + ImmutableMap.of(SHARD_ID1, "9", SHARD_ID0, "1") ); final KinesisIndexTask task = createTask( null, @@ -591,7 +590,7 @@ public class KinesisIndexTaskTest extends EasyMockSupport DATA_SCHEMA.getDataSource(), 0, new KinesisDataSourceMetadata(startPartitions), - new KinesisDataSourceMetadata(new SeekableStreamEndSequenceNumbers<>(stream, currentOffsets)) + new KinesisDataSourceMetadata(new SeekableStreamEndSequenceNumbers<>(STREAM, currentOffsets)) ) ) ); @@ -613,8 +612,8 @@ public class KinesisIndexTaskTest extends EasyMockSupport Assert.assertEquals( new KinesisDataSourceMetadata( new SeekableStreamEndSequenceNumbers<>( - stream, - ImmutableMap.of(shardId1, "9", shardId0, "1") + STREAM, + ImmutableMap.of(SHARD_ID1, "9", SHARD_ID0, "1") ) ), metadataStorageCoordinator.getDataSourceMetadata(DATA_SCHEMA.getDataSource()) @@ -640,44 +639,44 @@ public class KinesisIndexTaskTest extends EasyMockSupport maxRowsPerSegment = Integer.MAX_VALUE; maxTotalRows = 3L; - recordSupplier.assign(anyObject()); - expectLastCall().anyTimes(); + recordSupplier.assign(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); - expect(recordSupplier.getEarliestSequenceNumber(anyObject())).andReturn("0").anyTimes(); + EasyMock.expect(recordSupplier.getEarliestSequenceNumber(EasyMock.anyObject())).andReturn("0").anyTimes(); - 
recordSupplier.seek(anyObject(), anyString()); - expectLastCall().anyTimes(); + recordSupplier.seek(EasyMock.anyObject(), EasyMock.anyString()); + EasyMock.expectLastCall().anyTimes(); - expect(recordSupplier.poll(anyLong())).andReturn(records.subList(0, 3)) - .once() - .andReturn(records.subList(2, 10)) - .once() - .andReturn(records.subList(9, 11)); + EasyMock.expect(recordSupplier.poll(EasyMock.anyLong())).andReturn(records.subList(0, 3)) + .once() + .andReturn(records.subList(2, 10)) + .once() + .andReturn(records.subList(9, 11)); recordSupplier.close(); - expectLastCall().once(); + EasyMock.expectLastCall().once(); replayAll(); // Insert data final SeekableStreamStartSequenceNumbers startPartitions = new SeekableStreamStartSequenceNumbers<>( - stream, - ImmutableMap.of(shardId1, "0"), + STREAM, + ImmutableMap.of(SHARD_ID1, "0"), ImmutableSet.of() ); // Checkpointing will happen at either checkpoint1 or checkpoint2 depending on ordering // of events fetched across two partitions from Kafka final SeekableStreamEndSequenceNumbers checkpoint1 = new SeekableStreamEndSequenceNumbers<>( - stream, - ImmutableMap.of(shardId1, "2") + STREAM, + ImmutableMap.of(SHARD_ID1, "2") ); final SeekableStreamEndSequenceNumbers checkpoint2 = new SeekableStreamEndSequenceNumbers<>( - stream, - ImmutableMap.of(shardId1, "9") + STREAM, + ImmutableMap.of(SHARD_ID1, "9") ); final SeekableStreamEndSequenceNumbers endPartitions = new SeekableStreamEndSequenceNumbers<>( - stream, - ImmutableMap.of(shardId1, "10") + STREAM, + ImmutableMap.of(SHARD_ID1, "10") ); final KinesisIndexTask task = createTask( @@ -700,7 +699,7 @@ public class KinesisIndexTaskTest extends EasyMockSupport ); final ListenableFuture future = runTask(task); - while (task.getRunner().getStatus() != Status.PAUSED) { + while (task.getRunner().getStatus() != SeekableStreamIndexTaskRunner.Status.PAUSED) { Thread.sleep(10); } final Map currentOffsets = ImmutableMap.copyOf(task.getRunner().getCurrentOffsets()); @@ -708,7 +707,7 @@ public class KinesisIndexTaskTest extends EasyMockSupport Assert.assertEquals(checkpoint1.getPartitionSequenceNumberMap(), currentOffsets); task.getRunner().setEndOffsets(currentOffsets, false); - while (task.getRunner().getStatus() != Status.PAUSED) { + while (task.getRunner().getStatus() != SeekableStreamIndexTaskRunner.Status.PAUSED) { Thread.sleep(10); } @@ -729,7 +728,7 @@ public class KinesisIndexTaskTest extends EasyMockSupport DATA_SCHEMA.getDataSource(), 0, new KinesisDataSourceMetadata(startPartitions), - new KinesisDataSourceMetadata(new SeekableStreamEndSequenceNumbers<>(stream, currentOffsets)) + new KinesisDataSourceMetadata(new SeekableStreamEndSequenceNumbers<>(STREAM, currentOffsets)) ) ) ); @@ -739,9 +738,9 @@ public class KinesisIndexTaskTest extends EasyMockSupport DATA_SCHEMA.getDataSource(), 0, new KinesisDataSourceMetadata( - new SeekableStreamStartSequenceNumbers<>(stream, currentOffsets, currentOffsets.keySet()) + new SeekableStreamStartSequenceNumbers<>(STREAM, currentOffsets, currentOffsets.keySet()) ), - new KinesisDataSourceMetadata(new SeekableStreamEndSequenceNumbers<>(stream, nextOffsets)) + new KinesisDataSourceMetadata(new SeekableStreamEndSequenceNumbers<>(STREAM, nextOffsets)) ) ) ); @@ -760,7 +759,7 @@ public class KinesisIndexTaskTest extends EasyMockSupport SegmentDescriptor desc7 = sd(task, "2013/P1D", 0); Assert.assertEquals(ImmutableSet.of(desc1, desc2, desc3, desc4, desc5, desc7), publishedDescriptors()); Assert.assertEquals( - new KinesisDataSourceMetadata(new 
SeekableStreamEndSequenceNumbers<>(stream, ImmutableMap.of(shardId1, "10"))), + new KinesisDataSourceMetadata(new SeekableStreamEndSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "10"))), metadataStorageCoordinator.getDataSourceMetadata(DATA_SCHEMA.getDataSource()) ); @@ -777,18 +776,18 @@ public class KinesisIndexTaskTest extends EasyMockSupport @Test(timeout = 120_000L) public void testRunWithMinimumMessageTime() throws Exception { - recordSupplier.assign(anyObject()); - expectLastCall().anyTimes(); + recordSupplier.assign(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); - expect(recordSupplier.getEarliestSequenceNumber(anyObject())).andReturn("0").anyTimes(); + EasyMock.expect(recordSupplier.getEarliestSequenceNumber(EasyMock.anyObject())).andReturn("0").anyTimes(); - recordSupplier.seek(anyObject(), anyString()); - expectLastCall().anyTimes(); + recordSupplier.seek(EasyMock.anyObject(), EasyMock.anyString()); + EasyMock.expectLastCall().anyTimes(); - expect(recordSupplier.poll(anyLong())).andReturn(records.subList(0, 13)).once(); + EasyMock.expect(recordSupplier.poll(EasyMock.anyLong())).andReturn(records.subList(0, 13)).once(); recordSupplier.close(); - expectLastCall().once(); + EasyMock.expectLastCall().once(); replayAll(); @@ -797,8 +796,8 @@ public class KinesisIndexTaskTest extends EasyMockSupport new KinesisIndexTaskIOConfig( 0, "sequence0", - new SeekableStreamStartSequenceNumbers<>(stream, ImmutableMap.of(shardId1, "0"), ImmutableSet.of()), - new SeekableStreamEndSequenceNumbers<>(stream, ImmutableMap.of(shardId1, "4")), + new SeekableStreamStartSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "0"), ImmutableSet.of()), + new SeekableStreamEndSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "4")), true, DateTimes.of("2010"), null, @@ -814,7 +813,7 @@ public class KinesisIndexTaskTest extends EasyMockSupport final ListenableFuture future = runTask(task); // Wait for the task to start reading - while (task.getRunner().getStatus() != Status.READING) { + while (task.getRunner().getStatus() != SeekableStreamIndexTaskRunner.Status.READING) { Thread.sleep(10); } @@ -833,7 +832,7 @@ public class KinesisIndexTaskTest extends EasyMockSupport SegmentDescriptor desc2 = sd(task, "2011/P1D", 0); Assert.assertEquals(ImmutableSet.of(desc1, desc2), publishedDescriptors()); Assert.assertEquals( - new KinesisDataSourceMetadata(new SeekableStreamEndSequenceNumbers<>(stream, ImmutableMap.of(shardId1, "4"))), + new KinesisDataSourceMetadata(new SeekableStreamEndSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "4"))), metadataStorageCoordinator.getDataSourceMetadata(DATA_SCHEMA.getDataSource()) ); @@ -846,18 +845,18 @@ public class KinesisIndexTaskTest extends EasyMockSupport @Test(timeout = 120_000L) public void testRunWithMaximumMessageTime() throws Exception { - recordSupplier.assign(anyObject()); - expectLastCall().anyTimes(); + recordSupplier.assign(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); - expect(recordSupplier.getEarliestSequenceNumber(anyObject())).andReturn("0").anyTimes(); + EasyMock.expect(recordSupplier.getEarliestSequenceNumber(EasyMock.anyObject())).andReturn("0").anyTimes(); - recordSupplier.seek(anyObject(), anyString()); - expectLastCall().anyTimes(); + recordSupplier.seek(EasyMock.anyObject(), EasyMock.anyString()); + EasyMock.expectLastCall().anyTimes(); - expect(recordSupplier.poll(anyLong())).andReturn(records.subList(0, 13)).once(); + EasyMock.expect(recordSupplier.poll(EasyMock.anyLong())).andReturn(records.subList(0, 
13)).once(); recordSupplier.close(); - expectLastCall().once(); + EasyMock.expectLastCall().once(); replayAll(); @@ -866,8 +865,8 @@ public class KinesisIndexTaskTest extends EasyMockSupport new KinesisIndexTaskIOConfig( 0, "sequence0", - new SeekableStreamStartSequenceNumbers<>(stream, ImmutableMap.of(shardId1, "0"), ImmutableSet.of()), - new SeekableStreamEndSequenceNumbers<>(stream, ImmutableMap.of(shardId1, "4")), + new SeekableStreamStartSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "0"), ImmutableSet.of()), + new SeekableStreamEndSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "4")), true, null, DateTimes.of("2010"), @@ -883,7 +882,7 @@ public class KinesisIndexTaskTest extends EasyMockSupport final ListenableFuture future = runTask(task); // Wait for the task to start reading - while (task.getRunner().getStatus() != Status.READING) { + while (task.getRunner().getStatus() != SeekableStreamIndexTaskRunner.Status.READING) { Thread.sleep(10); } @@ -904,7 +903,7 @@ public class KinesisIndexTaskTest extends EasyMockSupport Assert.assertEquals(ImmutableSet.of(desc1, desc2, desc3), publishedDescriptors()); Assert.assertEquals( new KinesisDataSourceMetadata( - new SeekableStreamEndSequenceNumbers<>(stream, ImmutableMap.of(shardId1, "4"))), + new SeekableStreamEndSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "4"))), metadataStorageCoordinator.getDataSourceMetadata(DATA_SCHEMA.getDataSource()) ); @@ -918,18 +917,18 @@ public class KinesisIndexTaskTest extends EasyMockSupport @Test(timeout = 120_000L) public void testRunWithTransformSpec() throws Exception { - recordSupplier.assign(anyObject()); - expectLastCall().anyTimes(); + recordSupplier.assign(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); - expect(recordSupplier.getEarliestSequenceNumber(anyObject())).andReturn("0").anyTimes(); + EasyMock.expect(recordSupplier.getEarliestSequenceNumber(EasyMock.anyObject())).andReturn("0").anyTimes(); - recordSupplier.seek(anyObject(), anyString()); - expectLastCall().anyTimes(); + recordSupplier.seek(EasyMock.anyObject(), EasyMock.anyString()); + EasyMock.expectLastCall().anyTimes(); - expect(recordSupplier.poll(anyLong())).andReturn(records.subList(0, 13)).once(); + EasyMock.expect(recordSupplier.poll(EasyMock.anyLong())).andReturn(records.subList(0, 13)).once(); recordSupplier.close(); - expectLastCall().once(); + EasyMock.expectLastCall().once(); replayAll(); @@ -946,8 +945,8 @@ public class KinesisIndexTaskTest extends EasyMockSupport new KinesisIndexTaskIOConfig( 0, "sequence0", - new SeekableStreamStartSequenceNumbers<>(stream, ImmutableMap.of(shardId1, "0"), ImmutableSet.of()), - new SeekableStreamEndSequenceNumbers<>(stream, ImmutableMap.of(shardId1, "4")), + new SeekableStreamStartSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "0"), ImmutableSet.of()), + new SeekableStreamEndSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "4")), true, null, null, @@ -963,7 +962,7 @@ public class KinesisIndexTaskTest extends EasyMockSupport final ListenableFuture future = runTask(task); // Wait for the task to start reading - while (task.getRunner().getStatus() != Status.READING) { + while (task.getRunner().getStatus() != SeekableStreamIndexTaskRunner.Status.READING) { Thread.sleep(10); } @@ -982,7 +981,7 @@ public class KinesisIndexTaskTest extends EasyMockSupport Assert.assertEquals(ImmutableSet.of(desc1), publishedDescriptors()); Assert.assertEquals( new KinesisDataSourceMetadata( - new SeekableStreamEndSequenceNumbers<>(stream, ImmutableMap.of(shardId1, "4"))), + 
new SeekableStreamEndSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "4"))), metadataStorageCoordinator.getDataSourceMetadata(DATA_SCHEMA.getDataSource()) ); @@ -995,18 +994,18 @@ public class KinesisIndexTaskTest extends EasyMockSupport @Test(timeout = 120_000L) public void testRunOnSingletonRange() throws Exception { - recordSupplier.assign(anyObject()); - expectLastCall().anyTimes(); + recordSupplier.assign(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); - expect(recordSupplier.getEarliestSequenceNumber(anyObject())).andReturn("0").anyTimes(); + EasyMock.expect(recordSupplier.getEarliestSequenceNumber(EasyMock.anyObject())).andReturn("0").anyTimes(); - recordSupplier.seek(anyObject(), anyString()); - expectLastCall().anyTimes(); + recordSupplier.seek(EasyMock.anyObject(), EasyMock.anyString()); + EasyMock.expectLastCall().anyTimes(); - expect(recordSupplier.poll(anyLong())).andReturn(records.subList(2, 3)).once(); + EasyMock.expect(recordSupplier.poll(EasyMock.anyLong())).andReturn(records.subList(2, 3)).once(); recordSupplier.close(); - expectLastCall().once(); + EasyMock.expectLastCall().once(); replayAll(); @@ -1017,8 +1016,8 @@ public class KinesisIndexTaskTest extends EasyMockSupport new KinesisIndexTaskIOConfig( 0, "sequence0", - new SeekableStreamStartSequenceNumbers<>(stream, ImmutableMap.of(shardId1, "2"), ImmutableSet.of()), - new SeekableStreamEndSequenceNumbers<>(stream, ImmutableMap.of(shardId1, "2")), + new SeekableStreamStartSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "2"), ImmutableSet.of()), + new SeekableStreamEndSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "2")), true, null, null, @@ -1053,18 +1052,18 @@ public class KinesisIndexTaskTest extends EasyMockSupport { handoffConditionTimeout = 5_000; - recordSupplier.assign(anyObject()); - expectLastCall().anyTimes(); + recordSupplier.assign(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); - expect(recordSupplier.getEarliestSequenceNumber(anyObject())).andReturn("0").anyTimes(); + EasyMock.expect(recordSupplier.getEarliestSequenceNumber(EasyMock.anyObject())).andReturn("0").anyTimes(); - recordSupplier.seek(anyObject(), anyString()); - expectLastCall().anyTimes(); + recordSupplier.seek(EasyMock.anyObject(), EasyMock.anyString()); + EasyMock.expectLastCall().anyTimes(); - expect(recordSupplier.poll(anyLong())).andReturn(records.subList(2, 13)).once(); + EasyMock.expect(recordSupplier.poll(EasyMock.anyLong())).andReturn(records.subList(2, 13)).once(); recordSupplier.close(); - expectLastCall().once(); + EasyMock.expectLastCall().once(); replayAll(); @@ -1073,8 +1072,8 @@ public class KinesisIndexTaskTest extends EasyMockSupport new KinesisIndexTaskIOConfig( 0, "sequence0", - new SeekableStreamStartSequenceNumbers<>(stream, ImmutableMap.of(shardId1, "2"), ImmutableSet.of()), - new SeekableStreamEndSequenceNumbers<>(stream, ImmutableMap.of(shardId1, "4")), + new SeekableStreamStartSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "2"), ImmutableSet.of()), + new SeekableStreamEndSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "4")), true, null, null, @@ -1105,7 +1104,7 @@ public class KinesisIndexTaskTest extends EasyMockSupport Assert.assertEquals(ImmutableSet.of(desc1, desc2), publishedDescriptors()); Assert.assertEquals( new KinesisDataSourceMetadata( - new SeekableStreamEndSequenceNumbers<>(stream, ImmutableMap.of(shardId1, "4")) + new SeekableStreamEndSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "4")) ), 
metadataStorageCoordinator.getDataSourceMetadata(DATA_SCHEMA.getDataSource()) ); @@ -1122,18 +1121,18 @@ public class KinesisIndexTaskTest extends EasyMockSupport doHandoff = false; handoffConditionTimeout = 100; - recordSupplier.assign(anyObject()); - expectLastCall().anyTimes(); + recordSupplier.assign(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); - expect(recordSupplier.getEarliestSequenceNumber(anyObject())).andReturn("0").anyTimes(); + EasyMock.expect(recordSupplier.getEarliestSequenceNumber(EasyMock.anyObject())).andReturn("0").anyTimes(); - recordSupplier.seek(anyObject(), anyString()); - expectLastCall().anyTimes(); + recordSupplier.seek(EasyMock.anyObject(), EasyMock.anyString()); + EasyMock.expectLastCall().anyTimes(); - expect(recordSupplier.poll(anyLong())).andReturn(records.subList(2, 13)).once(); + EasyMock.expect(recordSupplier.poll(EasyMock.anyLong())).andReturn(records.subList(2, 13)).once(); recordSupplier.close(); - expectLastCall().once(); + EasyMock.expectLastCall().once(); replayAll(); @@ -1142,8 +1141,8 @@ public class KinesisIndexTaskTest extends EasyMockSupport new KinesisIndexTaskIOConfig( 0, "sequence0", - new SeekableStreamStartSequenceNumbers<>(stream, ImmutableMap.of(shardId1, "2"), ImmutableSet.of()), - new SeekableStreamEndSequenceNumbers<>(stream, ImmutableMap.of(shardId1, "4")), + new SeekableStreamStartSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "2"), ImmutableSet.of()), + new SeekableStreamEndSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "4")), true, null, null, @@ -1174,7 +1173,7 @@ public class KinesisIndexTaskTest extends EasyMockSupport Assert.assertEquals(ImmutableSet.of(desc1, desc2), publishedDescriptors()); Assert.assertEquals( new KinesisDataSourceMetadata( - new SeekableStreamEndSequenceNumbers<>(stream, ImmutableMap.of(shardId1, "4")) + new SeekableStreamEndSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "4")) ), metadataStorageCoordinator.getDataSourceMetadata(DATA_SCHEMA.getDataSource()) ); @@ -1194,18 +1193,18 @@ public class KinesisIndexTaskTest extends EasyMockSupport maxParseExceptions = 1000; maxSavedParseExceptions = 2; - recordSupplier.assign(anyObject()); - expectLastCall().anyTimes(); + recordSupplier.assign(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); - expect(recordSupplier.getEarliestSequenceNumber(anyObject())).andReturn("0").anyTimes(); + EasyMock.expect(recordSupplier.getEarliestSequenceNumber(EasyMock.anyObject())).andReturn("0").anyTimes(); - recordSupplier.seek(anyObject(), anyString()); - expectLastCall().anyTimes(); + recordSupplier.seek(EasyMock.anyObject(), EasyMock.anyString()); + EasyMock.expectLastCall().anyTimes(); - expect(recordSupplier.poll(anyLong())).andReturn(records.subList(2, 13)).once(); + EasyMock.expect(recordSupplier.poll(EasyMock.anyLong())).andReturn(records.subList(2, 13)).once(); recordSupplier.close(); - expectLastCall().once(); + EasyMock.expectLastCall().once(); replayAll(); @@ -1214,8 +1213,8 @@ public class KinesisIndexTaskTest extends EasyMockSupport new KinesisIndexTaskIOConfig( 0, "sequence0", - new SeekableStreamStartSequenceNumbers<>(stream, ImmutableMap.of(shardId1, "2"), ImmutableSet.of()), - new SeekableStreamEndSequenceNumbers<>(stream, ImmutableMap.of(shardId1, "5")), + new SeekableStreamStartSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "2"), ImmutableSet.of()), + new SeekableStreamEndSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "5")), true, null, null, @@ -1253,18 +1252,18 @@ public class KinesisIndexTaskTest 
extends EasyMockSupport maxParseExceptions = 7; maxSavedParseExceptions = 7; - recordSupplier.assign(anyObject()); - expectLastCall().anyTimes(); + recordSupplier.assign(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); - expect(recordSupplier.getEarliestSequenceNumber(anyObject())).andReturn("0").anyTimes(); + EasyMock.expect(recordSupplier.getEarliestSequenceNumber(EasyMock.anyObject())).andReturn("0").anyTimes(); - recordSupplier.seek(anyObject(), anyString()); - expectLastCall().anyTimes(); + recordSupplier.seek(EasyMock.anyObject(), EasyMock.anyString()); + EasyMock.expectLastCall().anyTimes(); - expect(recordSupplier.poll(anyLong())).andReturn(records.subList(2, 13)).once(); + EasyMock.expect(recordSupplier.poll(EasyMock.anyLong())).andReturn(records.subList(2, 13)).once(); recordSupplier.close(); - expectLastCall().once(); + EasyMock.expectLastCall().once(); replayAll(); @@ -1273,8 +1272,8 @@ public class KinesisIndexTaskTest extends EasyMockSupport new KinesisIndexTaskIOConfig( 0, "sequence0", - new SeekableStreamStartSequenceNumbers<>(stream, ImmutableMap.of(shardId1, "2"), ImmutableSet.of()), - new SeekableStreamEndSequenceNumbers<>(stream, ImmutableMap.of(shardId1, "12")), + new SeekableStreamStartSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "2"), ImmutableSet.of()), + new SeekableStreamEndSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "12")), true, null, null, @@ -1311,7 +1310,7 @@ public class KinesisIndexTaskTest extends EasyMockSupport Assert.assertEquals(ImmutableSet.of(desc1, desc2, desc3, desc4), publishedDescriptors()); Assert.assertEquals( new KinesisDataSourceMetadata( - new SeekableStreamEndSequenceNumbers<>(stream, ImmutableMap.of(shardId1, "12")) + new SeekableStreamEndSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "12")) ), metadataStorageCoordinator.getDataSourceMetadata(DATA_SCHEMA.getDataSource()) ); @@ -1353,18 +1352,18 @@ public class KinesisIndexTaskTest extends EasyMockSupport maxParseExceptions = 2; maxSavedParseExceptions = 2; - recordSupplier.assign(anyObject()); - expectLastCall().anyTimes(); + recordSupplier.assign(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); - expect(recordSupplier.getEarliestSequenceNumber(anyObject())).andReturn("0").anyTimes(); + EasyMock.expect(recordSupplier.getEarliestSequenceNumber(EasyMock.anyObject())).andReturn("0").anyTimes(); - recordSupplier.seek(anyObject(), anyString()); - expectLastCall().anyTimes(); + recordSupplier.seek(EasyMock.anyObject(), EasyMock.anyString()); + EasyMock.expectLastCall().anyTimes(); - expect(recordSupplier.poll(anyLong())).andReturn(records.subList(2, 13)).once(); + EasyMock.expect(recordSupplier.poll(EasyMock.anyLong())).andReturn(records.subList(2, 13)).once(); recordSupplier.close(); - expectLastCall().once(); + EasyMock.expectLastCall().once(); replayAll(); @@ -1373,8 +1372,8 @@ public class KinesisIndexTaskTest extends EasyMockSupport new KinesisIndexTaskIOConfig( 0, "sequence0", - new SeekableStreamStartSequenceNumbers<>(stream, ImmutableMap.of(shardId1, "2"), ImmutableSet.of()), - new SeekableStreamEndSequenceNumbers<>(stream, ImmutableMap.of(shardId1, "9")), + new SeekableStreamStartSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "2"), ImmutableSet.of()), + new SeekableStreamEndSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "9")), true, null, null, @@ -1435,18 +1434,18 @@ public class KinesisIndexTaskTest extends EasyMockSupport public void testRunReplicas() throws Exception { // Insert data - recordSupplier.assign(anyObject()); 
- expectLastCall().anyTimes(); + recordSupplier.assign(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); - expect(recordSupplier.getEarliestSequenceNumber(anyObject())).andReturn("0").anyTimes(); + EasyMock.expect(recordSupplier.getEarliestSequenceNumber(EasyMock.anyObject())).andReturn("0").anyTimes(); - recordSupplier.seek(anyObject(), anyString()); - expectLastCall().anyTimes(); + recordSupplier.seek(EasyMock.anyObject(), EasyMock.anyString()); + EasyMock.expectLastCall().anyTimes(); - expect(recordSupplier.poll(anyLong())).andReturn(records.subList(2, 13)).times(2); + EasyMock.expect(recordSupplier.poll(EasyMock.anyLong())).andReturn(records.subList(2, 13)).times(2); recordSupplier.close(); - expectLastCall().times(2); + EasyMock.expectLastCall().times(2); replayAll(); @@ -1455,8 +1454,8 @@ public class KinesisIndexTaskTest extends EasyMockSupport new KinesisIndexTaskIOConfig( 0, "sequence0", - new SeekableStreamStartSequenceNumbers<>(stream, ImmutableMap.of(shardId1, "2"), ImmutableSet.of()), - new SeekableStreamEndSequenceNumbers<>(stream, ImmutableMap.of(shardId1, "4")), + new SeekableStreamStartSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "2"), ImmutableSet.of()), + new SeekableStreamEndSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "4")), true, null, null, @@ -1473,8 +1472,8 @@ public class KinesisIndexTaskTest extends EasyMockSupport new KinesisIndexTaskIOConfig( 0, "sequence0", - new SeekableStreamStartSequenceNumbers<>(stream, ImmutableMap.of(shardId1, "2"), ImmutableSet.of()), - new SeekableStreamEndSequenceNumbers<>(stream, ImmutableMap.of(shardId1, "4")), + new SeekableStreamStartSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "2"), ImmutableSet.of()), + new SeekableStreamEndSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "4")), true, null, null, @@ -1510,7 +1509,7 @@ public class KinesisIndexTaskTest extends EasyMockSupport Assert.assertEquals(ImmutableSet.of(desc1, desc2), publishedDescriptors()); Assert.assertEquals( new KinesisDataSourceMetadata( - new SeekableStreamEndSequenceNumbers<>(stream, ImmutableMap.of(shardId1, "4")) + new SeekableStreamEndSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "4")) ), metadataStorageCoordinator.getDataSourceMetadata(DATA_SCHEMA.getDataSource()) ); @@ -1524,21 +1523,21 @@ public class KinesisIndexTaskTest extends EasyMockSupport @Test(timeout = 120_000L) public void testRunConflicting() throws Exception { - recordSupplier.assign(anyObject()); - expectLastCall().anyTimes(); + recordSupplier.assign(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); - expect(recordSupplier.getEarliestSequenceNumber(anyObject())).andReturn("0").anyTimes(); + EasyMock.expect(recordSupplier.getEarliestSequenceNumber(EasyMock.anyObject())).andReturn("0").anyTimes(); - recordSupplier.seek(anyObject(), anyString()); - expectLastCall().anyTimes(); + recordSupplier.seek(EasyMock.anyObject(), EasyMock.anyString()); + EasyMock.expectLastCall().anyTimes(); - expect(recordSupplier.poll(anyLong())).andReturn(records.subList(2, 13)) - .once() - .andReturn(records.subList(3, 13)) - .once(); + EasyMock.expect(recordSupplier.poll(EasyMock.anyLong())).andReturn(records.subList(2, 13)) + .once() + .andReturn(records.subList(3, 13)) + .once(); recordSupplier.close(); - expectLastCall().atLeastOnce(); + EasyMock.expectLastCall().atLeastOnce(); replayAll(); @@ -1547,8 +1546,8 @@ public class KinesisIndexTaskTest extends EasyMockSupport new KinesisIndexTaskIOConfig( 0, "sequence0", - new 
SeekableStreamStartSequenceNumbers<>(stream, ImmutableMap.of(shardId1, "2"), ImmutableSet.of()), - new SeekableStreamEndSequenceNumbers<>(stream, ImmutableMap.of(shardId1, "4")), + new SeekableStreamStartSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "2"), ImmutableSet.of()), + new SeekableStreamEndSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "4")), true, null, null, @@ -1565,8 +1564,8 @@ public class KinesisIndexTaskTest extends EasyMockSupport new KinesisIndexTaskIOConfig( 1, "sequence1", - new SeekableStreamStartSequenceNumbers<>(stream, ImmutableMap.of(shardId1, "3"), ImmutableSet.of()), - new SeekableStreamEndSequenceNumbers<>(stream, ImmutableMap.of(shardId1, "9")), + new SeekableStreamStartSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "3"), ImmutableSet.of()), + new SeekableStreamEndSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "9")), true, null, null, @@ -1601,7 +1600,7 @@ public class KinesisIndexTaskTest extends EasyMockSupport SegmentDescriptor desc2 = sd(task1, "2011/P1D", 0); Assert.assertEquals(ImmutableSet.of(desc1, desc2), publishedDescriptors()); Assert.assertEquals( - new KinesisDataSourceMetadata(new SeekableStreamEndSequenceNumbers<>(stream, ImmutableMap.of(shardId1, "4"))), + new KinesisDataSourceMetadata(new SeekableStreamEndSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "4"))), metadataStorageCoordinator.getDataSourceMetadata(DATA_SCHEMA.getDataSource()) ); @@ -1614,21 +1613,21 @@ public class KinesisIndexTaskTest extends EasyMockSupport @Test(timeout = 120_000L) public void testRunConflictingWithoutTransactions() throws Exception { - recordSupplier.assign(anyObject()); - expectLastCall().anyTimes(); + recordSupplier.assign(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); - expect(recordSupplier.getEarliestSequenceNumber(anyObject())).andReturn("0").anyTimes(); + EasyMock.expect(recordSupplier.getEarliestSequenceNumber(EasyMock.anyObject())).andReturn("0").anyTimes(); - recordSupplier.seek(anyObject(), anyString()); - expectLastCall().anyTimes(); + recordSupplier.seek(EasyMock.anyObject(), EasyMock.anyString()); + EasyMock.expectLastCall().anyTimes(); - expect(recordSupplier.poll(anyLong())).andReturn(records.subList(2, 13)) - .once() - .andReturn(records.subList(3, 13)) - .once(); + EasyMock.expect(recordSupplier.poll(EasyMock.anyLong())).andReturn(records.subList(2, 13)) + .once() + .andReturn(records.subList(3, 13)) + .once(); recordSupplier.close(); - expectLastCall().times(2); + EasyMock.expectLastCall().times(2); replayAll(); @@ -1637,8 +1636,8 @@ public class KinesisIndexTaskTest extends EasyMockSupport new KinesisIndexTaskIOConfig( 0, "sequence0", - new SeekableStreamStartSequenceNumbers<>(stream, ImmutableMap.of(shardId1, "2"), ImmutableSet.of()), - new SeekableStreamEndSequenceNumbers<>(stream, ImmutableMap.of(shardId1, "4")), + new SeekableStreamStartSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "2"), ImmutableSet.of()), + new SeekableStreamEndSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "4")), false, null, null, @@ -1655,8 +1654,8 @@ public class KinesisIndexTaskTest extends EasyMockSupport new KinesisIndexTaskIOConfig( 1, "sequence1", - new SeekableStreamStartSequenceNumbers<>(stream, ImmutableMap.of(shardId1, "3"), ImmutableSet.of()), - new SeekableStreamEndSequenceNumbers<>(stream, ImmutableMap.of(shardId1, "9")), + new SeekableStreamStartSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "3"), ImmutableSet.of()), + new SeekableStreamEndSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, 
"9")), false, null, null, @@ -1711,18 +1710,18 @@ public class KinesisIndexTaskTest extends EasyMockSupport public void testRunOneTaskTwoPartitions() throws Exception { // Insert data - recordSupplier.assign(anyObject()); - expectLastCall().anyTimes(); + recordSupplier.assign(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); - expect(recordSupplier.getEarliestSequenceNumber(anyObject())).andReturn("0").anyTimes(); + EasyMock.expect(recordSupplier.getEarliestSequenceNumber(EasyMock.anyObject())).andReturn("0").anyTimes(); - recordSupplier.seek(anyObject(), anyString()); - expectLastCall().anyTimes(); + recordSupplier.seek(EasyMock.anyObject(), EasyMock.anyString()); + EasyMock.expectLastCall().anyTimes(); - expect(recordSupplier.poll(anyLong())).andReturn(records.subList(2, records.size())).once(); + EasyMock.expect(recordSupplier.poll(EasyMock.anyLong())).andReturn(records.subList(2, records.size())).once(); recordSupplier.close(); - expectLastCall().once(); + EasyMock.expectLastCall().once(); replayAll(); @@ -1732,11 +1731,11 @@ public class KinesisIndexTaskTest extends EasyMockSupport 0, "sequence1", new SeekableStreamStartSequenceNumbers<>( - stream, - ImmutableMap.of(shardId1, "2", shardId0, "0"), + STREAM, + ImmutableMap.of(SHARD_ID1, "2", SHARD_ID0, "0"), ImmutableSet.of() ), - new SeekableStreamEndSequenceNumbers<>(stream, ImmutableMap.of(shardId1, "4", shardId0, "1")), + new SeekableStreamEndSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "4", SHARD_ID0, "1")), true, null, null, @@ -1772,7 +1771,7 @@ public class KinesisIndexTaskTest extends EasyMockSupport Assert.assertEquals(ImmutableSet.of(desc1, desc2, desc4), publishedDescriptors()); Assert.assertEquals( new KinesisDataSourceMetadata( - new SeekableStreamEndSequenceNumbers<>(stream, ImmutableMap.of(shardId1, "4", shardId0, "1")) + new SeekableStreamEndSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "4", SHARD_ID0, "1")) ), metadataStorageCoordinator.getDataSourceMetadata(DATA_SCHEMA.getDataSource()) ); @@ -1792,21 +1791,21 @@ public class KinesisIndexTaskTest extends EasyMockSupport @Test(timeout = 120_000L) public void testRunTwoTasksTwoPartitions() throws Exception { - recordSupplier.assign(anyObject()); - expectLastCall().anyTimes(); + recordSupplier.assign(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); - expect(recordSupplier.getEarliestSequenceNumber(anyObject())).andReturn("0").anyTimes(); + EasyMock.expect(recordSupplier.getEarliestSequenceNumber(EasyMock.anyObject())).andReturn("0").anyTimes(); - recordSupplier.seek(anyObject(), anyString()); - expectLastCall().anyTimes(); + recordSupplier.seek(EasyMock.anyObject(), EasyMock.anyString()); + EasyMock.expectLastCall().anyTimes(); - expect(recordSupplier.poll(anyLong())).andReturn(records.subList(2, 13)) - .once() - .andReturn(records.subList(13, 15)) - .once(); + EasyMock.expect(recordSupplier.poll(EasyMock.anyLong())).andReturn(records.subList(2, 13)) + .once() + .andReturn(records.subList(13, 15)) + .once(); recordSupplier.close(); - expectLastCall().times(2); + EasyMock.expectLastCall().times(2); replayAll(); @@ -1815,8 +1814,8 @@ public class KinesisIndexTaskTest extends EasyMockSupport new KinesisIndexTaskIOConfig( 0, "sequence0", - new SeekableStreamStartSequenceNumbers<>(stream, ImmutableMap.of(shardId1, "2"), ImmutableSet.of()), - new SeekableStreamEndSequenceNumbers<>(stream, ImmutableMap.of(shardId1, "4")), + new SeekableStreamStartSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "2"), ImmutableSet.of()), + new 
SeekableStreamEndSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "4")), true, null, null, @@ -1833,8 +1832,8 @@ public class KinesisIndexTaskTest extends EasyMockSupport new KinesisIndexTaskIOConfig( 1, "sequence1", - new SeekableStreamStartSequenceNumbers<>(stream, ImmutableMap.of(shardId0, "0"), ImmutableSet.of()), - new SeekableStreamEndSequenceNumbers<>(stream, ImmutableMap.of(shardId0, "1")), + new SeekableStreamStartSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID0, "0"), ImmutableSet.of()), + new SeekableStreamEndSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID0, "1")), true, null, null, @@ -1872,7 +1871,7 @@ public class KinesisIndexTaskTest extends EasyMockSupport Assert.assertEquals(ImmutableSet.of(desc1, desc2, desc3, desc4), publishedDescriptors()); Assert.assertEquals( new KinesisDataSourceMetadata( - new SeekableStreamEndSequenceNumbers<>(stream, ImmutableMap.of(shardId1, "4", shardId0, "1")) + new SeekableStreamEndSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "4", SHARD_ID0, "1")) ), metadataStorageCoordinator.getDataSourceMetadata(DATA_SCHEMA.getDataSource()) ); @@ -1891,18 +1890,18 @@ public class KinesisIndexTaskTest extends EasyMockSupport @Test(timeout = 120_000L) public void testRestore() throws Exception { - final StreamPartition streamPartition = StreamPartition.of(stream, shardId1); + final StreamPartition streamPartition = StreamPartition.of(STREAM, SHARD_ID1); recordSupplier.assign(ImmutableSet.of(streamPartition)); - expectLastCall(); + EasyMock.expectLastCall(); recordSupplier.seek(streamPartition, "2"); - expectLastCall(); - expect(recordSupplier.poll(anyLong())).andReturn(records.subList(2, 4)) - .once() - .andReturn(Collections.emptyList()) - .anyTimes(); + EasyMock.expectLastCall(); + EasyMock.expect(recordSupplier.poll(EasyMock.anyLong())).andReturn(records.subList(2, 4)) + .once() + .andReturn(Collections.emptyList()) + .anyTimes(); recordSupplier.close(); - expectLastCall().once(); + EasyMock.expectLastCall().once(); replayAll(); @@ -1911,8 +1910,8 @@ public class KinesisIndexTaskTest extends EasyMockSupport new KinesisIndexTaskIOConfig( 0, "sequence0", - new SeekableStreamStartSequenceNumbers<>(stream, ImmutableMap.of(shardId1, "2"), ImmutableSet.of()), - new SeekableStreamEndSequenceNumbers<>(stream, ImmutableMap.of(shardId1, "5")), + new SeekableStreamStartSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "2"), ImmutableSet.of()), + new SeekableStreamEndSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "5")), true, null, null, @@ -1940,17 +1939,17 @@ public class KinesisIndexTaskTest extends EasyMockSupport Assert.assertEquals(TaskState.SUCCESS, future1.get().getStatusCode()); verifyAll(); - reset(recordSupplier); + EasyMock.reset(recordSupplier); recordSupplier.assign(ImmutableSet.of(streamPartition)); - expectLastCall(); + EasyMock.expectLastCall(); recordSupplier.seek(streamPartition, "3"); - expectLastCall(); - expect(recordSupplier.poll(anyLong())).andReturn(records.subList(3, 6)).once(); + EasyMock.expectLastCall(); + EasyMock.expect(recordSupplier.poll(EasyMock.anyLong())).andReturn(records.subList(3, 6)).once(); recordSupplier.assign(ImmutableSet.of()); - expectLastCall(); + EasyMock.expectLastCall(); recordSupplier.close(); - expectLastCall(); + EasyMock.expectLastCall(); replayAll(); @@ -1960,8 +1959,8 @@ public class KinesisIndexTaskTest extends EasyMockSupport new KinesisIndexTaskIOConfig( 0, "sequence0", - new SeekableStreamStartSequenceNumbers<>(stream, ImmutableMap.of(shardId1, "2"), ImmutableSet.of()), - new 
SeekableStreamEndSequenceNumbers<>(stream, ImmutableMap.of(shardId1, "5")), + new SeekableStreamStartSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "2"), ImmutableSet.of()), + new SeekableStreamEndSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "5")), true, null, null, @@ -2001,7 +2000,7 @@ public class KinesisIndexTaskTest extends EasyMockSupport Assert.assertEquals(ImmutableSet.of(desc1, desc2), publishedDescriptors()); Assert.assertEquals( new KinesisDataSourceMetadata( - new SeekableStreamEndSequenceNumbers<>(stream, ImmutableMap.of(shardId1, "5"))), + new SeekableStreamEndSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "5"))), metadataStorageCoordinator.getDataSourceMetadata(DATA_SCHEMA.getDataSource()) ); @@ -2015,30 +2014,30 @@ public class KinesisIndexTaskTest extends EasyMockSupport { maxRowsPerSegment = 2; maxRecordsPerPoll = 1; - records = generateSinglePartitionRecords(stream); + records = generateSinglePartitionRecords(STREAM); - recordSupplier.assign(anyObject()); - expectLastCall().anyTimes(); + recordSupplier.assign(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); - recordSupplier.seek(anyObject(), anyString()); - expectLastCall().anyTimes(); + recordSupplier.seek(EasyMock.anyObject(), EasyMock.anyString()); + EasyMock.expectLastCall().anyTimes(); recordSupplier.close(); - expectLastCall().once(); + EasyMock.expectLastCall().once(); // simulate 1 record at a time - expect(recordSupplier.poll(anyLong())).andReturn(Collections.singletonList(records.get(0))) - .once() - .andReturn(Collections.singletonList(records.get(1))) - .once() - .andReturn(Collections.singletonList(records.get(2))) - .once() - .andReturn(Collections.singletonList(records.get(3))) - .once() - .andReturn(Collections.singletonList(records.get(4))) - .once() - .andReturn(Collections.emptyList()) - .anyTimes(); + EasyMock.expect(recordSupplier.poll(EasyMock.anyLong())).andReturn(Collections.singletonList(records.get(0))) + .once() + .andReturn(Collections.singletonList(records.get(1))) + .once() + .andReturn(Collections.singletonList(records.get(2))) + .once() + .andReturn(Collections.singletonList(records.get(3))) + .once() + .andReturn(Collections.singletonList(records.get(4))) + .once() + .andReturn(Collections.emptyList()) + .anyTimes(); replayAll(); @@ -2047,8 +2046,8 @@ public class KinesisIndexTaskTest extends EasyMockSupport new KinesisIndexTaskIOConfig( 0, "sequence0", - new SeekableStreamStartSequenceNumbers<>(stream, ImmutableMap.of(shardId1, "0"), ImmutableSet.of()), - new SeekableStreamEndSequenceNumbers<>(stream, ImmutableMap.of(shardId1, "6")), + new SeekableStreamStartSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "0"), ImmutableSet.of()), + new SeekableStreamEndSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "6")), true, null, null, @@ -2062,8 +2061,8 @@ public class KinesisIndexTaskTest extends EasyMockSupport ); final SeekableStreamEndSequenceNumbers checkpoint1 = new SeekableStreamEndSequenceNumbers<>( - stream, - ImmutableMap.of(shardId1, "4") + STREAM, + ImmutableMap.of(SHARD_ID1, "4") ); final ListenableFuture future1 = runTask(task1); @@ -2082,23 +2081,23 @@ public class KinesisIndexTaskTest extends EasyMockSupport Assert.assertEquals(TaskState.SUCCESS, future1.get().getStatusCode()); verifyAll(); - reset(recordSupplier); + EasyMock.reset(recordSupplier); - recordSupplier.assign(anyObject()); - expectLastCall().anyTimes(); + recordSupplier.assign(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); - recordSupplier.seek(anyObject(), 
anyString()); - expectLastCall().anyTimes(); + recordSupplier.seek(EasyMock.anyObject(), EasyMock.anyString()); + EasyMock.expectLastCall().anyTimes(); - expect(recordSupplier.poll(anyLong())).andReturn(Collections.singletonList(records.get(5))) - .once() - .andReturn(Collections.singletonList(records.get(6))) - .once() - .andReturn(Collections.emptyList()) - .anyTimes(); + EasyMock.expect(recordSupplier.poll(EasyMock.anyLong())).andReturn(Collections.singletonList(records.get(5))) + .once() + .andReturn(Collections.singletonList(records.get(6))) + .once() + .andReturn(Collections.emptyList()) + .anyTimes(); recordSupplier.close(); - expectLastCall(); + EasyMock.expectLastCall(); replayAll(); @@ -2108,8 +2107,8 @@ public class KinesisIndexTaskTest extends EasyMockSupport new KinesisIndexTaskIOConfig( 0, "sequence0", - new SeekableStreamStartSequenceNumbers<>(stream, ImmutableMap.of(shardId1, "0"), ImmutableSet.of()), - new SeekableStreamEndSequenceNumbers<>(stream, ImmutableMap.of(shardId1, "6")), + new SeekableStreamStartSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "0"), ImmutableSet.of()), + new SeekableStreamEndSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "6")), true, null, null, @@ -2147,7 +2146,7 @@ public class KinesisIndexTaskTest extends EasyMockSupport Assert.assertEquals(ImmutableSet.of(desc1, desc2, desc3, desc4, desc5, desc6), publishedDescriptors()); Assert.assertEquals( new KinesisDataSourceMetadata( - new SeekableStreamEndSequenceNumbers<>(stream, ImmutableMap.of(shardId1, "6")) + new SeekableStreamEndSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "6")) ), metadataStorageCoordinator.getDataSourceMetadata(DATA_SCHEMA.getDataSource()) ); @@ -2156,15 +2155,15 @@ public class KinesisIndexTaskTest extends EasyMockSupport @Test(timeout = 120_000L) public void testRunWithPauseAndResume() throws Exception { - final StreamPartition streamPartition = StreamPartition.of(stream, shardId1); + final StreamPartition streamPartition = StreamPartition.of(STREAM, SHARD_ID1); recordSupplier.assign(ImmutableSet.of(streamPartition)); - expectLastCall(); + EasyMock.expectLastCall(); recordSupplier.seek(streamPartition, "2"); - expectLastCall(); - expect(recordSupplier.poll(anyLong())).andReturn(records.subList(2, 5)) - .once() - .andReturn(Collections.emptyList()) - .anyTimes(); + EasyMock.expectLastCall(); + EasyMock.expect(recordSupplier.poll(EasyMock.anyLong())).andReturn(records.subList(2, 5)) + .once() + .andReturn(Collections.emptyList()) + .anyTimes(); replayAll(); @@ -2173,8 +2172,8 @@ public class KinesisIndexTaskTest extends EasyMockSupport new KinesisIndexTaskIOConfig( 0, "sequence0", - new SeekableStreamStartSequenceNumbers<>(stream, ImmutableMap.of(shardId1, "2"), ImmutableSet.of()), - new SeekableStreamEndSequenceNumbers<>(stream, ImmutableMap.of(shardId1, "13")), + new SeekableStreamStartSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "2"), ImmutableSet.of()), + new SeekableStreamEndSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "13")), true, null, null, @@ -2195,14 +2194,14 @@ public class KinesisIndexTaskTest extends EasyMockSupport } Assert.assertEquals(3, countEvents(task)); - Assert.assertEquals(Status.READING, task.getRunner().getStatus()); + Assert.assertEquals(SeekableStreamIndexTaskRunner.Status.READING, task.getRunner().getStatus()); task.getRunner().pause(); - while (task.getRunner().getStatus() != Status.PAUSED) { + while (task.getRunner().getStatus() != SeekableStreamIndexTaskRunner.Status.PAUSED) { Thread.sleep(10); } - 
Assert.assertEquals(Status.PAUSED, task.getRunner().getStatus()); + Assert.assertEquals(SeekableStreamIndexTaskRunner.Status.PAUSED, task.getRunner().getStatus()); verifyAll(); @@ -2218,12 +2217,12 @@ public class KinesisIndexTaskTest extends EasyMockSupport Assert.assertEquals(currentOffsets, task.getRunner().getCurrentOffsets()); - reset(recordSupplier); + EasyMock.reset(recordSupplier); recordSupplier.assign(ImmutableSet.of()); - expectLastCall(); + EasyMock.expectLastCall(); recordSupplier.close(); - expectLastCall().once(); + EasyMock.expectLastCall().once(); replayAll(); @@ -2245,8 +2244,8 @@ public class KinesisIndexTaskTest extends EasyMockSupport Assert.assertEquals(ImmutableSet.of(desc1, desc2), publishedDescriptors()); Assert.assertEquals( new KinesisDataSourceMetadata(new SeekableStreamEndSequenceNumbers<>( - stream, - ImmutableMap.of(shardId1, currentOffsets.get(shardId1)) + STREAM, + ImmutableMap.of(SHARD_ID1, currentOffsets.get(SHARD_ID1)) )), metadataStorageCoordinator.getDataSourceMetadata(DATA_SCHEMA.getDataSource()) ); @@ -2263,30 +2262,30 @@ public class KinesisIndexTaskTest extends EasyMockSupport // which has done some incremental handoffs, thus the context will contain starting // sequence sequences from which the task should start reading and ignore the start sequences // Insert data - recordSupplier.assign(anyObject()); - expectLastCall().anyTimes(); + recordSupplier.assign(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); - expect(recordSupplier.getEarliestSequenceNumber(anyObject())).andReturn("0").anyTimes(); + EasyMock.expect(recordSupplier.getEarliestSequenceNumber(EasyMock.anyObject())).andReturn("0").anyTimes(); - recordSupplier.seek(anyObject(), anyString()); - expectLastCall().anyTimes(); + recordSupplier.seek(EasyMock.anyObject(), EasyMock.anyString()); + EasyMock.expectLastCall().anyTimes(); - expect(recordSupplier.poll(anyLong())).andReturn(records.subList(2, 13)) - .once(); + EasyMock.expect(recordSupplier.poll(EasyMock.anyLong())).andReturn(records.subList(2, 13)) + .once(); recordSupplier.close(); - expectLastCall(); + EasyMock.expectLastCall(); replayAll(); final TreeMap> sequences = new TreeMap<>(); // Here the sequence number is 1 meaning that one incremental handoff was done by the failed task // and this task should start reading from offset 2 for partition 0 (not offset 1, because end is inclusive) - sequences.put(1, ImmutableMap.of(shardId1, "1")); + sequences.put(1, ImmutableMap.of(SHARD_ID1, "1")); final Map context = new HashMap<>(); context.put( SeekableStreamSupervisor.CHECKPOINTS_CTX_KEY, - objectMapper.writerFor(KinesisSupervisor.CHECKPOINTS_TYPE_REF).writeValueAsString(sequences) + OBJECT_MAPPER.writerFor(KinesisSupervisor.CHECKPOINTS_TYPE_REF).writeValueAsString(sequences) ); @@ -2296,8 +2295,8 @@ public class KinesisIndexTaskTest extends EasyMockSupport new KinesisIndexTaskIOConfig( 0, "sequence0", - new SeekableStreamStartSequenceNumbers<>(stream, ImmutableMap.of(shardId1, "0"), ImmutableSet.of()), - new SeekableStreamEndSequenceNumbers<>(stream, ImmutableMap.of(shardId1, "4")), + new SeekableStreamStartSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "0"), ImmutableSet.of()), + new SeekableStreamEndSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "4")), true, null, null, @@ -2325,7 +2324,7 @@ public class KinesisIndexTaskTest extends EasyMockSupport SegmentDescriptor desc2 = sd(task, "2011/P1D", 0); Assert.assertEquals(ImmutableSet.of(desc1, desc2), publishedDescriptors()); Assert.assertEquals( - new 
KinesisDataSourceMetadata(new SeekableStreamEndSequenceNumbers<>(stream, ImmutableMap.of(shardId1, "4"))), + new KinesisDataSourceMetadata(new SeekableStreamEndSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "4"))), metadataStorageCoordinator.getDataSourceMetadata(DATA_SCHEMA.getDataSource()) ); @@ -2337,58 +2336,58 @@ public class KinesisIndexTaskTest extends EasyMockSupport @Test(timeout = 5000L) public void testIncrementalHandOffReadsThroughEndOffsets() throws Exception { - records = generateSinglePartitionRecords(stream); + records = generateSinglePartitionRecords(STREAM); final String baseSequenceName = "sequence0"; // as soon as any segment has more than one record, incremental publishing should happen maxRowsPerSegment = 2; final KinesisRecordSupplier recordSupplier1 = mock(KinesisRecordSupplier.class); - recordSupplier1.assign(anyObject()); - expectLastCall().anyTimes(); - expect(recordSupplier1.getEarliestSequenceNumber(anyObject())).andReturn("0").anyTimes(); - recordSupplier1.seek(anyObject(), anyString()); - expectLastCall().anyTimes(); - expect(recordSupplier1.poll(anyLong())).andReturn(records.subList(0, 5)) - .once() - .andReturn(records.subList(4, 10)) - .once(); + recordSupplier1.assign(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); + EasyMock.expect(recordSupplier1.getEarliestSequenceNumber(EasyMock.anyObject())).andReturn("0").anyTimes(); + recordSupplier1.seek(EasyMock.anyObject(), EasyMock.anyString()); + EasyMock.expectLastCall().anyTimes(); + EasyMock.expect(recordSupplier1.poll(EasyMock.anyLong())).andReturn(records.subList(0, 5)) + .once() + .andReturn(records.subList(4, 10)) + .once(); recordSupplier1.close(); - expectLastCall().once(); + EasyMock.expectLastCall().once(); final KinesisRecordSupplier recordSupplier2 = mock(KinesisRecordSupplier.class); - recordSupplier2.assign(anyObject()); - expectLastCall().anyTimes(); - expect(recordSupplier2.getEarliestSequenceNumber(anyObject())).andReturn("0").anyTimes(); - recordSupplier2.seek(anyObject(), anyString()); - expectLastCall().anyTimes(); - expect(recordSupplier2.poll(anyLong())).andReturn(records.subList(0, 5)) - .once() - .andReturn(records.subList(4, 10)) - .once(); + recordSupplier2.assign(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); + EasyMock.expect(recordSupplier2.getEarliestSequenceNumber(EasyMock.anyObject())).andReturn("0").anyTimes(); + recordSupplier2.seek(EasyMock.anyObject(), EasyMock.anyString()); + EasyMock.expectLastCall().anyTimes(); + EasyMock.expect(recordSupplier2.poll(EasyMock.anyLong())).andReturn(records.subList(0, 5)) + .once() + .andReturn(records.subList(4, 10)) + .once(); recordSupplier2.close(); - expectLastCall().once(); + EasyMock.expectLastCall().once(); replayAll(); final SeekableStreamStartSequenceNumbers startPartitions = new SeekableStreamStartSequenceNumbers<>( - stream, - ImmutableMap.of(shardId1, "0"), + STREAM, + ImmutableMap.of(SHARD_ID1, "0"), ImmutableSet.of() ); final SeekableStreamEndSequenceNumbers checkpoint1 = new SeekableStreamEndSequenceNumbers<>( - stream, - ImmutableMap.of(shardId1, "4") + STREAM, + ImmutableMap.of(SHARD_ID1, "4") ); final SeekableStreamEndSequenceNumbers checkpoint2 = new SeekableStreamEndSequenceNumbers<>( - stream, - ImmutableMap.of(shardId1, "9") + STREAM, + ImmutableMap.of(SHARD_ID1, "9") ); final SeekableStreamEndSequenceNumbers endPartitions = new SeekableStreamEndSequenceNumbers<>( - stream, - ImmutableMap.of(shardId1, "100") // simulating unlimited + STREAM, + ImmutableMap.of(SHARD_ID1, "100") // 
simulating unlimited ); final KinesisIndexTaskIOConfig ioConfig = new KinesisIndexTaskIOConfig( 0, @@ -2429,11 +2428,11 @@ public class KinesisIndexTaskTest extends EasyMockSupport this::runTask ); - while (normalReplica.getRunner().getStatus() != Status.PAUSED) { + while (normalReplica.getRunner().getStatus() != SeekableStreamIndexTaskRunner.Status.PAUSED) { Thread.sleep(10); } staleReplica.getRunner().pause(); - while (staleReplica.getRunner().getStatus() != Status.PAUSED) { + while (staleReplica.getRunner().getStatus() != SeekableStreamIndexTaskRunner.Status.PAUSED) { Thread.sleep(10); } Map currentOffsets = ImmutableMap.copyOf(normalReplica.getRunner().getCurrentOffsets()); @@ -2442,10 +2441,10 @@ public class KinesisIndexTaskTest extends EasyMockSupport normalReplica.getRunner().setEndOffsets(currentOffsets, false); staleReplica.getRunner().setEndOffsets(currentOffsets, false); - while (normalReplica.getRunner().getStatus() != Status.PAUSED) { + while (normalReplica.getRunner().getStatus() != SeekableStreamIndexTaskRunner.Status.PAUSED) { Thread.sleep(10); } - while (staleReplica.getRunner().getStatus() != Status.PAUSED) { + while (staleReplica.getRunner().getStatus() != SeekableStreamIndexTaskRunner.Status.PAUSED) { Thread.sleep(10); } currentOffsets = ImmutableMap.copyOf(normalReplica.getRunner().getCurrentOffsets()); @@ -2481,7 +2480,7 @@ public class KinesisIndexTaskTest extends EasyMockSupport Assert.assertEquals(descriptors, publishedDescriptors()); Assert.assertEquals( new KinesisDataSourceMetadata( - new SeekableStreamEndSequenceNumbers<>(stream, ImmutableMap.of(shardId1, "9")) + new SeekableStreamEndSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID1, "9")) ), metadataStorageCoordinator.getDataSourceMetadata(DATA_SCHEMA.getDataSource()) ); @@ -2493,12 +2492,12 @@ public class KinesisIndexTaskTest extends EasyMockSupport final TreeMap> checkpoints = new TreeMap<>(); // Here the sequence number is 1 meaning that one incremental handoff was done by the failed task // and this task should start reading from offset 2 for partition 0 (not offset 1, because end is inclusive) - checkpoints.put(0, ImmutableMap.of(shardId0, "0", shardId1, "0")); - checkpoints.put(1, ImmutableMap.of(shardId0, "0", shardId1, "1")); - checkpoints.put(2, ImmutableMap.of(shardId0, "1", shardId1, "3")); + checkpoints.put(0, ImmutableMap.of(SHARD_ID0, "0", SHARD_ID1, "0")); + checkpoints.put(1, ImmutableMap.of(SHARD_ID0, "0", SHARD_ID1, "1")); + checkpoints.put(2, ImmutableMap.of(SHARD_ID0, "1", SHARD_ID1, "3")); final Map context = new HashMap<>(); - context.put("checkpoints", objectMapper.writerFor(KinesisSupervisor.CHECKPOINTS_TYPE_REF) - .writeValueAsString(checkpoints)); + context.put("checkpoints", OBJECT_MAPPER.writerFor(KinesisSupervisor.CHECKPOINTS_TYPE_REF) + .writeValueAsString(checkpoints)); final KinesisIndexTask task = createTask( "task1", @@ -2507,11 +2506,11 @@ public class KinesisIndexTaskTest extends EasyMockSupport 0, "sequence0", new SeekableStreamStartSequenceNumbers<>( - stream, - ImmutableMap.of(shardId0, "0", shardId1, "0"), - ImmutableSet.of(shardId0) + STREAM, + ImmutableMap.of(SHARD_ID0, "0", SHARD_ID1, "0"), + ImmutableSet.of(SHARD_ID0) ), - new SeekableStreamEndSequenceNumbers<>(stream, ImmutableMap.of(shardId0, "1", shardId1, "5")), + new SeekableStreamEndSequenceNumbers<>(STREAM, ImmutableMap.of(SHARD_ID0, "1", SHARD_ID1, "5")), true, null, null, @@ -2579,7 +2578,7 @@ public class KinesisIndexTaskTest extends EasyMockSupport } } catch (Throwable e) { - log.warn(e, "Task 
failed"); + LOG.warn(e, "Task failed"); return TaskStatus.failure(task.getId(), Throwables.getStackTraceAsString(e)); } } @@ -2591,14 +2590,7 @@ public class KinesisIndexTaskTest extends EasyMockSupport { return Iterables.find( taskLockbox.findLocksForTask(task), - new Predicate() - { - @Override - public boolean apply(TaskLock lock) - { - return lock.getInterval().contains(interval); - } - } + lock -> lock.getInterval().contains(interval) ); } @@ -2626,6 +2618,8 @@ public class KinesisIndexTaskTest extends EasyMockSupport @Nullable final Map context ) throws JsonProcessingException { + boolean resetOffsetAutomatically = false; + int maxRowsInMemory = 1000; final KinesisIndexTaskTuningConfig tuningConfig = new KinesisIndexTaskTuningConfig( maxRowsInMemory, null, @@ -2667,7 +2661,7 @@ public class KinesisIndexTaskTest extends EasyMockSupport if (!context.containsKey(SeekableStreamSupervisor.CHECKPOINTS_CTX_KEY)) { final TreeMap> checkpoints = new TreeMap<>(); checkpoints.put(0, ioConfig.getStartSequenceNumbers().getPartitionSequenceNumberMap()); - final String checkpointsJson = objectMapper + final String checkpointsJson = OBJECT_MAPPER .writerFor(KinesisSupervisor.CHECKPOINTS_TYPE_REF) .writeValueAsString(checkpoints); context.put(SeekableStreamSupervisor.CHECKPOINTS_CTX_KEY, checkpointsJson); @@ -2696,7 +2690,7 @@ public class KinesisIndexTaskTest extends EasyMockSupport dataSchema.getAggregators(), dataSchema.getGranularitySpec(), dataSchema.getTransformSpec(), - objectMapper + OBJECT_MAPPER ); } @@ -2793,7 +2787,7 @@ public class KinesisIndexTaskTest extends EasyMockSupport @Nullable DataSourceMetadata currentDataSourceMetadata ) { - log.info("Adding checkpoint hash to the set"); + LOG.info("Adding checkpoint hash to the set"); checkpointRequestsHash.add( Objects.hash( supervisorId, @@ -3004,7 +2998,7 @@ public class KinesisIndexTaskTest extends EasyMockSupport private IngestionStatsAndErrorsTaskReportData getTaskReportData() throws IOException { - Map taskReports = objectMapper.readValue( + Map taskReports = OBJECT_MAPPER.readValue( reportsFile, new TypeReference>() { diff --git a/extensions-core/kinesis-indexing-service/src/test/java/org/apache/druid/indexing/kinesis/KinesisRecordSupplierTest.java b/extensions-core/kinesis-indexing-service/src/test/java/org/apache/druid/indexing/kinesis/KinesisRecordSupplierTest.java index 6dd7de9c6c5..230432a58eb 100644 --- a/extensions-core/kinesis-indexing-service/src/test/java/org/apache/druid/indexing/kinesis/KinesisRecordSupplierTest.java +++ b/extensions-core/kinesis-indexing-service/src/test/java/org/apache/druid/indexing/kinesis/KinesisRecordSupplierTest.java @@ -38,6 +38,7 @@ import org.apache.druid.indexing.seekablestream.common.StreamPartition; import org.apache.druid.java.util.common.ISE; import org.apache.druid.java.util.common.StringUtils; import org.easymock.Capture; +import org.easymock.EasyMock; import org.easymock.EasyMockSupport; import org.junit.After; import org.junit.Assert; @@ -50,34 +51,15 @@ import java.util.List; import java.util.Set; import java.util.stream.Collectors; -import static org.easymock.EasyMock.anyObject; -import static org.easymock.EasyMock.anyString; -import static org.easymock.EasyMock.capture; -import static org.easymock.EasyMock.eq; -import static org.easymock.EasyMock.expect; - public class KinesisRecordSupplierTest extends EasyMockSupport { private static final String stream = "stream"; - private static long poll_timeout_millis = 2000; - private static int recordsPerFetch; - private static String shardId1 
= "1"; - private static String shardId0 = "0"; - private static String shard1Iterator = "1"; - private static String shard0Iterator = "0"; - private static AmazonKinesis kinesis; - private static DescribeStreamResult describeStreamResult0; - private static DescribeStreamResult describeStreamResult1; - private static GetShardIteratorResult getShardIteratorResult0; - private static GetShardIteratorResult getShardIteratorResult1; - private static GetRecordsResult getRecordsResult0; - private static GetRecordsResult getRecordsResult1; - private static StreamDescription streamDescription0; - private static StreamDescription streamDescription1; - private static Shard shard0; - private static Shard shard1; - private static KinesisRecordSupplier recordSupplier; - private static List shard1Records = ImmutableList.of( + private static final long POLL_TIMEOUT_MILLIS = 2000; + private static final String SHARD_ID1 = "1"; + private static final String SHARD_ID0 = "0"; + private static final String SHARD1_ITERATOR = "1"; + private static final String SHARD0_ITERATOR = "0"; + private static final List SHARD1_RECORDS = ImmutableList.of( new Record().withData(jb("2011", "d", "y", "10", "20.0", "1.0")).withSequenceNumber("0"), new Record().withData(jb("2011", "e", "y", "10", "20.0", "1.0")).withSequenceNumber("1"), new Record().withData(jb("246140482-04-24T15:36:27.903Z", "x", "z", "10", "20.0", "1.0")).withSequenceNumber("2"), @@ -89,38 +71,38 @@ public class KinesisRecordSupplierTest extends EasyMockSupport new Record().withData(jb("2012", "g", "y", "10", "20.0", "1.0")).withSequenceNumber("8"), new Record().withData(jb("2011", "h", "y", "10", "20.0", "1.0")).withSequenceNumber("9") ); - private static List shard0Records = ImmutableList.of( + private static final List SHARD0_RECORDS = ImmutableList.of( new Record().withData(jb("2008", "a", "y", "10", "20.0", "1.0")).withSequenceNumber("0"), new Record().withData(jb("2009", "b", "y", "10", "20.0", "1.0")).withSequenceNumber("1") ); - private static List allRecords = ImmutableList.builder() - .addAll(shard0Records.stream() - .map(x -> new OrderedPartitionableRecord<>( - stream, - shardId0, - x.getSequenceNumber(), - Collections - .singletonList( - toByteArray( - x.getData())) - )) - .collect( - Collectors - .toList())) - .addAll(shard1Records.stream() - .map(x -> new OrderedPartitionableRecord<>( - stream, - shardId1, - x.getSequenceNumber(), - Collections - .singletonList( - toByteArray( - x.getData())) - )) - .collect( - Collectors - .toList())) - .build(); + private static final List ALL_RECORDS = ImmutableList.builder() + .addAll(SHARD0_RECORDS.stream() + .map(x -> new OrderedPartitionableRecord<>( + stream, + SHARD_ID0, + x.getSequenceNumber(), + Collections + .singletonList( + toByteArray( + x.getData())) + )) + .collect( + Collectors + .toList())) + .addAll(SHARD1_RECORDS.stream() + .map(x -> new OrderedPartitionableRecord<>( + stream, + SHARD_ID1, + x.getSequenceNumber(), + Collections + .singletonList( + toByteArray( + x.getData())) + )) + .collect( + Collectors + .toList())) + .build(); private static ByteBuffer jb(String timestamp, String dim1, String dim2, String dimLong, String dimFloat, String met1) { @@ -141,6 +123,20 @@ public class KinesisRecordSupplierTest extends EasyMockSupport } } + private static int recordsPerFetch; + private static AmazonKinesis kinesis; + private static DescribeStreamResult describeStreamResult0; + private static DescribeStreamResult describeStreamResult1; + private static GetShardIteratorResult 
getShardIteratorResult0; + private static GetShardIteratorResult getShardIteratorResult1; + private static GetRecordsResult getRecordsResult0; + private static GetRecordsResult getRecordsResult1; + private static StreamDescription streamDescription0; + private static StreamDescription streamDescription1; + private static Shard shard0; + private static Shard shard1; + private static KinesisRecordSupplier recordSupplier; + @Before public void setupTest() { @@ -170,22 +166,24 @@ public class KinesisRecordSupplierTest extends EasyMockSupport { final Capture capturedRequest = Capture.newInstance(); - expect(kinesis.describeStream(capture(capturedRequest))).andReturn(describeStreamResult0).once(); - expect(describeStreamResult0.getStreamDescription()).andReturn(streamDescription0).once(); - expect(streamDescription0.getShards()).andReturn(ImmutableList.of(shard0)).once(); - expect(streamDescription0.isHasMoreShards()).andReturn(true).once(); - expect(shard0.getShardId()).andReturn(shardId0).times(2); - expect(kinesis.describeStream(anyObject(DescribeStreamRequest.class))).andReturn(describeStreamResult1).once(); - expect(describeStreamResult1.getStreamDescription()).andReturn(streamDescription1).once(); - expect(streamDescription1.getShards()).andReturn(ImmutableList.of(shard1)).once(); - expect(streamDescription1.isHasMoreShards()).andReturn(false).once(); - expect(shard1.getShardId()).andReturn(shardId1).once(); + EasyMock.expect(kinesis.describeStream(EasyMock.capture(capturedRequest))).andReturn(describeStreamResult0).once(); + EasyMock.expect(describeStreamResult0.getStreamDescription()).andReturn(streamDescription0).once(); + EasyMock.expect(streamDescription0.getShards()).andReturn(ImmutableList.of(shard0)).once(); + EasyMock.expect(streamDescription0.isHasMoreShards()).andReturn(true).once(); + EasyMock.expect(shard0.getShardId()).andReturn(SHARD_ID0).times(2); + EasyMock.expect(kinesis.describeStream(EasyMock.anyObject(DescribeStreamRequest.class))) + .andReturn(describeStreamResult1) + .once(); + EasyMock.expect(describeStreamResult1.getStreamDescription()).andReturn(streamDescription1).once(); + EasyMock.expect(streamDescription1.getShards()).andReturn(ImmutableList.of(shard1)).once(); + EasyMock.expect(streamDescription1.isHasMoreShards()).andReturn(false).once(); + EasyMock.expect(shard1.getShardId()).andReturn(SHARD_ID1).once(); replayAll(); Set> partitions = ImmutableSet.of( - StreamPartition.of(stream, shardId0), - StreamPartition.of(stream, shardId1) + StreamPartition.of(stream, SHARD_ID0), + StreamPartition.of(stream, SHARD_ID1) ); recordSupplier = new KinesisRecordSupplier( @@ -206,7 +204,7 @@ public class KinesisRecordSupplierTest extends EasyMockSupport recordSupplier.assign(partitions); Assert.assertEquals(partitions, recordSupplier.getAssignment()); - Assert.assertEquals(ImmutableSet.of(shardId1, shardId0), recordSupplier.getPartitionIds(stream)); + Assert.assertEquals(ImmutableSet.of(SHARD_ID1, SHARD_ID0), recordSupplier.getPartitionIds(stream)); Assert.assertEquals(Collections.emptyList(), recordSupplier.poll(100)); verifyAll(); @@ -236,38 +234,40 @@ public class KinesisRecordSupplierTest extends EasyMockSupport { recordsPerFetch = 100; - expect(kinesis.getShardIterator( - anyObject(), - eq(shardId0), - anyString(), - anyString() + EasyMock.expect(kinesis.getShardIterator( + EasyMock.anyObject(), + EasyMock.eq(SHARD_ID0), + EasyMock.anyString(), + EasyMock.anyString() )).andReturn( getShardIteratorResult0).anyTimes(); - expect(kinesis.getShardIterator( - anyObject(), - 
eq(shardId1), - anyString(), - anyString() + EasyMock.expect(kinesis.getShardIterator( + EasyMock.anyObject(), + EasyMock.eq(SHARD_ID1), + EasyMock.anyString(), + EasyMock.anyString() )).andReturn( getShardIteratorResult1).anyTimes(); - expect(getShardIteratorResult0.getShardIterator()).andReturn(shard0Iterator).anyTimes(); - expect(getShardIteratorResult1.getShardIterator()).andReturn(shard1Iterator).anyTimes(); - expect(kinesis.getRecords(generateGetRecordsReq(shard0Iterator, recordsPerFetch))).andReturn(getRecordsResult0) - .anyTimes(); - expect(kinesis.getRecords(generateGetRecordsReq(shard1Iterator, recordsPerFetch))).andReturn(getRecordsResult1) - .anyTimes(); - expect(getRecordsResult0.getRecords()).andReturn(shard0Records).once(); - expect(getRecordsResult1.getRecords()).andReturn(shard1Records).once(); - expect(getRecordsResult0.getNextShardIterator()).andReturn(null).anyTimes(); - expect(getRecordsResult1.getNextShardIterator()).andReturn(null).anyTimes(); + EasyMock.expect(getShardIteratorResult0.getShardIterator()).andReturn(SHARD0_ITERATOR).anyTimes(); + EasyMock.expect(getShardIteratorResult1.getShardIterator()).andReturn(SHARD1_ITERATOR).anyTimes(); + EasyMock.expect(kinesis.getRecords(generateGetRecordsReq(SHARD0_ITERATOR, recordsPerFetch))) + .andReturn(getRecordsResult0) + .anyTimes(); + EasyMock.expect(kinesis.getRecords(generateGetRecordsReq(SHARD1_ITERATOR, recordsPerFetch))) + .andReturn(getRecordsResult1) + .anyTimes(); + EasyMock.expect(getRecordsResult0.getRecords()).andReturn(SHARD0_RECORDS).once(); + EasyMock.expect(getRecordsResult1.getRecords()).andReturn(SHARD1_RECORDS).once(); + EasyMock.expect(getRecordsResult0.getNextShardIterator()).andReturn(null).anyTimes(); + EasyMock.expect(getRecordsResult1.getNextShardIterator()).andReturn(null).anyTimes(); replayAll(); Set> partitions = ImmutableSet.of( - StreamPartition.of(stream, shardId0), - StreamPartition.of(stream, shardId1) + StreamPartition.of(stream, SHARD_ID0), + StreamPartition.of(stream, SHARD_ID1) ); @@ -293,12 +293,12 @@ public class KinesisRecordSupplierTest extends EasyMockSupport } List> polledRecords = cleanRecords(recordSupplier.poll( - poll_timeout_millis)); + POLL_TIMEOUT_MILLIS)); verifyAll(); Assert.assertEquals(partitions, recordSupplier.getAssignment()); - Assert.assertTrue(polledRecords.containsAll(allRecords)); + Assert.assertTrue(polledRecords.containsAll(ALL_RECORDS)); } @Test @@ -307,37 +307,39 @@ public class KinesisRecordSupplierTest extends EasyMockSupport { recordsPerFetch = 100; - expect(kinesis.getShardIterator( - anyObject(), - eq(shardId0), - anyString(), - anyString() + EasyMock.expect(kinesis.getShardIterator( + EasyMock.anyObject(), + EasyMock.eq(SHARD_ID0), + EasyMock.anyString(), + EasyMock.anyString() )).andReturn( getShardIteratorResult0).anyTimes(); - expect(kinesis.getShardIterator( - anyObject(), - eq(shardId1), - anyString(), - anyString() + EasyMock.expect(kinesis.getShardIterator( + EasyMock.anyObject(), + EasyMock.eq(SHARD_ID1), + EasyMock.anyString(), + EasyMock.anyString() )).andReturn( getShardIteratorResult1).anyTimes(); - expect(getShardIteratorResult0.getShardIterator()).andReturn(shard0Iterator).anyTimes(); - expect(getShardIteratorResult1.getShardIterator()).andReturn(shard1Iterator).anyTimes(); - expect(kinesis.getRecords(generateGetRecordsReq(shard0Iterator, recordsPerFetch))).andReturn(getRecordsResult0) - .anyTimes(); - expect(kinesis.getRecords(generateGetRecordsReq(shard1Iterator, recordsPerFetch))).andReturn(getRecordsResult1) - .anyTimes(); - 
expect(getRecordsResult0.getRecords()).andReturn(shard0Records.subList(1, shard0Records.size())).once(); - expect(getRecordsResult1.getRecords()).andReturn(shard1Records.subList(2, shard1Records.size())).once(); - expect(getRecordsResult0.getNextShardIterator()).andReturn(null).anyTimes(); - expect(getRecordsResult1.getNextShardIterator()).andReturn(null).anyTimes(); + EasyMock.expect(getShardIteratorResult0.getShardIterator()).andReturn(SHARD0_ITERATOR).anyTimes(); + EasyMock.expect(getShardIteratorResult1.getShardIterator()).andReturn(SHARD1_ITERATOR).anyTimes(); + EasyMock.expect(kinesis.getRecords(generateGetRecordsReq(SHARD0_ITERATOR, recordsPerFetch))) + .andReturn(getRecordsResult0) + .anyTimes(); + EasyMock.expect(kinesis.getRecords(generateGetRecordsReq(SHARD1_ITERATOR, recordsPerFetch))) + .andReturn(getRecordsResult1) + .anyTimes(); + EasyMock.expect(getRecordsResult0.getRecords()).andReturn(SHARD0_RECORDS.subList(1, SHARD0_RECORDS.size())).once(); + EasyMock.expect(getRecordsResult1.getRecords()).andReturn(SHARD1_RECORDS.subList(2, SHARD1_RECORDS.size())).once(); + EasyMock.expect(getRecordsResult0.getNextShardIterator()).andReturn(null).anyTimes(); + EasyMock.expect(getRecordsResult1.getNextShardIterator()).andReturn(null).anyTimes(); replayAll(); - StreamPartition shard0Partition = StreamPartition.of(stream, shardId0); - StreamPartition shard1Partition = StreamPartition.of(stream, shardId1); + StreamPartition shard0Partition = StreamPartition.of(stream, SHARD_ID0); + StreamPartition shard1Partition = StreamPartition.of(stream, SHARD_ID1); Set> partitions = ImmutableSet.of( shard0Partition, shard1Partition @@ -357,8 +359,8 @@ public class KinesisRecordSupplierTest extends EasyMockSupport ); recordSupplier.assign(partitions); - recordSupplier.seek(shard1Partition, shard1Records.get(2).getSequenceNumber()); - recordSupplier.seek(shard0Partition, shard0Records.get(1).getSequenceNumber()); + recordSupplier.seek(shard1Partition, SHARD1_RECORDS.get(2).getSequenceNumber()); + recordSupplier.seek(shard0Partition, SHARD0_RECORDS.get(1).getSequenceNumber()); recordSupplier.start(); for (int i = 0; i < 10 && recordSupplier.bufferSize() < 9; i++) { @@ -366,12 +368,12 @@ public class KinesisRecordSupplierTest extends EasyMockSupport } List> polledRecords = cleanRecords(recordSupplier.poll( - poll_timeout_millis)); + POLL_TIMEOUT_MILLIS)); verifyAll(); Assert.assertEquals(9, polledRecords.size()); - Assert.assertTrue(polledRecords.containsAll(allRecords.subList(4, 12))); - Assert.assertTrue(polledRecords.containsAll(allRecords.subList(1, 2))); + Assert.assertTrue(polledRecords.containsAll(ALL_RECORDS.subList(4, 12))); + Assert.assertTrue(polledRecords.containsAll(ALL_RECORDS.subList(1, 2))); } @@ -382,29 +384,29 @@ public class KinesisRecordSupplierTest extends EasyMockSupport { recordsPerFetch = 100; - expect(kinesis.getShardIterator( - anyObject(), - eq(shardId0), - anyString(), - anyString() + EasyMock.expect(kinesis.getShardIterator( + EasyMock.anyObject(), + EasyMock.eq(SHARD_ID0), + EasyMock.anyString(), + EasyMock.anyString() )).andReturn( getShardIteratorResult0).anyTimes(); - expect(kinesis.getShardIterator( - anyObject(), - eq(shardId1), - anyString(), - anyString() + EasyMock.expect(kinesis.getShardIterator( + EasyMock.anyObject(), + EasyMock.eq(SHARD_ID1), + EasyMock.anyString(), + EasyMock.anyString() )).andReturn( getShardIteratorResult1).anyTimes(); - expect(getShardIteratorResult0.getShardIterator()).andReturn(null).once(); - 
expect(getShardIteratorResult1.getShardIterator()).andReturn(null).once(); + EasyMock.expect(getShardIteratorResult0.getShardIterator()).andReturn(null).once(); + EasyMock.expect(getShardIteratorResult1.getShardIterator()).andReturn(null).once(); replayAll(); - StreamPartition shard0 = StreamPartition.of(stream, shardId0); - StreamPartition shard1 = StreamPartition.of(stream, shardId1); + StreamPartition shard0 = StreamPartition.of(stream, SHARD_ID0); + StreamPartition shard1 = StreamPartition.of(stream, SHARD_ID1); Set> partitions = ImmutableSet.of( shard0, shard1 @@ -430,7 +432,7 @@ public class KinesisRecordSupplierTest extends EasyMockSupport for (int i = 0; i < 10 && recordSupplier.bufferSize() < 2; i++) { Thread.sleep(100); } - Assert.assertEquals(Collections.emptyList(), cleanRecords(recordSupplier.poll(poll_timeout_millis))); + Assert.assertEquals(Collections.emptyList(), cleanRecords(recordSupplier.poll(POLL_TIMEOUT_MILLIS))); verifyAll(); } @@ -438,8 +440,8 @@ public class KinesisRecordSupplierTest extends EasyMockSupport @Test(expected = ISE.class) public void testSeekUnassigned() throws InterruptedException { - StreamPartition shard0 = StreamPartition.of(stream, shardId0); - StreamPartition shard1 = StreamPartition.of(stream, shardId1); + StreamPartition shard0 = StreamPartition.of(stream, SHARD_ID0); + StreamPartition shard1 = StreamPartition.of(stream, SHARD_ID1); Set> partitions = ImmutableSet.of( shard1 ); @@ -469,32 +471,39 @@ public class KinesisRecordSupplierTest extends EasyMockSupport // tests that after doing a seek, the now invalid records in buffer is cleaned up properly recordsPerFetch = 100; - expect(kinesis.getShardIterator( - anyObject(), - eq(shardId1), - anyString(), - eq("5") + EasyMock.expect(kinesis.getShardIterator( + EasyMock.anyObject(), + EasyMock.eq(SHARD_ID1), + EasyMock.anyString(), + EasyMock.eq("5") )).andReturn( getShardIteratorResult1).once(); - expect(kinesis.getShardIterator(anyObject(), eq(shardId1), anyString(), eq("7"))).andReturn(getShardIteratorResult0) - .once(); + EasyMock.expect(kinesis.getShardIterator( + EasyMock.anyObject(), + EasyMock.eq(SHARD_ID1), + EasyMock.anyString(), + EasyMock.eq("7") + )).andReturn(getShardIteratorResult0) + .once(); - expect(getShardIteratorResult1.getShardIterator()).andReturn(shard1Iterator).once(); - expect(getShardIteratorResult0.getShardIterator()).andReturn(shard0Iterator).once(); - expect(kinesis.getRecords(generateGetRecordsReq(shard1Iterator, recordsPerFetch))).andReturn(getRecordsResult1) - .once(); - expect(kinesis.getRecords(generateGetRecordsReq(shard0Iterator, recordsPerFetch))).andReturn(getRecordsResult0) - .once(); - expect(getRecordsResult1.getRecords()).andReturn(shard1Records.subList(5, shard1Records.size())).once(); - expect(getRecordsResult0.getRecords()).andReturn(shard1Records.subList(7, shard1Records.size())).once(); - expect(getRecordsResult1.getNextShardIterator()).andReturn(null).anyTimes(); - expect(getRecordsResult0.getNextShardIterator()).andReturn(null).anyTimes(); + EasyMock.expect(getShardIteratorResult1.getShardIterator()).andReturn(SHARD1_ITERATOR).once(); + EasyMock.expect(getShardIteratorResult0.getShardIterator()).andReturn(SHARD0_ITERATOR).once(); + EasyMock.expect(kinesis.getRecords(generateGetRecordsReq(SHARD1_ITERATOR, recordsPerFetch))) + .andReturn(getRecordsResult1) + .once(); + EasyMock.expect(kinesis.getRecords(generateGetRecordsReq(SHARD0_ITERATOR, recordsPerFetch))) + .andReturn(getRecordsResult0) + .once(); + 
EasyMock.expect(getRecordsResult1.getRecords()).andReturn(SHARD1_RECORDS.subList(5, SHARD1_RECORDS.size())).once(); + EasyMock.expect(getRecordsResult0.getRecords()).andReturn(SHARD1_RECORDS.subList(7, SHARD1_RECORDS.size())).once(); + EasyMock.expect(getRecordsResult1.getNextShardIterator()).andReturn(null).anyTimes(); + EasyMock.expect(getRecordsResult0.getNextShardIterator()).andReturn(null).anyTimes(); replayAll(); Set> partitions = ImmutableSet.of( - StreamPartition.of(stream, shardId1) + StreamPartition.of(stream, SHARD_ID1) ); recordSupplier = new KinesisRecordSupplier( @@ -511,30 +520,30 @@ public class KinesisRecordSupplierTest extends EasyMockSupport ); recordSupplier.assign(partitions); - recordSupplier.seek(StreamPartition.of(stream, shardId1), "5"); + recordSupplier.seek(StreamPartition.of(stream, SHARD_ID1), "5"); recordSupplier.start(); for (int i = 0; i < 10 && recordSupplier.bufferSize() < 6; i++) { Thread.sleep(100); } - OrderedPartitionableRecord firstRecord = recordSupplier.poll(poll_timeout_millis).get(0); + OrderedPartitionableRecord firstRecord = recordSupplier.poll(POLL_TIMEOUT_MILLIS).get(0); Assert.assertEquals( - allRecords.get(7), + ALL_RECORDS.get(7), firstRecord ); - recordSupplier.seek(StreamPartition.of(stream, shardId1), "7"); + recordSupplier.seek(StreamPartition.of(stream, SHARD_ID1), "7"); recordSupplier.start(); while (recordSupplier.bufferSize() < 4) { Thread.sleep(100); } - OrderedPartitionableRecord record2 = recordSupplier.poll(poll_timeout_millis).get(0); + OrderedPartitionableRecord record2 = recordSupplier.poll(POLL_TIMEOUT_MILLIS).get(0); - Assert.assertEquals(allRecords.get(9), record2); + Assert.assertEquals(ALL_RECORDS.get(9), record2); verifyAll(); } @@ -544,38 +553,40 @@ public class KinesisRecordSupplierTest extends EasyMockSupport { recordsPerFetch = 100; - expect(kinesis.getShardIterator( - anyObject(), - eq(shardId0), - anyString(), - anyString() + EasyMock.expect(kinesis.getShardIterator( + EasyMock.anyObject(), + EasyMock.eq(SHARD_ID0), + EasyMock.anyString(), + EasyMock.anyString() )).andReturn( getShardIteratorResult0).anyTimes(); - expect(kinesis.getShardIterator( - anyObject(), - eq(shardId1), - anyString(), - anyString() + EasyMock.expect(kinesis.getShardIterator( + EasyMock.anyObject(), + EasyMock.eq(SHARD_ID1), + EasyMock.anyString(), + EasyMock.anyString() )).andReturn( getShardIteratorResult1).anyTimes(); - expect(getShardIteratorResult0.getShardIterator()).andReturn(shard0Iterator).anyTimes(); - expect(getShardIteratorResult1.getShardIterator()).andReturn(shard1Iterator).anyTimes(); - expect(kinesis.getRecords(generateGetRecordsReq(shard0Iterator, recordsPerFetch))).andReturn(getRecordsResult0) - .anyTimes(); - expect(kinesis.getRecords(generateGetRecordsReq(shard1Iterator, recordsPerFetch))).andReturn(getRecordsResult1) - .anyTimes(); - expect(getRecordsResult0.getRecords()).andReturn(shard0Records).once(); - expect(getRecordsResult1.getRecords()).andReturn(shard1Records).once(); - expect(getRecordsResult0.getNextShardIterator()).andReturn(null).anyTimes(); - expect(getRecordsResult1.getNextShardIterator()).andReturn(null).anyTimes(); + EasyMock.expect(getShardIteratorResult0.getShardIterator()).andReturn(SHARD0_ITERATOR).anyTimes(); + EasyMock.expect(getShardIteratorResult1.getShardIterator()).andReturn(SHARD1_ITERATOR).anyTimes(); + EasyMock.expect(kinesis.getRecords(generateGetRecordsReq(SHARD0_ITERATOR, recordsPerFetch))) + .andReturn(getRecordsResult0) + .anyTimes(); + 
EasyMock.expect(kinesis.getRecords(generateGetRecordsReq(SHARD1_ITERATOR, recordsPerFetch))) + .andReturn(getRecordsResult1) + .anyTimes(); + EasyMock.expect(getRecordsResult0.getRecords()).andReturn(SHARD0_RECORDS).once(); + EasyMock.expect(getRecordsResult1.getRecords()).andReturn(SHARD1_RECORDS).once(); + EasyMock.expect(getRecordsResult0.getNextShardIterator()).andReturn(null).anyTimes(); + EasyMock.expect(getRecordsResult1.getNextShardIterator()).andReturn(null).anyTimes(); replayAll(); Set> partitions = ImmutableSet.of( - StreamPartition.of(stream, shardId0), - StreamPartition.of(stream, shardId1) + StreamPartition.of(stream, SHARD_ID0), + StreamPartition.of(stream, SHARD_ID1) ); @@ -601,12 +612,12 @@ public class KinesisRecordSupplierTest extends EasyMockSupport } List> polledRecords = cleanRecords(recordSupplier.poll( - poll_timeout_millis)); + POLL_TIMEOUT_MILLIS)); verifyAll(); Assert.assertEquals(partitions, recordSupplier.getAssignment()); - Assert.assertTrue(polledRecords.containsAll(allRecords)); + Assert.assertTrue(polledRecords.containsAll(ALL_RECORDS)); } /** diff --git a/extensions-core/kinesis-indexing-service/src/test/java/org/apache/druid/indexing/kinesis/KinesisSamplerSpecTest.java b/extensions-core/kinesis-indexing-service/src/test/java/org/apache/druid/indexing/kinesis/KinesisSamplerSpecTest.java index 95f6b4d6241..080759bdd2e 100644 --- a/extensions-core/kinesis-indexing-service/src/test/java/org/apache/druid/indexing/kinesis/KinesisSamplerSpecTest.java +++ b/extensions-core/kinesis-indexing-service/src/test/java/org/apache/druid/indexing/kinesis/KinesisSamplerSpecTest.java @@ -52,6 +52,7 @@ import org.apache.druid.query.aggregation.DoubleSumAggregatorFactory; import org.apache.druid.segment.TestHelper; import org.apache.druid.segment.indexing.DataSchema; import org.apache.druid.segment.indexing.granularity.UniformGranularitySpec; +import org.easymock.EasyMock; import org.easymock.EasyMockSupport; import org.junit.Assert; import org.junit.Test; @@ -63,10 +64,6 @@ import java.util.Iterator; import java.util.List; import java.util.Map; -import static org.easymock.EasyMock.anyLong; -import static org.easymock.EasyMock.expect; -import static org.easymock.EasyMock.expectLastCall; - public class KinesisSamplerSpecTest extends EasyMockSupport { private static final ObjectMapper objectMapper = TestHelper.makeJsonMapper(); @@ -132,18 +129,18 @@ public class KinesisSamplerSpecTest extends EasyMockSupport @Test(timeout = 10_000L) public void testSample() throws Exception { - expect(recordSupplier.getPartitionIds(STREAM)).andReturn(ImmutableSet.of(SHARD_ID)).once(); + EasyMock.expect(recordSupplier.getPartitionIds(STREAM)).andReturn(ImmutableSet.of(SHARD_ID)).once(); recordSupplier.assign(ImmutableSet.of(StreamPartition.of(STREAM, SHARD_ID))); - expectLastCall().once(); + EasyMock.expectLastCall().once(); recordSupplier.seekToEarliest(ImmutableSet.of(StreamPartition.of(STREAM, SHARD_ID))); - expectLastCall().once(); + EasyMock.expectLastCall().once(); - expect(recordSupplier.poll(anyLong())).andReturn(generateRecords(STREAM)).once(); + EasyMock.expect(recordSupplier.poll(EasyMock.anyLong())).andReturn(generateRecords(STREAM)).once(); recordSupplier.close(); - expectLastCall().once(); + EasyMock.expectLastCall().once(); replayAll(); diff --git a/extensions-core/kinesis-indexing-service/src/test/java/org/apache/druid/indexing/kinesis/supervisor/KinesisSupervisorTest.java 
b/extensions-core/kinesis-indexing-service/src/test/java/org/apache/druid/indexing/kinesis/supervisor/KinesisSupervisorTest.java index beb5b520dca..9b6f8934129 100644 --- a/extensions-core/kinesis-indexing-service/src/test/java/org/apache/druid/indexing/kinesis/supervisor/KinesisSupervisorTest.java +++ b/extensions-core/kinesis-indexing-service/src/test/java/org/apache/druid/indexing/kinesis/supervisor/KinesisSupervisorTest.java @@ -59,6 +59,7 @@ import org.apache.druid.indexing.overlord.supervisor.SupervisorReport; import org.apache.druid.indexing.overlord.supervisor.SupervisorStateManager; import org.apache.druid.indexing.overlord.supervisor.SupervisorStateManagerConfig; import org.apache.druid.indexing.seekablestream.SeekableStreamEndSequenceNumbers; +import org.apache.druid.indexing.seekablestream.SeekableStreamIndexTaskRunner; import org.apache.druid.indexing.seekablestream.SeekableStreamIndexTaskTuningConfig; import org.apache.druid.indexing.seekablestream.SeekableStreamStartSequenceNumbers; import org.apache.druid.indexing.seekablestream.common.RecordSupplier; @@ -106,12 +107,6 @@ import java.util.Map; import java.util.TreeMap; import java.util.concurrent.Executor; -import static org.apache.druid.indexing.seekablestream.SeekableStreamIndexTaskRunner.Status; -import static org.easymock.EasyMock.anyObject; -import static org.easymock.EasyMock.anyString; -import static org.easymock.EasyMock.expect; -import static org.easymock.EasyMock.expectLastCall; - public class KinesisSupervisorTest extends EasyMockSupport { private static final ObjectMapper objectMapper = TestHelper.makeJsonMapper(); @@ -213,28 +208,31 @@ public class KinesisSupervisorTest extends EasyMockSupport { supervisor = getTestableSupervisor(1, 1, true, "PT1H", null, null); - supervisorRecordSupplier.assign(anyObject()); - expectLastCall().anyTimes(); - expect(supervisorRecordSupplier.getPartitionIds(stream)).andReturn(ImmutableSet.of(shardId1, shardId0)).anyTimes(); - expect(supervisorRecordSupplier.getAssignment()).andReturn(ImmutableSet.of(shard1Partition, shard0Partition)) - .anyTimes(); - supervisorRecordSupplier.seekToLatest(anyObject()); - expectLastCall().anyTimes(); - expect(supervisorRecordSupplier.getEarliestSequenceNumber(anyObject())).andReturn("0").anyTimes(); - supervisorRecordSupplier.seek(anyObject(), anyString()); - expectLastCall().anyTimes(); + supervisorRecordSupplier.assign(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getPartitionIds(stream)) + .andReturn(ImmutableSet.of(shardId1, shardId0)) + .anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getAssignment()) + .andReturn(ImmutableSet.of(shard1Partition, shard0Partition)) + .anyTimes(); + supervisorRecordSupplier.seekToLatest(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getEarliestSequenceNumber(EasyMock.anyObject())).andReturn("0").anyTimes(); + supervisorRecordSupplier.seek(EasyMock.anyObject(), EasyMock.anyString()); + EasyMock.expectLastCall().anyTimes(); Capture captured = Capture.newInstance(); - expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); - expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); - expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); - expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( + EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); + 
EasyMock.expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); + EasyMock.expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); + EasyMock.expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( new KinesisDataSourceMetadata( null ) ).anyTimes(); - expect(taskQueue.add(EasyMock.capture(captured))).andReturn(true); - taskRunner.registerListener(anyObject(TaskRunnerListener.class), anyObject(Executor.class)); + EasyMock.expect(taskQueue.add(EasyMock.capture(captured))).andReturn(true); + taskRunner.registerListener(EasyMock.anyObject(TaskRunnerListener.class), EasyMock.anyObject(Executor.class)); replayAll(); supervisor.start(); @@ -278,16 +276,19 @@ public class KinesisSupervisorTest extends EasyMockSupport { supervisor = getTestableSupervisor(1, 2, true, "PT1H", null, null); - supervisorRecordSupplier.assign(anyObject()); - expectLastCall().anyTimes(); - expect(supervisorRecordSupplier.getPartitionIds(stream)).andReturn(ImmutableSet.of(shardId1, shardId0)).anyTimes(); - expect(supervisorRecordSupplier.getAssignment()).andReturn(ImmutableSet.of(shard1Partition, shard0Partition)) - .anyTimes(); - supervisorRecordSupplier.seekToLatest(anyObject()); - expectLastCall().anyTimes(); - expect(supervisorRecordSupplier.getEarliestSequenceNumber(anyObject())).andReturn("0").anyTimes(); - supervisorRecordSupplier.seek(anyObject(), anyString()); - expectLastCall().anyTimes(); + supervisorRecordSupplier.assign(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getPartitionIds(stream)) + .andReturn(ImmutableSet.of(shardId1, shardId0)) + .anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getAssignment()) + .andReturn(ImmutableSet.of(shard1Partition, shard0Partition)) + .anyTimes(); + supervisorRecordSupplier.seekToLatest(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getEarliestSequenceNumber(EasyMock.anyObject())).andReturn("0").anyTimes(); + supervisorRecordSupplier.seek(EasyMock.anyObject(), EasyMock.anyString()); + EasyMock.expectLastCall().anyTimes(); Capture captured = Capture.newInstance(CaptureType.ALL); EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); @@ -335,16 +336,19 @@ public class KinesisSupervisorTest extends EasyMockSupport { supervisor = getTestableSupervisor(2, 1, true, "PT1H", null, null); - supervisorRecordSupplier.assign(anyObject()); - expectLastCall().anyTimes(); - expect(supervisorRecordSupplier.getPartitionIds(stream)).andReturn(ImmutableSet.of(shardId1, shardId0)).anyTimes(); - expect(supervisorRecordSupplier.getAssignment()).andReturn(ImmutableSet.of(shard1Partition, shard0Partition)) - .anyTimes(); - supervisorRecordSupplier.seekToLatest(anyObject()); - expectLastCall().anyTimes(); - expect(supervisorRecordSupplier.getEarliestSequenceNumber(anyObject())).andReturn("0").anyTimes(); - supervisorRecordSupplier.seek(anyObject(), anyString()); - expectLastCall().anyTimes(); + supervisorRecordSupplier.assign(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getPartitionIds(stream)) + .andReturn(ImmutableSet.of(shardId1, shardId0)) + .anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getAssignment()) + .andReturn(ImmutableSet.of(shard1Partition, shard0Partition)) + .anyTimes(); + supervisorRecordSupplier.seekToLatest(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); + 
EasyMock.expect(supervisorRecordSupplier.getEarliestSequenceNumber(EasyMock.anyObject())).andReturn("0").anyTimes(); + supervisorRecordSupplier.seek(EasyMock.anyObject(), EasyMock.anyString()); + EasyMock.expectLastCall().anyTimes(); Capture captured = Capture.newInstance(CaptureType.ALL); EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); @@ -409,16 +413,19 @@ public class KinesisSupervisorTest extends EasyMockSupport { supervisor = getTestableSupervisor(2, 1, true, "PT1H", new Period("PT1H"), null); - supervisorRecordSupplier.assign(anyObject()); - expectLastCall().anyTimes(); - expect(supervisorRecordSupplier.getPartitionIds(stream)).andReturn(ImmutableSet.of(shardId1, shardId0)).anyTimes(); - expect(supervisorRecordSupplier.getAssignment()).andReturn(ImmutableSet.of(shard1Partition, shard0Partition)) - .anyTimes(); - supervisorRecordSupplier.seekToLatest(anyObject()); - expectLastCall().anyTimes(); - expect(supervisorRecordSupplier.getEarliestSequenceNumber(anyObject())).andReturn("0").anyTimes(); - supervisorRecordSupplier.seek(anyObject(), anyString()); - expectLastCall().anyTimes(); + supervisorRecordSupplier.assign(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getPartitionIds(stream)) + .andReturn(ImmutableSet.of(shardId1, shardId0)) + .anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getAssignment()) + .andReturn(ImmutableSet.of(shard1Partition, shard0Partition)) + .anyTimes(); + supervisorRecordSupplier.seekToLatest(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getEarliestSequenceNumber(EasyMock.anyObject())).andReturn("0").anyTimes(); + supervisorRecordSupplier.seek(EasyMock.anyObject(), EasyMock.anyString()); + EasyMock.expectLastCall().anyTimes(); Capture captured = Capture.newInstance(CaptureType.ALL); EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); @@ -458,16 +465,19 @@ public class KinesisSupervisorTest extends EasyMockSupport { supervisor = getTestableSupervisor(2, 1, true, "PT1H", null, new Period("PT1H")); - supervisorRecordSupplier.assign(anyObject()); - expectLastCall().anyTimes(); - expect(supervisorRecordSupplier.getPartitionIds(stream)).andReturn(ImmutableSet.of(shardId1, shardId0)).anyTimes(); - expect(supervisorRecordSupplier.getAssignment()).andReturn(ImmutableSet.of(shard1Partition, shard0Partition)) - .anyTimes(); - supervisorRecordSupplier.seekToLatest(anyObject()); - expectLastCall().anyTimes(); - expect(supervisorRecordSupplier.getEarliestSequenceNumber(anyObject())).andReturn("0").anyTimes(); - supervisorRecordSupplier.seek(anyObject(), anyString()); - expectLastCall().anyTimes(); + supervisorRecordSupplier.assign(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getPartitionIds(stream)) + .andReturn(ImmutableSet.of(shardId1, shardId0)) + .anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getAssignment()) + .andReturn(ImmutableSet.of(shard1Partition, shard0Partition)) + .anyTimes(); + supervisorRecordSupplier.seekToLatest(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getEarliestSequenceNumber(EasyMock.anyObject())).andReturn("0").anyTimes(); + supervisorRecordSupplier.seek(EasyMock.anyObject(), EasyMock.anyString()); + EasyMock.expectLastCall().anyTimes(); Capture captured = Capture.newInstance(CaptureType.ALL); 
EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); @@ -503,27 +513,30 @@ public class KinesisSupervisorTest extends EasyMockSupport } - @Test /** * Test generating the starting sequences from the partition data stored in druid_dataSource which contains the * sequences of the last built segments. */ + @Test public void testDatasourceMetadata() throws Exception { supervisor = getTestableSupervisor(1, 1, true, "PT1H", null, null); - supervisorRecordSupplier.assign(anyObject()); - expectLastCall().anyTimes(); - expect(supervisorRecordSupplier.getPartitionIds(stream)).andReturn(ImmutableSet.of(shardId1, shardId0)).anyTimes(); - expect(supervisorRecordSupplier.getAssignment()).andReturn(ImmutableSet.of(shard1Partition, shard0Partition)) - .anyTimes(); - supervisorRecordSupplier.seekToLatest(anyObject()); - expectLastCall().anyTimes(); - expect(supervisorRecordSupplier.getEarliestSequenceNumber(shard1Partition)).andReturn("2").anyTimes(); - expect(supervisorRecordSupplier.getEarliestSequenceNumber(shard0Partition)).andReturn("1").anyTimes(); - expect(supervisorRecordSupplier.getLatestSequenceNumber(anyObject())).andReturn("100").anyTimes(); - supervisorRecordSupplier.seek(anyObject(), anyString()); - expectLastCall().anyTimes(); + supervisorRecordSupplier.assign(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getPartitionIds(stream)) + .andReturn(ImmutableSet.of(shardId1, shardId0)) + .anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getAssignment()) + .andReturn(ImmutableSet.of(shard1Partition, shard0Partition)) + .anyTimes(); + supervisorRecordSupplier.seekToLatest(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getEarliestSequenceNumber(shard1Partition)).andReturn("2").anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getEarliestSequenceNumber(shard0Partition)).andReturn("1").anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(EasyMock.anyObject())).andReturn("100").anyTimes(); + supervisorRecordSupplier.seek(EasyMock.anyObject(), EasyMock.anyString()); + EasyMock.expectLastCall().anyTimes(); Capture captured = Capture.newInstance(); EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); @@ -564,17 +577,20 @@ public class KinesisSupervisorTest extends EasyMockSupport { supervisor = getTestableSupervisor(1, 1, true, "PT1H", null, null); - supervisorRecordSupplier.assign(anyObject()); - expectLastCall().anyTimes(); - expect(supervisorRecordSupplier.getPartitionIds(stream)).andReturn(ImmutableSet.of(shardId1, shardId0)).anyTimes(); - expect(supervisorRecordSupplier.getAssignment()).andReturn(ImmutableSet.of(shard1Partition, shard0Partition)) - .anyTimes(); - supervisorRecordSupplier.seekToLatest(anyObject()); - expectLastCall().anyTimes(); - expect(supervisorRecordSupplier.getEarliestSequenceNumber(anyObject())).andReturn("0").anyTimes(); - expect(supervisorRecordSupplier.getLatestSequenceNumber(anyObject())).andReturn("100").anyTimes(); - supervisorRecordSupplier.seek(anyObject(), anyString()); - expectLastCall().anyTimes(); + supervisorRecordSupplier.assign(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getPartitionIds(stream)) + .andReturn(ImmutableSet.of(shardId1, shardId0)) + .anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getAssignment()) + .andReturn(ImmutableSet.of(shard1Partition, shard0Partition)) + 
.anyTimes(); + supervisorRecordSupplier.seekToLatest(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getEarliestSequenceNumber(EasyMock.anyObject())).andReturn("0").anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(EasyMock.anyObject())).andReturn("100").anyTimes(); + supervisorRecordSupplier.seek(EasyMock.anyObject(), EasyMock.anyString()); + EasyMock.expectLastCall().anyTimes(); EasyMock.expect(taskMaster.getTaskRunner()).andReturn(Optional.absent()).anyTimes(); @@ -604,17 +620,20 @@ public class KinesisSupervisorTest extends EasyMockSupport { supervisor = getTestableSupervisor(2, 1, true, "PT1H", null, null); - supervisorRecordSupplier.assign(anyObject()); - expectLastCall().anyTimes(); - expect(supervisorRecordSupplier.getPartitionIds(stream)).andReturn(ImmutableSet.of(shardId1, shardId0)).anyTimes(); - expect(supervisorRecordSupplier.getAssignment()).andReturn(ImmutableSet.of(shard1Partition, shard0Partition)) - .anyTimes(); - supervisorRecordSupplier.seekToLatest(anyObject()); - expectLastCall().anyTimes(); - expect(supervisorRecordSupplier.getEarliestSequenceNumber(anyObject())).andReturn("0").anyTimes(); - expect(supervisorRecordSupplier.getLatestSequenceNumber(anyObject())).andReturn("100").anyTimes(); - supervisorRecordSupplier.seek(anyObject(), anyString()); - expectLastCall().anyTimes(); + supervisorRecordSupplier.assign(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getPartitionIds(stream)) + .andReturn(ImmutableSet.of(shardId1, shardId0)) + .anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getAssignment()) + .andReturn(ImmutableSet.of(shard1Partition, shard0Partition)) + .anyTimes(); + supervisorRecordSupplier.seekToLatest(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getEarliestSequenceNumber(EasyMock.anyObject())).andReturn("0").anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(EasyMock.anyObject())).andReturn("100").anyTimes(); + supervisorRecordSupplier.seek(EasyMock.anyObject(), EasyMock.anyString()); + EasyMock.expectLastCall().anyTimes(); // different datasource (don't kill) Task id1 = createKinesisIndexTask( @@ -650,7 +669,7 @@ public class KinesisSupervisorTest extends EasyMockSupport EasyMock.expect(taskRunner.getRunningTasks()).andReturn(Collections.EMPTY_LIST).anyTimes(); EasyMock.expect(taskStorage.getActiveTasks()).andReturn(existingTasks).anyTimes(); EasyMock.expect(taskClient.getStatusAsync(EasyMock.anyString())) - .andReturn(Futures.immediateFuture(Status.NOT_STARTED)) + .andReturn(Futures.immediateFuture(SeekableStreamIndexTaskRunner.Status.NOT_STARTED)) .anyTimes(); EasyMock.expect(taskClient.getStartTimeAsync(EasyMock.anyString())) .andReturn(Futures.immediateFuture(DateTimes.nowUtc())) @@ -682,17 +701,20 @@ public class KinesisSupervisorTest extends EasyMockSupport null ); - supervisorRecordSupplier.assign(anyObject()); - expectLastCall().anyTimes(); - expect(supervisorRecordSupplier.getPartitionIds(stream)).andReturn(ImmutableSet.of(shardId1, shardId0)).anyTimes(); - expect(supervisorRecordSupplier.getAssignment()).andReturn(ImmutableSet.of(shard1Partition, shard0Partition)) - .anyTimes(); - supervisorRecordSupplier.seekToLatest(anyObject()); - expectLastCall().anyTimes(); - expect(supervisorRecordSupplier.getEarliestSequenceNumber(anyObject())).andReturn("0").anyTimes(); - 
expect(supervisorRecordSupplier.getLatestSequenceNumber(anyObject())).andReturn("100").anyTimes(); - supervisorRecordSupplier.seek(anyObject(), anyString()); - expectLastCall().anyTimes(); + supervisorRecordSupplier.assign(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getPartitionIds(stream)) + .andReturn(ImmutableSet.of(shardId1, shardId0)) + .anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getAssignment()) + .andReturn(ImmutableSet.of(shard1Partition, shard0Partition)) + .anyTimes(); + supervisorRecordSupplier.seekToLatest(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getEarliestSequenceNumber(EasyMock.anyObject())).andReturn("0").anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(EasyMock.anyObject())).andReturn("100").anyTimes(); + supervisorRecordSupplier.seek(EasyMock.anyObject(), EasyMock.anyString()); + EasyMock.expectLastCall().anyTimes(); Task id1 = createKinesisIndexTask( "id1", @@ -749,7 +771,7 @@ public class KinesisSupervisorTest extends EasyMockSupport EasyMock.expect(taskStorage.getTask("id3")).andReturn(Optional.of(id3)).anyTimes(); EasyMock.expect(taskStorage.getTask("id4")).andReturn(Optional.of(id4)).anyTimes(); EasyMock.expect(taskClient.getStatusAsync(EasyMock.anyString())) - .andReturn(Futures.immediateFuture(Status.NOT_STARTED)) + .andReturn(Futures.immediateFuture(SeekableStreamIndexTaskRunner.Status.NOT_STARTED)) .anyTimes(); EasyMock.expect(taskClient.getStartTimeAsync(EasyMock.anyString())) .andReturn(Futures.immediateFuture(DateTimes.nowUtc())) @@ -791,17 +813,20 @@ public class KinesisSupervisorTest extends EasyMockSupport public void testRequeueTaskWhenFailed() throws Exception { supervisor = getTestableSupervisor(2, 2, true, "PT1H", null, null); - supervisorRecordSupplier.assign(anyObject()); - expectLastCall().anyTimes(); - expect(supervisorRecordSupplier.getPartitionIds(stream)).andReturn(ImmutableSet.of(shardId1, shardId0)).anyTimes(); - expect(supervisorRecordSupplier.getAssignment()).andReturn(ImmutableSet.of(shard1Partition, shard0Partition)) - .anyTimes(); - supervisorRecordSupplier.seekToLatest(anyObject()); - expectLastCall().anyTimes(); - expect(supervisorRecordSupplier.getEarliestSequenceNumber(anyObject())).andReturn("0").anyTimes(); - expect(supervisorRecordSupplier.getLatestSequenceNumber(anyObject())).andReturn("100").anyTimes(); - supervisorRecordSupplier.seek(anyObject(), anyString()); - expectLastCall().anyTimes(); + supervisorRecordSupplier.assign(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getPartitionIds(stream)) + .andReturn(ImmutableSet.of(shardId1, shardId0)) + .anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getAssignment()) + .andReturn(ImmutableSet.of(shard1Partition, shard0Partition)) + .anyTimes(); + supervisorRecordSupplier.seekToLatest(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getEarliestSequenceNumber(EasyMock.anyObject())).andReturn("0").anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(EasyMock.anyObject())).andReturn("100").anyTimes(); + supervisorRecordSupplier.seek(EasyMock.anyObject(), EasyMock.anyString()); + EasyMock.expectLastCall().anyTimes(); Capture captured = Capture.newInstance(CaptureType.ALL); @@ -810,7 +835,7 @@ public class KinesisSupervisorTest extends EasyMockSupport 
EasyMock.expect(taskRunner.getRunningTasks()).andReturn(Collections.EMPTY_LIST).anyTimes(); EasyMock.expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); EasyMock.expect(taskClient.getStatusAsync(EasyMock.anyString())) - .andReturn(Futures.immediateFuture(Status.NOT_STARTED)) + .andReturn(Futures.immediateFuture(SeekableStreamIndexTaskRunner.Status.NOT_STARTED)) .anyTimes(); EasyMock.expect(taskClient.getStartTimeAsync(EasyMock.anyString())) .andReturn(Futures.immediateFuture(DateTimes.nowUtc())) @@ -881,7 +906,7 @@ public class KinesisSupervisorTest extends EasyMockSupport } EasyMock.expect(taskStorage.getStatus(iHaveFailed.getId())) .andReturn(Optional.of(TaskStatus.failure(iHaveFailed.getId()))); - EasyMock.expect(taskStorage.getTask(iHaveFailed.getId())).andReturn(Optional.of((Task) iHaveFailed)).anyTimes(); + EasyMock.expect(taskStorage.getTask(iHaveFailed.getId())).andReturn(Optional.of(iHaveFailed)).anyTimes(); EasyMock.expect(taskQueue.add(EasyMock.capture(aNewTaskCapture))).andReturn(true); EasyMock.replay(taskStorage); EasyMock.replay(taskQueue); @@ -900,17 +925,20 @@ public class KinesisSupervisorTest extends EasyMockSupport public void testRequeueAdoptedTaskWhenFailed() throws Exception { supervisor = getTestableSupervisor(2, 1, true, "PT1H", null, null); - supervisorRecordSupplier.assign(anyObject()); - expectLastCall().anyTimes(); - expect(supervisorRecordSupplier.getPartitionIds(stream)).andReturn(ImmutableSet.of(shardId1, shardId0)).anyTimes(); - expect(supervisorRecordSupplier.getAssignment()).andReturn(ImmutableSet.of(shard1Partition, shard0Partition)) - .anyTimes(); - supervisorRecordSupplier.seekToLatest(anyObject()); - expectLastCall().anyTimes(); - expect(supervisorRecordSupplier.getEarliestSequenceNumber(anyObject())).andReturn("0").anyTimes(); - expect(supervisorRecordSupplier.getLatestSequenceNumber(anyObject())).andReturn("100").anyTimes(); - supervisorRecordSupplier.seek(anyObject(), anyString()); - expectLastCall().anyTimes(); + supervisorRecordSupplier.assign(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getPartitionIds(stream)) + .andReturn(ImmutableSet.of(shardId1, shardId0)) + .anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getAssignment()) + .andReturn(ImmutableSet.of(shard1Partition, shard0Partition)) + .anyTimes(); + supervisorRecordSupplier.seekToLatest(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getEarliestSequenceNumber(EasyMock.anyObject())).andReturn("0").anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(EasyMock.anyObject())).andReturn("100").anyTimes(); + supervisorRecordSupplier.seek(EasyMock.anyObject(), EasyMock.anyString()); + EasyMock.expectLastCall().anyTimes(); DateTime now = DateTimes.nowUtc(); DateTime maxi = now.plusMinutes(60); @@ -946,7 +974,7 @@ public class KinesisSupervisorTest extends EasyMockSupport EasyMock.expect(taskStorage.getStatus("id1")).andReturn(Optional.of(TaskStatus.running("id1"))).anyTimes(); EasyMock.expect(taskStorage.getTask("id1")).andReturn(Optional.of(id1)).anyTimes(); EasyMock.expect(taskClient.getStatusAsync("id1")) - .andReturn(Futures.immediateFuture(Status.READING)); + .andReturn(Futures.immediateFuture(SeekableStreamIndexTaskRunner.Status.READING)); EasyMock.expect(taskClient.getStartTimeAsync("id1")).andReturn(Futures.immediateFuture(now)).anyTimes(); 
EasyMock.expect(taskQueue.add(EasyMock.capture(captured))).andReturn(true); EasyMock.expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn( @@ -999,10 +1027,10 @@ public class KinesisSupervisorTest extends EasyMockSupport EasyMock.expect(taskStorage.getStatus(runningTaskId)) .andReturn(Optional.of(TaskStatus.running(runningTaskId))) .anyTimes(); - EasyMock.expect(taskStorage.getTask(iHaveFailed.getId())).andReturn(Optional.of((Task) iHaveFailed)).anyTimes(); + EasyMock.expect(taskStorage.getTask(iHaveFailed.getId())).andReturn(Optional.of(iHaveFailed)).anyTimes(); EasyMock.expect(taskStorage.getTask(runningTaskId)).andReturn(Optional.of(captured.getValue())).anyTimes(); EasyMock.expect(taskClient.getStatusAsync(runningTaskId)) - .andReturn(Futures.immediateFuture(Status.READING)); + .andReturn(Futures.immediateFuture(SeekableStreamIndexTaskRunner.Status.READING)); EasyMock.expect(taskClient.getStartTimeAsync(runningTaskId)).andReturn(Futures.immediateFuture(now)).anyTimes(); EasyMock.expect(taskQueue.add(EasyMock.capture(aNewTaskCapture))).andReturn(true); EasyMock.replay(taskStorage); @@ -1034,17 +1062,20 @@ public class KinesisSupervisorTest extends EasyMockSupport public void testQueueNextTasksOnSuccess() throws Exception { supervisor = getTestableSupervisor(2, 2, true, "PT1H", null, null); - supervisorRecordSupplier.assign(anyObject()); - expectLastCall().anyTimes(); - expect(supervisorRecordSupplier.getPartitionIds(stream)).andReturn(ImmutableSet.of(shardId1, shardId0)).anyTimes(); - expect(supervisorRecordSupplier.getAssignment()).andReturn(ImmutableSet.of(shard1Partition, shard0Partition)) - .anyTimes(); - supervisorRecordSupplier.seekToLatest(anyObject()); - expectLastCall().anyTimes(); - expect(supervisorRecordSupplier.getEarliestSequenceNumber(anyObject())).andReturn("0").anyTimes(); - expect(supervisorRecordSupplier.getLatestSequenceNumber(anyObject())).andReturn("100").anyTimes(); - supervisorRecordSupplier.seek(anyObject(), anyString()); - expectLastCall().anyTimes(); + supervisorRecordSupplier.assign(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getPartitionIds(stream)) + .andReturn(ImmutableSet.of(shardId1, shardId0)) + .anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getAssignment()) + .andReturn(ImmutableSet.of(shard1Partition, shard0Partition)) + .anyTimes(); + supervisorRecordSupplier.seekToLatest(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getEarliestSequenceNumber(EasyMock.anyObject())).andReturn("0").anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(EasyMock.anyObject())).andReturn("100").anyTimes(); + supervisorRecordSupplier.seek(EasyMock.anyObject(), EasyMock.anyString()); + EasyMock.expectLastCall().anyTimes(); Capture captured = Capture.newInstance(CaptureType.ALL); EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); @@ -1052,7 +1083,7 @@ public class KinesisSupervisorTest extends EasyMockSupport EasyMock.expect(taskRunner.getRunningTasks()).andReturn(Collections.EMPTY_LIST).anyTimes(); EasyMock.expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); EasyMock.expect(taskClient.getStatusAsync(EasyMock.anyString())) - .andReturn(Futures.immediateFuture(Status.NOT_STARTED)) + .andReturn(Futures.immediateFuture(SeekableStreamIndexTaskRunner.Status.NOT_STARTED)) .anyTimes(); 
EasyMock.expect(taskClient.getStartTimeAsync(EasyMock.anyString())) .andReturn(Futures.immediateFuture(DateTimes.nowUtc())) @@ -1077,7 +1108,7 @@ public class KinesisSupervisorTest extends EasyMockSupport EasyMock.reset(taskClient); EasyMock.expect(taskClient.getStatusAsync(EasyMock.anyString())) - .andReturn(Futures.immediateFuture(Status.NOT_STARTED)) + .andReturn(Futures.immediateFuture(SeekableStreamIndexTaskRunner.Status.NOT_STARTED)) .anyTimes(); EasyMock.expect(taskClient.getStartTimeAsync(EasyMock.anyString())) .andReturn(Futures.immediateFuture(DateTimes.nowUtc())) @@ -1134,7 +1165,7 @@ public class KinesisSupervisorTest extends EasyMockSupport } EasyMock.expect(taskStorage.getStatus(iAmSuccess.getId())) .andReturn(Optional.of(TaskStatus.success(iAmSuccess.getId()))); - EasyMock.expect(taskStorage.getTask(iAmSuccess.getId())).andReturn(Optional.of((Task) iAmSuccess)).anyTimes(); + EasyMock.expect(taskStorage.getTask(iAmSuccess.getId())).andReturn(Optional.of(iAmSuccess)).anyTimes(); EasyMock.expect(taskQueue.add(EasyMock.capture(newTasksCapture))).andReturn(true).times(2); EasyMock.expect(taskClient.stopAsync(EasyMock.capture(shutdownTaskIdCapture), EasyMock.eq(false))) .andReturn(Futures.immediateFuture(true)); @@ -1156,18 +1187,21 @@ public class KinesisSupervisorTest extends EasyMockSupport supervisor = getTestableSupervisor(2, 2, true, "PT1M", null, null); - supervisorRecordSupplier.assign(anyObject()); - expectLastCall().anyTimes(); - expect(supervisorRecordSupplier.getPartitionIds(stream)).andReturn(ImmutableSet.of(shardId1, shardId0)).anyTimes(); - expect(supervisorRecordSupplier.getAssignment()).andReturn(ImmutableSet.of(shard1Partition, shard0Partition)) - .anyTimes(); - supervisorRecordSupplier.seekToLatest(anyObject()); - expectLastCall().anyTimes(); - expect(supervisorRecordSupplier.getEarliestSequenceNumber(anyObject())).andReturn("0").anyTimes(); - expect(supervisorRecordSupplier.getLatestSequenceNumber(shard1Partition)).andReturn("12").anyTimes(); - expect(supervisorRecordSupplier.getLatestSequenceNumber(shard0Partition)).andReturn("1").anyTimes(); - supervisorRecordSupplier.seek(anyObject(), anyString()); - expectLastCall().anyTimes(); + supervisorRecordSupplier.assign(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getPartitionIds(stream)) + .andReturn(ImmutableSet.of(shardId1, shardId0)) + .anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getAssignment()) + .andReturn(ImmutableSet.of(shard1Partition, shard0Partition)) + .anyTimes(); + supervisorRecordSupplier.seekToLatest(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getEarliestSequenceNumber(EasyMock.anyObject())).andReturn("0").anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(shard1Partition)).andReturn("12").anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(shard0Partition)).andReturn("1").anyTimes(); + supervisorRecordSupplier.seek(EasyMock.anyObject(), EasyMock.anyString()); + EasyMock.expectLastCall().anyTimes(); final Capture firstTasks = Capture.newInstance(CaptureType.ALL); EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); @@ -1202,7 +1236,7 @@ public class KinesisSupervisorTest extends EasyMockSupport } EasyMock.expect(taskRunner.getRunningTasks()).andReturn(workItems).anyTimes(); EasyMock.expect(taskClient.getStatusAsync(EasyMock.anyString())) - 
.andReturn(Futures.immediateFuture(Status.READING)) + .andReturn(Futures.immediateFuture(SeekableStreamIndexTaskRunner.Status.READING)) .anyTimes(); EasyMock.expect(taskClient.getStartTimeAsync(EasyMock.contains("sequenceName-0"))) .andReturn(Futures.immediateFuture(DateTimes.nowUtc().minusMinutes(2))) @@ -1292,18 +1326,21 @@ public class KinesisSupervisorTest extends EasyMockSupport supervisor = getTestableSupervisor(1, 1, true, "PT1H", null, null); - supervisorRecordSupplier.assign(anyObject()); - expectLastCall().anyTimes(); - expect(supervisorRecordSupplier.getPartitionIds(stream)).andReturn(ImmutableSet.of(shardId1, shardId0)).anyTimes(); - expect(supervisorRecordSupplier.getAssignment()).andReturn(ImmutableSet.of(shard1Partition, shard0Partition)) - .anyTimes(); - supervisorRecordSupplier.seekToLatest(anyObject()); - expectLastCall().anyTimes(); - expect(supervisorRecordSupplier.getEarliestSequenceNumber(anyObject())).andReturn("0").anyTimes(); - expect(supervisorRecordSupplier.getLatestSequenceNumber(shard1Partition)).andReturn("12").anyTimes(); - expect(supervisorRecordSupplier.getLatestSequenceNumber(shard0Partition)).andReturn("1").anyTimes(); - supervisorRecordSupplier.seek(anyObject(), anyString()); - expectLastCall().anyTimes(); + supervisorRecordSupplier.assign(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getPartitionIds(stream)) + .andReturn(ImmutableSet.of(shardId1, shardId0)) + .anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getAssignment()) + .andReturn(ImmutableSet.of(shard1Partition, shard0Partition)) + .anyTimes(); + supervisorRecordSupplier.seekToLatest(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getEarliestSequenceNumber(EasyMock.anyObject())).andReturn("0").anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(shard1Partition)).andReturn("12").anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(shard0Partition)).andReturn("1").anyTimes(); + supervisorRecordSupplier.seek(EasyMock.anyObject(), EasyMock.anyString()); + EasyMock.expectLastCall().anyTimes(); Task task = createKinesisIndexTask( "id1", @@ -1343,7 +1380,7 @@ public class KinesisSupervisorTest extends EasyMockSupport ) ).anyTimes(); EasyMock.expect(taskClient.getStatusAsync("id1")) - .andReturn(Futures.immediateFuture(Status.PUBLISHING)); + .andReturn(Futures.immediateFuture(SeekableStreamIndexTaskRunner.Status.PUBLISHING)); EasyMock.expect(taskClient.getCurrentOffsetsAsync("id1", false)) .andReturn(Futures.immediateFuture(ImmutableMap.of( shardId1, @@ -1446,18 +1483,21 @@ public class KinesisSupervisorTest extends EasyMockSupport final TaskLocation location = new TaskLocation("testHost", 1234, -1); supervisor = getTestableSupervisor(1, 1, true, "PT1H", null, null); - supervisorRecordSupplier.assign(anyObject()); - expectLastCall().anyTimes(); - expect(supervisorRecordSupplier.getPartitionIds(stream)).andReturn(ImmutableSet.of(shardId1, shardId0)).anyTimes(); - expect(supervisorRecordSupplier.getAssignment()).andReturn(ImmutableSet.of(shard1Partition, shard0Partition)) - .anyTimes(); - supervisorRecordSupplier.seekToLatest(anyObject()); - expectLastCall().anyTimes(); - expect(supervisorRecordSupplier.getEarliestSequenceNumber(anyObject())).andReturn("0").anyTimes(); - expect(supervisorRecordSupplier.getLatestSequenceNumber(shard1Partition)).andReturn("12").anyTimes(); - 
expect(supervisorRecordSupplier.getLatestSequenceNumber(shard0Partition)).andReturn("1").anyTimes(); - supervisorRecordSupplier.seek(anyObject(), anyString()); - expectLastCall().anyTimes(); + supervisorRecordSupplier.assign(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getPartitionIds(stream)) + .andReturn(ImmutableSet.of(shardId1, shardId0)) + .anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getAssignment()) + .andReturn(ImmutableSet.of(shard1Partition, shard0Partition)) + .anyTimes(); + supervisorRecordSupplier.seekToLatest(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getEarliestSequenceNumber(EasyMock.anyObject())).andReturn("0").anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(shard1Partition)).andReturn("12").anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(shard0Partition)).andReturn("1").anyTimes(); + supervisorRecordSupplier.seek(EasyMock.anyObject(), EasyMock.anyString()); + EasyMock.expectLastCall().anyTimes(); Task task = createKinesisIndexTask( "id1", @@ -1497,7 +1537,7 @@ public class KinesisSupervisorTest extends EasyMockSupport ) ).anyTimes(); EasyMock.expect(taskClient.getStatusAsync("id1")) - .andReturn(Futures.immediateFuture(Status.PUBLISHING)); + .andReturn(Futures.immediateFuture(SeekableStreamIndexTaskRunner.Status.PUBLISHING)); EasyMock.expect(taskClient.getCurrentOffsetsAsync("id1", false)) .andReturn(Futures.immediateFuture(ImmutableMap.of( shardId1, @@ -1592,18 +1632,21 @@ public class KinesisSupervisorTest extends EasyMockSupport supervisor = getTestableSupervisor(1, 1, true, "PT1H", null, null); - supervisorRecordSupplier.assign(anyObject()); - expectLastCall().anyTimes(); - expect(supervisorRecordSupplier.getPartitionIds(stream)).andReturn(ImmutableSet.of(shardId1, shardId0)).anyTimes(); - expect(supervisorRecordSupplier.getAssignment()).andReturn(ImmutableSet.of(shard1Partition, shard0Partition)) - .anyTimes(); - supervisorRecordSupplier.seekToLatest(anyObject()); - expectLastCall().anyTimes(); - expect(supervisorRecordSupplier.getEarliestSequenceNumber(anyObject())).andReturn("0").anyTimes(); - expect(supervisorRecordSupplier.getLatestSequenceNumber(shard1Partition)).andReturn("12").anyTimes(); - expect(supervisorRecordSupplier.getLatestSequenceNumber(shard0Partition)).andReturn("1").anyTimes(); - supervisorRecordSupplier.seek(anyObject(), anyString()); - expectLastCall().anyTimes(); + supervisorRecordSupplier.assign(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getPartitionIds(stream)) + .andReturn(ImmutableSet.of(shardId1, shardId0)) + .anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getAssignment()) + .andReturn(ImmutableSet.of(shard1Partition, shard0Partition)) + .anyTimes(); + supervisorRecordSupplier.seekToLatest(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getEarliestSequenceNumber(EasyMock.anyObject())).andReturn("0").anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(shard1Partition)).andReturn("12").anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(shard0Partition)).andReturn("1").anyTimes(); + supervisorRecordSupplier.seek(EasyMock.anyObject(), EasyMock.anyString()); + EasyMock.expectLastCall().anyTimes(); Task id1 = createKinesisIndexTask( "id1", DATASOURCE, @@ -1665,9 +1708,9 @@ 
public class KinesisSupervisorTest extends EasyMockSupport ) ).anyTimes(); EasyMock.expect(taskClient.getStatusAsync("id1")) - .andReturn(Futures.immediateFuture(Status.PUBLISHING)); + .andReturn(Futures.immediateFuture(SeekableStreamIndexTaskRunner.Status.PUBLISHING)); EasyMock.expect(taskClient.getStatusAsync("id2")) - .andReturn(Futures.immediateFuture(Status.READING)); + .andReturn(Futures.immediateFuture(SeekableStreamIndexTaskRunner.Status.READING)); EasyMock.expect(taskClient.getStartTimeAsync("id2")).andReturn(Futures.immediateFuture(startTime)); EasyMock.expect(taskClient.getCurrentOffsetsAsync("id1", false)) .andReturn(Futures.immediateFuture(ImmutableMap.of( @@ -1683,7 +1726,7 @@ public class KinesisSupervisorTest extends EasyMockSupport "1" )); EasyMock.expect(taskClient.getCurrentOffsetsAsync("id2", false)) - .andReturn(Futures.immediateFuture((Map) ImmutableMap.of( + .andReturn(Futures.immediateFuture(ImmutableMap.of( shardId1, "12", shardId0, @@ -1763,18 +1806,21 @@ public class KinesisSupervisorTest extends EasyMockSupport public void testKillUnresponsiveTasksWhileGettingStartTime() throws Exception { supervisor = getTestableSupervisor(2, 2, true, "PT1H", null, null); - supervisorRecordSupplier.assign(anyObject()); - expectLastCall().anyTimes(); - expect(supervisorRecordSupplier.getPartitionIds(stream)).andReturn(ImmutableSet.of(shardId1, shardId0)).anyTimes(); - expect(supervisorRecordSupplier.getAssignment()).andReturn(ImmutableSet.of(shard1Partition, shard0Partition)) - .anyTimes(); - supervisorRecordSupplier.seekToLatest(anyObject()); - expectLastCall().anyTimes(); - expect(supervisorRecordSupplier.getEarliestSequenceNumber(anyObject())).andReturn("0").anyTimes(); - expect(supervisorRecordSupplier.getLatestSequenceNumber(shard1Partition)).andReturn("12").anyTimes(); - expect(supervisorRecordSupplier.getLatestSequenceNumber(shard0Partition)).andReturn("1").anyTimes(); - supervisorRecordSupplier.seek(anyObject(), anyString()); - expectLastCall().anyTimes(); + supervisorRecordSupplier.assign(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getPartitionIds(stream)) + .andReturn(ImmutableSet.of(shardId1, shardId0)) + .anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getAssignment()) + .andReturn(ImmutableSet.of(shard1Partition, shard0Partition)) + .anyTimes(); + supervisorRecordSupplier.seekToLatest(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getEarliestSequenceNumber(EasyMock.anyObject())).andReturn("0").anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(shard1Partition)).andReturn("12").anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(shard0Partition)).andReturn("1").anyTimes(); + supervisorRecordSupplier.seek(EasyMock.anyObject(), EasyMock.anyString()); + EasyMock.expectLastCall().anyTimes(); Capture captured = Capture.newInstance(CaptureType.ALL); EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); @@ -1825,7 +1871,7 @@ public class KinesisSupervisorTest extends EasyMockSupport .anyTimes(); EasyMock.expect(taskStorage.getTask(task.getId())).andReturn(Optional.of(task)).anyTimes(); EasyMock.expect(taskClient.getStatusAsync(task.getId())) - .andReturn(Futures.immediateFuture(Status.NOT_STARTED)); + .andReturn(Futures.immediateFuture(SeekableStreamIndexTaskRunner.Status.NOT_STARTED)); EasyMock.expect(taskClient.getStartTimeAsync(task.getId())) 
.andReturn(Futures.immediateFailedFuture(new RuntimeException())); taskQueue.shutdown(task.getId(), "Task [%s] failed to return start time, killing task", task.getId()); @@ -1842,18 +1888,21 @@ public class KinesisSupervisorTest extends EasyMockSupport final TaskLocation location = new TaskLocation("testHost", 1234, -1); supervisor = getTestableSupervisor(2, 2, true, "PT1M", null, null); - supervisorRecordSupplier.assign(anyObject()); - expectLastCall().anyTimes(); - expect(supervisorRecordSupplier.getPartitionIds(stream)).andReturn(ImmutableSet.of(shardId1, shardId0)).anyTimes(); - expect(supervisorRecordSupplier.getAssignment()).andReturn(ImmutableSet.of(shard1Partition, shard0Partition)) - .anyTimes(); - supervisorRecordSupplier.seekToLatest(anyObject()); - expectLastCall().anyTimes(); - expect(supervisorRecordSupplier.getEarliestSequenceNumber(anyObject())).andReturn("0").anyTimes(); - expect(supervisorRecordSupplier.getLatestSequenceNumber(shard1Partition)).andReturn("12").anyTimes(); - expect(supervisorRecordSupplier.getLatestSequenceNumber(shard0Partition)).andReturn("1").anyTimes(); - supervisorRecordSupplier.seek(anyObject(), anyString()); - expectLastCall().anyTimes(); + supervisorRecordSupplier.assign(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getPartitionIds(stream)) + .andReturn(ImmutableSet.of(shardId1, shardId0)) + .anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getAssignment()) + .andReturn(ImmutableSet.of(shard1Partition, shard0Partition)) + .anyTimes(); + supervisorRecordSupplier.seekToLatest(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getEarliestSequenceNumber(EasyMock.anyObject())).andReturn("0").anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(shard1Partition)).andReturn("12").anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(shard0Partition)).andReturn("1").anyTimes(); + supervisorRecordSupplier.seek(EasyMock.anyObject(), EasyMock.anyString()); + EasyMock.expectLastCall().anyTimes(); Capture captured = Capture.newInstance(CaptureType.ALL); EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); @@ -1905,7 +1954,7 @@ public class KinesisSupervisorTest extends EasyMockSupport } EasyMock.expect(taskRunner.getRunningTasks()).andReturn(workItems).anyTimes(); EasyMock.expect(taskClient.getStatusAsync(EasyMock.anyString())) - .andReturn(Futures.immediateFuture(Status.READING)) + .andReturn(Futures.immediateFuture(SeekableStreamIndexTaskRunner.Status.READING)) .anyTimes(); EasyMock.expect(taskClient.getStartTimeAsync(EasyMock.contains("sequenceName-0"))) .andReturn(Futures.immediateFuture(DateTimes.nowUtc().minusMinutes(2))) @@ -1947,18 +1996,21 @@ public class KinesisSupervisorTest extends EasyMockSupport final TaskLocation location = new TaskLocation("testHost", 1234, -1); supervisor = getTestableSupervisor(2, 2, true, "PT1M", null, null); - supervisorRecordSupplier.assign(anyObject()); - expectLastCall().anyTimes(); - expect(supervisorRecordSupplier.getPartitionIds(stream)).andReturn(ImmutableSet.of(shardId1, shardId0)).anyTimes(); - expect(supervisorRecordSupplier.getAssignment()).andReturn(ImmutableSet.of(shard1Partition, shard0Partition)) - .anyTimes(); - supervisorRecordSupplier.seekToLatest(anyObject()); - expectLastCall().anyTimes(); - expect(supervisorRecordSupplier.getEarliestSequenceNumber(anyObject())).andReturn("0").anyTimes(); - 
expect(supervisorRecordSupplier.getLatestSequenceNumber(shard1Partition)).andReturn("12").anyTimes(); - expect(supervisorRecordSupplier.getLatestSequenceNumber(shard0Partition)).andReturn("1").anyTimes(); - supervisorRecordSupplier.seek(anyObject(), anyString()); - expectLastCall().anyTimes(); + supervisorRecordSupplier.assign(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getPartitionIds(stream)) + .andReturn(ImmutableSet.of(shardId1, shardId0)) + .anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getAssignment()) + .andReturn(ImmutableSet.of(shard1Partition, shard0Partition)) + .anyTimes(); + supervisorRecordSupplier.seekToLatest(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getEarliestSequenceNumber(EasyMock.anyObject())).andReturn("0").anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(shard1Partition)).andReturn("12").anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(shard0Partition)).andReturn("1").anyTimes(); + supervisorRecordSupplier.seek(EasyMock.anyObject(), EasyMock.anyString()); + EasyMock.expectLastCall().anyTimes(); Capture captured = Capture.newInstance(CaptureType.ALL); EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); @@ -2010,7 +2062,7 @@ public class KinesisSupervisorTest extends EasyMockSupport } EasyMock.expect(taskRunner.getRunningTasks()).andReturn(workItems).anyTimes(); EasyMock.expect(taskClient.getStatusAsync(EasyMock.anyString())) - .andReturn(Futures.immediateFuture(Status.READING)) + .andReturn(Futures.immediateFuture(SeekableStreamIndexTaskRunner.Status.READING)) .anyTimes(); EasyMock.expect(taskClient.getStartTimeAsync(EasyMock.contains("sequenceName-0"))) .andReturn(Futures.immediateFuture(DateTimes.nowUtc().minusMinutes(2))) @@ -2070,7 +2122,7 @@ public class KinesisSupervisorTest extends EasyMockSupport public void testStop() { supervisorRecordSupplier.close(); - expectLastCall().anyTimes(); + EasyMock.expectLastCall().anyTimes(); EasyMock.expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); taskClient.close(); @@ -2092,18 +2144,21 @@ public class KinesisSupervisorTest extends EasyMockSupport final DateTime startTime = DateTimes.nowUtc(); supervisor = getTestableSupervisor(2, 1, true, "PT1H", null, null); - supervisorRecordSupplier.assign(anyObject()); - expectLastCall().anyTimes(); - expect(supervisorRecordSupplier.getPartitionIds(stream)).andReturn(ImmutableSet.of(shardId1, shardId0)).anyTimes(); - expect(supervisorRecordSupplier.getAssignment()).andReturn(ImmutableSet.of(shard1Partition, shard0Partition)) - .anyTimes(); - supervisorRecordSupplier.seekToLatest(anyObject()); - expectLastCall().anyTimes(); - expect(supervisorRecordSupplier.getEarliestSequenceNumber(anyObject())).andReturn("0").anyTimes(); - expect(supervisorRecordSupplier.getLatestSequenceNumber(shard1Partition)).andReturn("12").anyTimes(); - expect(supervisorRecordSupplier.getLatestSequenceNumber(shard0Partition)).andReturn("1").anyTimes(); - supervisorRecordSupplier.seek(anyObject(), anyString()); - expectLastCall().anyTimes(); + supervisorRecordSupplier.assign(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getPartitionIds(stream)) + .andReturn(ImmutableSet.of(shardId1, shardId0)) + .anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getAssignment()) + 
.andReturn(ImmutableSet.of(shard1Partition, shard0Partition)) + .anyTimes(); + supervisorRecordSupplier.seekToLatest(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getEarliestSequenceNumber(EasyMock.anyObject())).andReturn("0").anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(shard1Partition)).andReturn("12").anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(shard0Partition)).andReturn("1").anyTimes(); + supervisorRecordSupplier.seek(EasyMock.anyObject(), EasyMock.anyString()); + EasyMock.expectLastCall().anyTimes(); Task id1 = createKinesisIndexTask( "id1", @@ -2196,11 +2251,11 @@ public class KinesisSupervisorTest extends EasyMockSupport ) ).anyTimes(); EasyMock.expect(taskClient.getStatusAsync("id1")) - .andReturn(Futures.immediateFuture(Status.PUBLISHING)); + .andReturn(Futures.immediateFuture(SeekableStreamIndexTaskRunner.Status.PUBLISHING)); EasyMock.expect(taskClient.getStatusAsync("id2")) - .andReturn(Futures.immediateFuture(Status.READING)); + .andReturn(Futures.immediateFuture(SeekableStreamIndexTaskRunner.Status.READING)); EasyMock.expect(taskClient.getStatusAsync("id3")) - .andReturn(Futures.immediateFuture(Status.READING)); + .andReturn(Futures.immediateFuture(SeekableStreamIndexTaskRunner.Status.READING)); EasyMock.expect(taskClient.getStartTimeAsync("id2")).andReturn(Futures.immediateFuture(startTime)); EasyMock.expect(taskClient.getStartTimeAsync("id3")).andReturn(Futures.immediateFuture(startTime)); EasyMock.expect(taskClient.getEndOffsets("id1")).andReturn(ImmutableMap.of( @@ -2235,7 +2290,7 @@ public class KinesisSupervisorTest extends EasyMockSupport EasyMock.reset(taskRunner, taskClient, taskQueue); EasyMock.expect(taskRunner.getRunningTasks()).andReturn(workItems).anyTimes(); EasyMock.expect(taskClient.pauseAsync("id2")) - .andReturn(Futures.immediateFuture((Map) ImmutableMap.of( + .andReturn(Futures.immediateFuture(ImmutableMap.of( shardId1, "12", shardId0, @@ -2249,9 +2304,9 @@ public class KinesisSupervisorTest extends EasyMockSupport ), true)) .andReturn(Futures.immediateFuture(true)); taskQueue.shutdown("id3", "Killing task for graceful shutdown"); - expectLastCall().times(1); + EasyMock.expectLastCall().times(1); taskQueue.shutdown("id3", "Killing task [%s] which hasn't been assigned to a worker", "id3"); - expectLastCall().times(1); + EasyMock.expectLastCall().times(1); EasyMock.replay(taskRunner, taskClient, taskQueue); @@ -2262,7 +2317,9 @@ public class KinesisSupervisorTest extends EasyMockSupport @Test public void testResetNoTasks() { - expect(supervisorRecordSupplier.getPartitionIds(anyObject())).andReturn(Collections.emptySet()).anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getPartitionIds(EasyMock.anyObject())) + .andReturn(Collections.emptySet()) + .anyTimes(); EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); EasyMock.expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); @@ -2290,7 +2347,9 @@ public class KinesisSupervisorTest extends EasyMockSupport @Test public void testResetDataSourceMetadata() throws Exception { - expect(supervisorRecordSupplier.getPartitionIds(anyObject())).andReturn(Collections.emptySet()).anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getPartitionIds(EasyMock.anyObject())) + .andReturn(Collections.emptySet()) + .anyTimes(); supervisor = getTestableSupervisor(1, 1, true, "PT1H", null, null); 
EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); EasyMock.expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); @@ -2356,7 +2415,7 @@ public class KinesisSupervisorTest extends EasyMockSupport catch (NullPointerException npe) { // Expected as there will be an attempt to EasyMock.reset partitionGroups sequences to NOT_SET // however there would be no entries in the map as we have not put nay data in kafka - Assert.assertTrue(npe.getCause() == null); + Assert.assertNull(npe.getCause()); } verifyAll(); @@ -2367,7 +2426,9 @@ public class KinesisSupervisorTest extends EasyMockSupport @Test public void testResetNoDataSourceMetadata() { - expect(supervisorRecordSupplier.getPartitionIds(anyObject())).andReturn(Collections.emptySet()).anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getPartitionIds(EasyMock.anyObject())) + .andReturn(Collections.emptySet()) + .anyTimes(); supervisor = getTestableSupervisor(1, 1, true, "PT1H", null, null); EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); EasyMock.expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); @@ -2405,18 +2466,21 @@ public class KinesisSupervisorTest extends EasyMockSupport final DateTime startTime = DateTimes.nowUtc(); supervisor = getTestableSupervisor(2, 1, true, "PT1H", null, null); - supervisorRecordSupplier.assign(anyObject()); - expectLastCall().anyTimes(); - expect(supervisorRecordSupplier.getPartitionIds(stream)).andReturn(ImmutableSet.of(shardId1, shardId0)).anyTimes(); - expect(supervisorRecordSupplier.getAssignment()).andReturn(ImmutableSet.of(shard1Partition, shard0Partition)) - .anyTimes(); - supervisorRecordSupplier.seekToLatest(anyObject()); - expectLastCall().anyTimes(); - expect(supervisorRecordSupplier.getEarliestSequenceNumber(anyObject())).andReturn("0").anyTimes(); - expect(supervisorRecordSupplier.getLatestSequenceNumber(shard1Partition)).andReturn("12").anyTimes(); - expect(supervisorRecordSupplier.getLatestSequenceNumber(shard0Partition)).andReturn("1").anyTimes(); - supervisorRecordSupplier.seek(anyObject(), anyString()); - expectLastCall().anyTimes(); + supervisorRecordSupplier.assign(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getPartitionIds(stream)) + .andReturn(ImmutableSet.of(shardId1, shardId0)) + .anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getAssignment()) + .andReturn(ImmutableSet.of(shard1Partition, shard0Partition)) + .anyTimes(); + supervisorRecordSupplier.seekToLatest(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getEarliestSequenceNumber(EasyMock.anyObject())).andReturn("0").anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(shard1Partition)).andReturn("12").anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(shard0Partition)).andReturn("1").anyTimes(); + supervisorRecordSupplier.seek(EasyMock.anyObject(), EasyMock.anyString()); + EasyMock.expectLastCall().anyTimes(); Task id1 = createKinesisIndexTask( "id1", @@ -2504,11 +2568,11 @@ public class KinesisSupervisorTest extends EasyMockSupport ) ).anyTimes(); EasyMock.expect(taskClient.getStatusAsync("id1")) - .andReturn(Futures.immediateFuture(Status.PUBLISHING)); + .andReturn(Futures.immediateFuture(SeekableStreamIndexTaskRunner.Status.PUBLISHING)); EasyMock.expect(taskClient.getStatusAsync("id2")) - 
.andReturn(Futures.immediateFuture(Status.READING)); + .andReturn(Futures.immediateFuture(SeekableStreamIndexTaskRunner.Status.READING)); EasyMock.expect(taskClient.getStatusAsync("id3")) - .andReturn(Futures.immediateFuture(Status.READING)); + .andReturn(Futures.immediateFuture(SeekableStreamIndexTaskRunner.Status.READING)); EasyMock.expect(taskClient.getStartTimeAsync("id2")).andReturn(Futures.immediateFuture(startTime)); EasyMock.expect(taskClient.getStartTimeAsync("id3")).andReturn(Futures.immediateFuture(startTime)); EasyMock.expect(taskClient.getEndOffsets("id1")).andReturn(ImmutableMap.of( @@ -2628,18 +2692,21 @@ public class KinesisSupervisorTest extends EasyMockSupport null ); - supervisorRecordSupplier.assign(anyObject()); - expectLastCall().anyTimes(); - expect(supervisorRecordSupplier.getPartitionIds(stream)).andReturn(ImmutableSet.of(shardId1, shardId0)).anyTimes(); - expect(supervisorRecordSupplier.getAssignment()).andReturn(ImmutableSet.of(shard1Partition, shard0Partition)) - .anyTimes(); - supervisorRecordSupplier.seekToLatest(anyObject()); - expectLastCall().anyTimes(); - expect(supervisorRecordSupplier.getEarliestSequenceNumber(anyObject())).andReturn("0").anyTimes(); - expect(supervisorRecordSupplier.getLatestSequenceNumber(shard1Partition)).andReturn("12").anyTimes(); - expect(supervisorRecordSupplier.getLatestSequenceNumber(shard0Partition)).andReturn("1").anyTimes(); - supervisorRecordSupplier.seek(anyObject(), anyString()); - expectLastCall().anyTimes(); + supervisorRecordSupplier.assign(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getPartitionIds(stream)) + .andReturn(ImmutableSet.of(shardId1, shardId0)) + .anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getAssignment()) + .andReturn(ImmutableSet.of(shard1Partition, shard0Partition)) + .anyTimes(); + supervisorRecordSupplier.seekToLatest(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getEarliestSequenceNumber(EasyMock.anyObject())).andReturn("0").anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(shard1Partition)).andReturn("12").anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(shard0Partition)).andReturn("1").anyTimes(); + supervisorRecordSupplier.seek(EasyMock.anyObject(), EasyMock.anyString()); + EasyMock.expectLastCall().anyTimes(); EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); EasyMock.expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); EasyMock.expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of(id1, id2, id3)).anyTimes(); @@ -2655,11 +2722,11 @@ public class KinesisSupervisorTest extends EasyMockSupport ) ).anyTimes(); EasyMock.expect(taskClient.getStatusAsync("id1")) - .andReturn(Futures.immediateFuture(Status.READING)); + .andReturn(Futures.immediateFuture(SeekableStreamIndexTaskRunner.Status.READING)); EasyMock.expect(taskClient.getStatusAsync("id2")) - .andReturn(Futures.immediateFuture(Status.READING)); + .andReturn(Futures.immediateFuture(SeekableStreamIndexTaskRunner.Status.READING)); EasyMock.expect(taskClient.getStatusAsync("id3")) - .andReturn(Futures.immediateFuture(Status.READING)); + .andReturn(Futures.immediateFuture(SeekableStreamIndexTaskRunner.Status.READING)); EasyMock.expect(taskClient.getStartTimeAsync("id1")).andReturn(Futures.immediateFuture(startTime)); 
EasyMock.expect(taskClient.getStartTimeAsync("id2")).andReturn(Futures.immediateFuture(startTime)); EasyMock.expect(taskClient.getStartTimeAsync("id3")).andReturn(Futures.immediateFuture(startTime)); @@ -2705,7 +2772,7 @@ public class KinesisSupervisorTest extends EasyMockSupport { supervisor = getTestableSupervisor(2, 1, true, "PT1S", null, null, false); //not adding any events - final Task id1; + final KinesisIndexTask id1; id1 = createKinesisIndexTask( "id1", DATASOURCE, @@ -2779,53 +2846,59 @@ public class KinesisSupervisorTest extends EasyMockSupport workItems.add(new TestTaskRunnerWorkItem(id2, null, location2)); workItems.add(new TestTaskRunnerWorkItem(id2, null, location2)); - supervisorRecordSupplier.assign(anyObject()); - expectLastCall().anyTimes(); - expect(supervisorRecordSupplier.getPartitionIds(stream)).andReturn(ImmutableSet.of(shardId1, shardId0)).anyTimes(); - expect(supervisorRecordSupplier.getAssignment()).andReturn(ImmutableSet.of(shard1Partition, shard0Partition)) - .anyTimes(); - supervisorRecordSupplier.seekToLatest(anyObject()); - expectLastCall().anyTimes(); - expect(supervisorRecordSupplier.getEarliestSequenceNumber(anyObject())).andReturn("0").anyTimes(); - expect(supervisorRecordSupplier.getLatestSequenceNumber(shard1Partition)).andReturn("12").anyTimes(); - expect(supervisorRecordSupplier.getLatestSequenceNumber(shard0Partition)).andReturn("1").anyTimes(); - supervisorRecordSupplier.seek(anyObject(), anyString()); - expectLastCall().anyTimes(); + supervisorRecordSupplier.assign(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getPartitionIds(stream)) + .andReturn(ImmutableSet.of(shardId1, shardId0)) + .anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getAssignment()) + .andReturn(ImmutableSet.of(shard1Partition, shard0Partition)) + .anyTimes(); + supervisorRecordSupplier.seekToLatest(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getEarliestSequenceNumber(EasyMock.anyObject())).andReturn("0").anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(shard1Partition)).andReturn("12").anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(shard0Partition)).andReturn("1").anyTimes(); + supervisorRecordSupplier.seek(EasyMock.anyObject(), EasyMock.anyString()); + EasyMock.expectLastCall().anyTimes(); - expect(taskRunner.getRunningTasks()).andReturn(workItems).anyTimes(); - expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); - expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); - expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of(id1, id2, id3)).anyTimes(); - expect(taskStorage.getStatus("id1")).andReturn(Optional.of(TaskStatus.running("id1"))).anyTimes(); - expect(taskStorage.getStatus("id2")).andReturn(Optional.of(TaskStatus.running("id2"))).anyTimes(); - expect(taskStorage.getStatus("id3")).andReturn(Optional.of(TaskStatus.running("id3"))).anyTimes(); - expect(taskStorage.getTask("id1")).andReturn(Optional.of(id1)).anyTimes(); - expect(taskStorage.getTask("id2")).andReturn(Optional.of(id2)).anyTimes(); - expect(taskStorage.getTask("id3")).andReturn(Optional.of(id3)).anyTimes(); - expect( + EasyMock.expect(taskRunner.getRunningTasks()).andReturn(workItems).anyTimes(); + EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); + 
EasyMock.expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); + EasyMock.expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of(id1, id2, id3)).anyTimes(); + EasyMock.expect(taskStorage.getStatus("id1")).andReturn(Optional.of(TaskStatus.running("id1"))).anyTimes(); + EasyMock.expect(taskStorage.getStatus("id2")).andReturn(Optional.of(TaskStatus.running("id2"))).anyTimes(); + EasyMock.expect(taskStorage.getStatus("id3")).andReturn(Optional.of(TaskStatus.running("id3"))).anyTimes(); + EasyMock.expect(taskStorage.getTask("id1")).andReturn(Optional.of(id1)).anyTimes(); + EasyMock.expect(taskStorage.getTask("id2")).andReturn(Optional.of(id2)).anyTimes(); + EasyMock.expect(taskStorage.getTask("id3")).andReturn(Optional.of(id3)).anyTimes(); + EasyMock.expect( indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn(new KinesisDataSourceMetadata( null) ).anyTimes(); - expect(taskClient.getStatusAsync("id1")).andReturn(Futures.immediateFuture(Status.READING)); - expect(taskClient.getStatusAsync("id2")).andReturn(Futures.immediateFuture(Status.READING)); - expect(taskClient.getStatusAsync("id3")).andReturn(Futures.immediateFuture(Status.READING)); + EasyMock.expect(taskClient.getStatusAsync("id1")) + .andReturn(Futures.immediateFuture(SeekableStreamIndexTaskRunner.Status.READING)); + EasyMock.expect(taskClient.getStatusAsync("id2")) + .andReturn(Futures.immediateFuture(SeekableStreamIndexTaskRunner.Status.READING)); + EasyMock.expect(taskClient.getStatusAsync("id3")) + .andReturn(Futures.immediateFuture(SeekableStreamIndexTaskRunner.Status.READING)); final DateTime startTime = DateTimes.nowUtc(); - expect(taskClient.getStartTimeAsync("id1")).andReturn(Futures.immediateFuture(startTime)); - expect(taskClient.getStartTimeAsync("id2")).andReturn(Futures.immediateFuture(startTime)); - expect(taskClient.getStartTimeAsync("id3")).andReturn(Futures.immediateFuture(startTime)); + EasyMock.expect(taskClient.getStartTimeAsync("id1")).andReturn(Futures.immediateFuture(startTime)); + EasyMock.expect(taskClient.getStartTimeAsync("id2")).andReturn(Futures.immediateFuture(startTime)); + EasyMock.expect(taskClient.getStartTimeAsync("id3")).andReturn(Futures.immediateFuture(startTime)); final TreeMap> checkpoints = new TreeMap<>(); checkpoints.put(0, ImmutableMap.of(shardId1, "10", shardId0, "20")); - expect(taskClient.getCheckpointsAsync(EasyMock.contains("id1"), EasyMock.anyBoolean())) - .andReturn(Futures.immediateFuture(checkpoints)) - .times(1); - expect(taskClient.getCheckpointsAsync(EasyMock.contains("id2"), EasyMock.anyBoolean())) - .andReturn(Futures.immediateFuture(checkpoints)) - .times(1); - expect(taskClient.getCheckpointsAsync(EasyMock.contains("id3"), EasyMock.anyBoolean())) - .andReturn(Futures.immediateFuture(checkpoints)) - .times(1); + EasyMock.expect(taskClient.getCheckpointsAsync(EasyMock.contains("id1"), EasyMock.anyBoolean())) + .andReturn(Futures.immediateFuture(checkpoints)) + .times(1); + EasyMock.expect(taskClient.getCheckpointsAsync(EasyMock.contains("id2"), EasyMock.anyBoolean())) + .andReturn(Futures.immediateFuture(checkpoints)) + .times(1); + EasyMock.expect(taskClient.getCheckpointsAsync(EasyMock.contains("id3"), EasyMock.anyBoolean())) + .andReturn(Futures.immediateFuture(checkpoints)) + .times(1); taskRunner.registerListener(EasyMock.anyObject(TaskRunnerListener.class), EasyMock.anyObject(Executor.class)); replayAll(); @@ -2837,7 +2910,7 @@ public class KinesisSupervisorTest extends EasyMockSupport 
supervisor.moveTaskGroupToPendingCompletion(0); supervisor.checkpoint( 0, - ((KinesisIndexTask) id1).getIOConfig().getBaseSequenceName(), + id1.getIOConfig().getBaseSequenceName(), new KinesisDataSourceMetadata( new SeekableStreamStartSequenceNumbers<>(stream, checkpoints.get(0), checkpoints.get(0).keySet()) ), @@ -2866,21 +2939,24 @@ public class KinesisSupervisorTest extends EasyMockSupport supervisor = getTestableSupervisor(2, 1, true, "PT1S", null, null, false); - supervisorRecordSupplier.assign(anyObject()); - expectLastCall().anyTimes(); - expect(supervisorRecordSupplier.getPartitionIds(stream)).andReturn(ImmutableSet.of(shardId1, shardId0)).anyTimes(); - expect(supervisorRecordSupplier.getAssignment()).andReturn(ImmutableSet.of(shard1Partition, shard0Partition)) - .anyTimes(); - supervisorRecordSupplier.seekToLatest(anyObject()); - expectLastCall().anyTimes(); - expect(supervisorRecordSupplier.getEarliestSequenceNumber(anyObject())).andReturn("0").anyTimes(); - expect(supervisorRecordSupplier.getLatestSequenceNumber(shard1Partition)).andReturn("12").anyTimes(); - expect(supervisorRecordSupplier.getLatestSequenceNumber(shard0Partition)).andReturn("1").anyTimes(); - supervisorRecordSupplier.seek(anyObject(), anyString()); - expectLastCall().anyTimes(); + supervisorRecordSupplier.assign(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getPartitionIds(stream)) + .andReturn(ImmutableSet.of(shardId1, shardId0)) + .anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getAssignment()) + .andReturn(ImmutableSet.of(shard1Partition, shard0Partition)) + .anyTimes(); + supervisorRecordSupplier.seekToLatest(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getEarliestSequenceNumber(EasyMock.anyObject())).andReturn("0").anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(shard1Partition)).andReturn("12").anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(shard0Partition)).andReturn("1").anyTimes(); + supervisorRecordSupplier.seek(EasyMock.anyObject(), EasyMock.anyString()); + EasyMock.expectLastCall().anyTimes(); //not adding any events - final Task id1 = createKinesisIndexTask( + final KinesisIndexTask id1 = createKinesisIndexTask( "id1", DATASOURCE, 0, @@ -2946,16 +3022,16 @@ public class KinesisSupervisorTest extends EasyMockSupport null ); - expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); - expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); - expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of(id1, id2, id3)).anyTimes(); - expect(taskStorage.getStatus("id1")).andReturn(Optional.of(TaskStatus.running("id1"))).anyTimes(); - expect(taskStorage.getStatus("id2")).andReturn(Optional.of(TaskStatus.running("id2"))).anyTimes(); - expect(taskStorage.getStatus("id3")).andReturn(Optional.of(TaskStatus.running("id3"))).anyTimes(); - expect(taskStorage.getTask("id1")).andReturn(Optional.of(id1)).anyTimes(); - expect(taskStorage.getTask("id2")).andReturn(Optional.of(id2)).anyTimes(); - expect(taskStorage.getTask("id3")).andReturn(Optional.of(id3)).anyTimes(); - expect( + EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); + EasyMock.expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); + EasyMock.expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of(id1, id2, id3)).anyTimes(); + 
EasyMock.expect(taskStorage.getStatus("id1")).andReturn(Optional.of(TaskStatus.running("id1"))).anyTimes(); + EasyMock.expect(taskStorage.getStatus("id2")).andReturn(Optional.of(TaskStatus.running("id2"))).anyTimes(); + EasyMock.expect(taskStorage.getStatus("id3")).andReturn(Optional.of(TaskStatus.running("id3"))).anyTimes(); + EasyMock.expect(taskStorage.getTask("id1")).andReturn(Optional.of(id1)).anyTimes(); + EasyMock.expect(taskStorage.getTask("id2")).andReturn(Optional.of(id2)).anyTimes(); + EasyMock.expect(taskStorage.getTask("id3")).andReturn(Optional.of(id3)).anyTimes(); + EasyMock.expect( indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn(new KinesisDataSourceMetadata( null) ).anyTimes(); @@ -2966,7 +3042,7 @@ public class KinesisSupervisorTest extends EasyMockSupport supervisor.checkpoint( 0, - ((KinesisIndexTask) id1).getIOConfig().getBaseSequenceName(), + id1.getIOConfig().getBaseSequenceName(), new KinesisDataSourceMetadata(new SeekableStreamStartSequenceNumbers<>( stream, Collections.emptyMap(), @@ -3005,7 +3081,7 @@ public class KinesisSupervisorTest extends EasyMockSupport supervisor.getStateManager().markRunFinished(); //not adding any events - final Task id1 = createKinesisIndexTask( + final KinesisIndexTask id1 = createKinesisIndexTask( "id1", DATASOURCE, 0, @@ -3044,42 +3120,42 @@ public class KinesisSupervisorTest extends EasyMockSupport null ); - expect(supervisorRecordSupplier.getPartitionIds(stream)).andReturn(Collections.emptySet()).anyTimes(); - expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); - expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); - expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of(id1, id2, id3)).anyTimes(); - expect(taskStorage.getStatus("id1")).andReturn(Optional.of(TaskStatus.running("id1"))).anyTimes(); - expect(taskStorage.getStatus("id2")).andReturn(Optional.of(TaskStatus.running("id2"))).anyTimes(); - expect(taskStorage.getStatus("id3")).andReturn(Optional.of(TaskStatus.running("id3"))).anyTimes(); - expect(taskStorage.getTask("id1")).andReturn(Optional.of(id1)).anyTimes(); - expect(taskStorage.getTask("id2")).andReturn(Optional.of(id2)).anyTimes(); - expect(taskStorage.getTask("id3")).andReturn(Optional.of(id3)).anyTimes(); - expect( + EasyMock.expect(supervisorRecordSupplier.getPartitionIds(stream)).andReturn(Collections.emptySet()).anyTimes(); + EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); + EasyMock.expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); + EasyMock.expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of(id1, id2, id3)).anyTimes(); + EasyMock.expect(taskStorage.getStatus("id1")).andReturn(Optional.of(TaskStatus.running("id1"))).anyTimes(); + EasyMock.expect(taskStorage.getStatus("id2")).andReturn(Optional.of(TaskStatus.running("id2"))).anyTimes(); + EasyMock.expect(taskStorage.getStatus("id3")).andReturn(Optional.of(TaskStatus.running("id3"))).anyTimes(); + EasyMock.expect(taskStorage.getTask("id1")).andReturn(Optional.of(id1)).anyTimes(); + EasyMock.expect(taskStorage.getTask("id2")).andReturn(Optional.of(id2)).anyTimes(); + EasyMock.expect(taskStorage.getTask("id3")).andReturn(Optional.of(id3)).anyTimes(); + EasyMock.expect( indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn(new KinesisDataSourceMetadata( null) ).anyTimes(); taskRunner.registerListener(EasyMock.anyObject(TaskRunnerListener.class), 
EasyMock.anyObject(Executor.class)); - expect(taskClient.getStatusAsync(EasyMock.anyString())) - .andReturn(Futures.immediateFuture(Status.READING)) - .anyTimes(); + EasyMock.expect(taskClient.getStatusAsync(EasyMock.anyString())) + .andReturn(Futures.immediateFuture(SeekableStreamIndexTaskRunner.Status.READING)) + .anyTimes(); final TreeMap> checkpoints = new TreeMap<>(); checkpoints.put(0, ImmutableMap.of(shardId1, "0")); - expect(taskClient.getCheckpointsAsync(EasyMock.anyString(), EasyMock.anyBoolean())) - .andReturn(Futures.immediateFuture(checkpoints)) - .times(3); - expect(taskClient.getStartTimeAsync(EasyMock.anyString())) - .andReturn(Futures.immediateFuture(DateTimes.nowUtc())) - .anyTimes(); - expect(taskClient.pauseAsync(EasyMock.anyString())) - .andReturn(Futures.immediateFuture(ImmutableMap.of(shardId1, "10"))) - .anyTimes(); - expect(taskClient.setEndOffsetsAsync( + EasyMock.expect(taskClient.getCheckpointsAsync(EasyMock.anyString(), EasyMock.anyBoolean())) + .andReturn(Futures.immediateFuture(checkpoints)) + .times(3); + EasyMock.expect(taskClient.getStartTimeAsync(EasyMock.anyString())) + .andReturn(Futures.immediateFuture(DateTimes.nowUtc())) + .anyTimes(); + EasyMock.expect(taskClient.pauseAsync(EasyMock.anyString())) + .andReturn(Futures.immediateFuture(ImmutableMap.of(shardId1, "10"))) + .anyTimes(); + EasyMock.expect(taskClient.setEndOffsetsAsync( EasyMock.anyString(), EasyMock.eq(ImmutableMap.of(shardId1, "10")), EasyMock.anyBoolean() )) - .andReturn(Futures.immediateFuture(true)) - .anyTimes(); + .andReturn(Futures.immediateFuture(true)) + .anyTimes(); replayAll(); @@ -3091,7 +3167,7 @@ public class KinesisSupervisorTest extends EasyMockSupport newCheckpoints.put(0, ImmutableMap.of(shardId1, "10")); supervisor.checkpoint( null, - ((KinesisIndexTask) id1).getIOConfig().getBaseSequenceName(), + id1.getIOConfig().getBaseSequenceName(), new KinesisDataSourceMetadata(new SeekableStreamStartSequenceNumbers<>( stream, checkpoints.get(0), @@ -3117,7 +3193,9 @@ public class KinesisSupervisorTest extends EasyMockSupport { supervisor = getTestableSupervisor(1, 1, true, "PT1H", null, null, true); - expect(supervisorRecordSupplier.getPartitionIds(anyObject())).andReturn(Collections.emptySet()).anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getPartitionIds(EasyMock.anyObject())) + .andReturn(Collections.emptySet()) + .anyTimes(); EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); EasyMock.expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); EasyMock.expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); @@ -3150,18 +3228,21 @@ public class KinesisSupervisorTest extends EasyMockSupport supervisor = getTestableSupervisor(2, 1, true, "PT1H", null, null, true); - supervisorRecordSupplier.assign(anyObject()); - expectLastCall().anyTimes(); - expect(supervisorRecordSupplier.getPartitionIds(stream)).andReturn(ImmutableSet.of(shardId1, shardId0)).anyTimes(); - expect(supervisorRecordSupplier.getAssignment()).andReturn(ImmutableSet.of(shard1Partition, shard0Partition)) - .anyTimes(); - supervisorRecordSupplier.seekToLatest(anyObject()); - expectLastCall().anyTimes(); - expect(supervisorRecordSupplier.getEarliestSequenceNumber(anyObject())).andReturn("0").anyTimes(); - expect(supervisorRecordSupplier.getLatestSequenceNumber(shard1Partition)).andReturn("12").anyTimes(); - expect(supervisorRecordSupplier.getLatestSequenceNumber(shard0Partition)).andReturn("1").anyTimes(); - 
supervisorRecordSupplier.seek(anyObject(), anyString()); - expectLastCall().anyTimes(); + supervisorRecordSupplier.assign(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getPartitionIds(stream)) + .andReturn(ImmutableSet.of(shardId1, shardId0)) + .anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getAssignment()) + .andReturn(ImmutableSet.of(shard1Partition, shard0Partition)) + .anyTimes(); + supervisorRecordSupplier.seekToLatest(EasyMock.anyObject()); + EasyMock.expectLastCall().anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getEarliestSequenceNumber(EasyMock.anyObject())).andReturn("0").anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(shard1Partition)).andReturn("12").anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getLatestSequenceNumber(shard0Partition)).andReturn("1").anyTimes(); + supervisorRecordSupplier.seek(EasyMock.anyObject(), EasyMock.anyString()); + EasyMock.expectLastCall().anyTimes(); Task id1 = createKinesisIndexTask( @@ -3250,11 +3331,11 @@ public class KinesisSupervisorTest extends EasyMockSupport ) ).anyTimes(); EasyMock.expect(taskClient.getStatusAsync("id1")) - .andReturn(Futures.immediateFuture(Status.PUBLISHING)); + .andReturn(Futures.immediateFuture(SeekableStreamIndexTaskRunner.Status.PUBLISHING)); EasyMock.expect(taskClient.getStatusAsync("id2")) - .andReturn(Futures.immediateFuture(Status.READING)); + .andReturn(Futures.immediateFuture(SeekableStreamIndexTaskRunner.Status.READING)); EasyMock.expect(taskClient.getStatusAsync("id3")) - .andReturn(Futures.immediateFuture(Status.READING)); + .andReturn(Futures.immediateFuture(SeekableStreamIndexTaskRunner.Status.READING)); EasyMock.expect(taskClient.getStartTimeAsync("id2")).andReturn(Futures.immediateFuture(startTime)); EasyMock.expect(taskClient.getStartTimeAsync("id3")).andReturn(Futures.immediateFuture(startTime)); EasyMock.expect(taskClient.getEndOffsets("id1")).andReturn(ImmutableMap.of( @@ -3296,9 +3377,9 @@ public class KinesisSupervisorTest extends EasyMockSupport ), true)) .andReturn(Futures.immediateFuture(true)); taskQueue.shutdown("id3", "Killing task for graceful shutdown"); - expectLastCall().times(1); + EasyMock.expectLastCall().times(1); taskQueue.shutdown("id3", "Killing task [%s] which hasn't been assigned to a worker", "id3"); - expectLastCall().times(1); + EasyMock.expectLastCall().times(1); replayAll(); supervisor.start(); @@ -3309,7 +3390,9 @@ public class KinesisSupervisorTest extends EasyMockSupport @Test public void testResetSuspended() { - expect(supervisorRecordSupplier.getPartitionIds(anyObject())).andReturn(Collections.emptySet()).anyTimes(); + EasyMock.expect(supervisorRecordSupplier.getPartitionIds(EasyMock.anyObject())) + .andReturn(Collections.emptySet()) + .anyTimes(); EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); EasyMock.expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); EasyMock.expect(taskRunner.getRunningTasks()).andReturn(Collections.EMPTY_LIST).anyTimes(); @@ -3353,12 +3436,12 @@ public class KinesisSupervisorTest extends EasyMockSupport ImmutableSet.of() ); - expect(taskClient.getMovingAveragesAsync("task1")).andReturn(Futures.immediateFuture(ImmutableMap.of( + EasyMock.expect(taskClient.getMovingAveragesAsync("task1")).andReturn(Futures.immediateFuture(ImmutableMap.of( "prop1", "val1" ))).times(1); - 
expect(taskClient.getMovingAveragesAsync("task2")).andReturn(Futures.immediateFuture(ImmutableMap.of( + EasyMock.expect(taskClient.getMovingAveragesAsync("task2")).andReturn(Futures.immediateFuture(ImmutableMap.of( "prop2", "val2" ))).times(1); @@ -3440,7 +3523,7 @@ public class KinesisSupervisorTest extends EasyMockSupport EasyMock.expect(taskStorage.getStatus("id2")).andReturn(Optional.of(TaskStatus.running("id2"))).anyTimes(); EasyMock.expect(taskStorage.getTask("id2")).andReturn(Optional.of(task)).anyTimes(); EasyMock.expect(taskClient.getStatusAsync(EasyMock.anyString())) - .andReturn(Futures.immediateFuture(Status.NOT_STARTED)) + .andReturn(Futures.immediateFuture(SeekableStreamIndexTaskRunner.Status.NOT_STARTED)) .anyTimes(); EasyMock.expect(taskClient.getStartTimeAsync(EasyMock.anyString())) .andReturn(Futures.immediateFuture(DateTimes.nowUtc())) @@ -3536,7 +3619,7 @@ public class KinesisSupervisorTest extends EasyMockSupport EasyMock.expect(taskStorage.getStatus("id1")).andReturn(Optional.of(TaskStatus.running("id1"))).anyTimes(); EasyMock.expect(taskStorage.getTask("id1")).andReturn(Optional.of(task)).anyTimes(); EasyMock.expect(taskClient.getStatusAsync(EasyMock.anyString())) - .andReturn(Futures.immediateFuture(Status.NOT_STARTED)) + .andReturn(Futures.immediateFuture(SeekableStreamIndexTaskRunner.Status.NOT_STARTED)) .anyTimes(); EasyMock.expect(taskClient.getStartTimeAsync(EasyMock.anyString())) .andReturn(Futures.immediateFuture(DateTimes.nowUtc())) @@ -4215,9 +4298,7 @@ public class KinesisSupervisorTest extends EasyMockSupport private class TestableKinesisSupervisor extends KinesisSupervisor { - private final KinesisSupervisorSpec spec; - - public TestableKinesisSupervisor( + TestableKinesisSupervisor( TaskStorage taskStorage, TaskMaster taskMaster, IndexerMetadataStorageCoordinator indexerMetadataStorageCoordinator, @@ -4237,7 +4318,6 @@ public class KinesisSupervisorTest extends EasyMockSupport rowIngestionMetersFactory, null ); - this.spec = spec; } @Override @@ -4267,9 +4347,9 @@ public class KinesisSupervisorTest extends EasyMockSupport private class TestableKinesisSupervisorWithCustomIsTaskCurrent extends TestableKinesisSupervisor { - private boolean isTaskCurrentReturn; + private final boolean isTaskCurrentReturn; - public TestableKinesisSupervisorWithCustomIsTaskCurrent( + TestableKinesisSupervisorWithCustomIsTaskCurrent( TaskStorage taskStorage, TaskMaster taskMaster, IndexerMetadataStorageCoordinator indexerMetadataStorageCoordinator, diff --git a/extensions-core/orc-extensions/src/test/java/org/apache/druid/data/input/orc/OrcHadoopInputRowParserTest.java b/extensions-core/orc-extensions/src/test/java/org/apache/druid/data/input/orc/OrcHadoopInputRowParserTest.java index 8dd399fa0dd..e588e60bdee 100644 --- a/extensions-core/orc-extensions/src/test/java/org/apache/druid/data/input/orc/OrcHadoopInputRowParserTest.java +++ b/extensions-core/orc-extensions/src/test/java/org/apache/druid/data/input/orc/OrcHadoopInputRowParserTest.java @@ -37,6 +37,7 @@ import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl; import org.apache.hadoop.util.ReflectionUtils; import org.apache.orc.mapred.OrcStruct; import org.apache.orc.mapreduce.OrcInputFormat; +import org.junit.Assert; import org.junit.Test; import java.io.File; @@ -44,8 +45,6 @@ import java.io.IOException; import java.util.ArrayList; import java.util.List; -import static org.junit.Assert.assertEquals; - public class OrcHadoopInputRowParserTest { @Test @@ -62,14 +61,14 @@ public class OrcHadoopInputRowParserTest 
*/ OrcStruct data = getFirstRow(job, ((StaticPathSpec) config.getPathSpec()).getPaths()); List rows = (List) config.getParser().parseBatch(data); - assertEquals(3, rows.get(0).getDimensions().size()); - assertEquals("bar", rows.get(0).getDimension("col1").get(0)); + Assert.assertEquals(3, rows.get(0).getDimensions().size()); + Assert.assertEquals("bar", rows.get(0).getDimension("col1").get(0)); String s1 = rows.get(0).getDimension("col2").get(0); String s2 = rows.get(0).getDimension("col2").get(1); String s3 = rows.get(0).getDimension("col2").get(2); - assertEquals("dat1", s1); - assertEquals("dat2", s2); - assertEquals("dat3", s3); + Assert.assertEquals("dat1", s1); + Assert.assertEquals("dat2", s2); + Assert.assertEquals("dat3", s3); } @Test @@ -85,16 +84,16 @@ public class OrcHadoopInputRowParserTest */ OrcStruct data = getFirstRow(job, ((StaticPathSpec) config.getPathSpec()).getPaths()); List rows = (List) config.getParser().parseBatch(data); - assertEquals(7, rows.get(0).getDimensions().size()); - assertEquals("bar", rows.get(0).getDimension("col1").get(0)); - assertEquals("dat1", rows.get(0).getDimension("col2").get(0)); - assertEquals("dat2", rows.get(0).getDimension("col2").get(1)); - assertEquals("dat3", rows.get(0).getDimension("col2").get(2)); - assertEquals(1.1f, rows.get(0).getRaw("col3")); - assertEquals(2L, rows.get(0).getRaw("col4")); - assertEquals(3.5d, rows.get(0).getRaw("col5")); - assertEquals(ImmutableList.of(), rows.get(0).getRaw("col6")); - assertEquals("subval7", rows.get(0).getRaw("col7-subcol7")); + Assert.assertEquals(7, rows.get(0).getDimensions().size()); + Assert.assertEquals("bar", rows.get(0).getDimension("col1").get(0)); + Assert.assertEquals("dat1", rows.get(0).getDimension("col2").get(0)); + Assert.assertEquals("dat2", rows.get(0).getDimension("col2").get(1)); + Assert.assertEquals("dat3", rows.get(0).getDimension("col2").get(2)); + Assert.assertEquals(1.1f, rows.get(0).getRaw("col3")); + Assert.assertEquals(2L, rows.get(0).getRaw("col4")); + Assert.assertEquals(3.5d, rows.get(0).getRaw("col5")); + Assert.assertEquals(ImmutableList.of(), rows.get(0).getRaw("col6")); + Assert.assertEquals("subval7", rows.get(0).getRaw("col7-subcol7")); } @Test @@ -115,40 +114,40 @@ public class OrcHadoopInputRowParserTest OrcStruct data = getFirstRow(job, ((StaticPathSpec) config.getPathSpec()).getPaths()); List rows = (List) config.getParser().parseBatch(data); - assertEquals(14, rows.get(0).getDimensions().size()); - assertEquals("false", rows.get(0).getDimension("boolean1").get(0)); - assertEquals("1", rows.get(0).getDimension("byte1").get(0)); - assertEquals("1024", rows.get(0).getDimension("short1").get(0)); - assertEquals("65536", rows.get(0).getDimension("int1").get(0)); - assertEquals("9223372036854775807", rows.get(0).getDimension("long1").get(0)); - assertEquals("1.0", rows.get(0).getDimension("float1").get(0)); - assertEquals("-15.0", rows.get(0).getDimension("double1").get(0)); - assertEquals("AAECAwQAAA==", rows.get(0).getDimension("bytes1").get(0)); - assertEquals("hi", rows.get(0).getDimension("string1").get(0)); - assertEquals("1.23456786547456E7", rows.get(0).getDimension("decimal1").get(0)); - assertEquals("2", rows.get(0).getDimension("struct_list_struct_int").get(0)); - assertEquals("1", rows.get(0).getDimension("struct_list_struct_intlist").get(0)); - assertEquals("2", rows.get(0).getDimension("struct_list_struct_intlist").get(1)); - assertEquals("good", rows.get(0).getDimension("list_struct_string").get(0)); - 
assertEquals(DateTimes.of("2000-03-12T15:00:00.0Z"), rows.get(0).getTimestamp()); + Assert.assertEquals(14, rows.get(0).getDimensions().size()); + Assert.assertEquals("false", rows.get(0).getDimension("boolean1").get(0)); + Assert.assertEquals("1", rows.get(0).getDimension("byte1").get(0)); + Assert.assertEquals("1024", rows.get(0).getDimension("short1").get(0)); + Assert.assertEquals("65536", rows.get(0).getDimension("int1").get(0)); + Assert.assertEquals("9223372036854775807", rows.get(0).getDimension("long1").get(0)); + Assert.assertEquals("1.0", rows.get(0).getDimension("float1").get(0)); + Assert.assertEquals("-15.0", rows.get(0).getDimension("double1").get(0)); + Assert.assertEquals("AAECAwQAAA==", rows.get(0).getDimension("bytes1").get(0)); + Assert.assertEquals("hi", rows.get(0).getDimension("string1").get(0)); + Assert.assertEquals("1.23456786547456E7", rows.get(0).getDimension("decimal1").get(0)); + Assert.assertEquals("2", rows.get(0).getDimension("struct_list_struct_int").get(0)); + Assert.assertEquals("1", rows.get(0).getDimension("struct_list_struct_intlist").get(0)); + Assert.assertEquals("2", rows.get(0).getDimension("struct_list_struct_intlist").get(1)); + Assert.assertEquals("good", rows.get(0).getDimension("list_struct_string").get(0)); + Assert.assertEquals(DateTimes.of("2000-03-12T15:00:00.0Z"), rows.get(0).getTimestamp()); // first row has empty 'map' column, so lets read another! List allRows = getAllRows(config); InputRow anotherRow = allRows.get(0); - assertEquals(14, rows.get(0).getDimensions().size()); - assertEquals("true", anotherRow.getDimension("boolean1").get(0)); - assertEquals("100", anotherRow.getDimension("byte1").get(0)); - assertEquals("2048", anotherRow.getDimension("short1").get(0)); - assertEquals("65536", anotherRow.getDimension("int1").get(0)); - assertEquals("9223372036854775807", anotherRow.getDimension("long1").get(0)); - assertEquals("2.0", anotherRow.getDimension("float1").get(0)); - assertEquals("-5.0", anotherRow.getDimension("double1").get(0)); - assertEquals("AAECAwQAAA==", rows.get(0).getDimension("bytes1").get(0)); - assertEquals("bye", anotherRow.getDimension("string1").get(0)); - assertEquals("1.23456786547457E7", anotherRow.getDimension("decimal1").get(0)); - assertEquals("2", anotherRow.getDimension("struct_list_struct_int").get(0)); - assertEquals("cat", anotherRow.getDimension("list_struct_string").get(0)); - assertEquals("5", anotherRow.getDimension("map_struct_int").get(0)); + Assert.assertEquals(14, rows.get(0).getDimensions().size()); + Assert.assertEquals("true", anotherRow.getDimension("boolean1").get(0)); + Assert.assertEquals("100", anotherRow.getDimension("byte1").get(0)); + Assert.assertEquals("2048", anotherRow.getDimension("short1").get(0)); + Assert.assertEquals("65536", anotherRow.getDimension("int1").get(0)); + Assert.assertEquals("9223372036854775807", anotherRow.getDimension("long1").get(0)); + Assert.assertEquals("2.0", anotherRow.getDimension("float1").get(0)); + Assert.assertEquals("-5.0", anotherRow.getDimension("double1").get(0)); + Assert.assertEquals("AAECAwQAAA==", rows.get(0).getDimension("bytes1").get(0)); + Assert.assertEquals("bye", anotherRow.getDimension("string1").get(0)); + Assert.assertEquals("1.23456786547457E7", anotherRow.getDimension("decimal1").get(0)); + Assert.assertEquals("2", anotherRow.getDimension("struct_list_struct_int").get(0)); + Assert.assertEquals("cat", anotherRow.getDimension("list_struct_string").get(0)); + Assert.assertEquals("5", 
anotherRow.getDimension("map_struct_int").get(0)); } @Test @@ -167,12 +166,12 @@ public class OrcHadoopInputRowParserTest OrcStruct data = getFirstRow(job, ((StaticPathSpec) config.getPathSpec()).getPaths()); List rows = (List) config.getParser().parseBatch(data); - assertEquals(4, rows.get(0).getDimensions().size()); - assertEquals("2", rows.get(0).getDimension("userid").get(0)); - assertEquals("foo", rows.get(0).getDimension("string1").get(0)); - assertEquals("0.8", rows.get(0).getDimension("subtype").get(0)); - assertEquals("1.2", rows.get(0).getDimension("decimal1").get(0)); - assertEquals(DateTimes.of("1969-12-31T16:00:00.0Z"), rows.get(0).getTimestamp()); + Assert.assertEquals(4, rows.get(0).getDimensions().size()); + Assert.assertEquals("2", rows.get(0).getDimension("userid").get(0)); + Assert.assertEquals("foo", rows.get(0).getDimension("string1").get(0)); + Assert.assertEquals("0.8", rows.get(0).getDimension("subtype").get(0)); + Assert.assertEquals("1.2", rows.get(0).getDimension("decimal1").get(0)); + Assert.assertEquals(DateTimes.of("1969-12-31T16:00:00.0Z"), rows.get(0).getTimestamp()); } @Test @@ -189,9 +188,9 @@ public class OrcHadoopInputRowParserTest OrcStruct data = getFirstRow(job, ((StaticPathSpec) config.getPathSpec()).getPaths()); List rows = (List) config.getParser().parseBatch(data); - assertEquals(1, rows.get(0).getDimensions().size()); - assertEquals("1900-12-25T00:00:00.000Z", rows.get(0).getDimension("date").get(0)); - assertEquals(DateTimes.of("1900-05-05T12:34:56.1Z"), rows.get(0).getTimestamp()); + Assert.assertEquals(1, rows.get(0).getDimensions().size()); + Assert.assertEquals("1900-12-25T00:00:00.000Z", rows.get(0).getDimension("date").get(0)); + Assert.assertEquals(DateTimes.of("1900-05-05T12:34:56.1Z"), rows.get(0).getTimestamp()); } @Test @@ -208,9 +207,9 @@ public class OrcHadoopInputRowParserTest OrcStruct data = getFirstRow(job, ((StaticPathSpec) config.getPathSpec()).getPaths()); List rows = (List) config.getParser().parseBatch(data); - assertEquals(1, rows.get(0).getDimensions().size()); - assertEquals("2038-12-25T00:00:00.000Z", rows.get(0).getDimension("date").get(0)); - assertEquals(DateTimes.of("2038-05-05T12:34:56.1Z"), rows.get(0).getTimestamp()); + Assert.assertEquals(1, rows.get(0).getDimensions().size()); + Assert.assertEquals("2038-12-25T00:00:00.000Z", rows.get(0).getDimension("date").get(0)); + Assert.assertEquals(DateTimes.of("2038-05-05T12:34:56.1Z"), rows.get(0).getTimestamp()); } private static HadoopDruidIndexerConfig loadHadoopDruidIndexerConfig(String configPath) diff --git a/extensions-core/parquet-extensions/src/test/java/org/apache/druid/data/input/parquet/CompatParquetInputTest.java b/extensions-core/parquet-extensions/src/test/java/org/apache/druid/data/input/parquet/CompatParquetInputTest.java index 9287599ce06..2169d9818b8 100644 --- a/extensions-core/parquet-extensions/src/test/java/org/apache/druid/data/input/parquet/CompatParquetInputTest.java +++ b/extensions-core/parquet-extensions/src/test/java/org/apache/druid/data/input/parquet/CompatParquetInputTest.java @@ -25,6 +25,7 @@ import org.apache.druid.indexer.HadoopDruidIndexerConfig; import org.apache.druid.indexer.path.StaticPathSpec; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.mapreduce.Job; +import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; @@ -32,9 +33,6 @@ import org.junit.runners.Parameterized; import java.io.IOException; import java.util.List; -import static 
org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - @RunWith(Parameterized.class) public class CompatParquetInputTest extends BaseParquetInputTest { @@ -70,8 +68,8 @@ public class CompatParquetInputTest extends BaseParquetInputTest InputRow row = ((List) config.getParser().parseBatch(data)).get(0); // without binaryAsString: true, the value would something like "[104, 101, 121, 32, 116, 104, 105, 115, 32, 105, 115, 3.... ]" - assertEquals(row.getDimension("field").get(0), "hey this is &é(-è_çà)=^$ù*! Ω^^"); - assertEquals(row.getTimestampFromEpoch(), 1471800234); + Assert.assertEquals("hey this is &é(-è_çà)=^$ù*! Ω^^", row.getDimension("field").get(0)); + Assert.assertEquals(1471800234, row.getTimestampFromEpoch()); } @@ -87,10 +85,10 @@ public class CompatParquetInputTest extends BaseParquetInputTest Object data = getFirstRow(job, parserType, ((StaticPathSpec) config.getPathSpec()).getPaths()); List rows = (List) config.getParser().parseBatch(data); List rows2 = getAllRows(parserType, config); - assertEquals("2018-09-01T00:00:00.000Z", rows.get(0).getTimestamp().toString()); - assertEquals("-1", rows.get(0).getDimension("col").get(0)); - assertEquals(-1, rows.get(0).getMetric("metric1")); - assertTrue(rows2.get(2).getDimension("col").isEmpty()); + Assert.assertEquals("2018-09-01T00:00:00.000Z", rows.get(0).getTimestamp().toString()); + Assert.assertEquals("-1", rows.get(0).getDimension("col").get(0)); + Assert.assertEquals(-1, rows.get(0).getMetric("metric1")); + Assert.assertTrue(rows2.get(2).getDimension("col").isEmpty()); } @Test @@ -158,30 +156,30 @@ public class CompatParquetInputTest extends BaseParquetInputTest config.intoConfiguration(job); Object data = getFirstRow(job, parserType, ((StaticPathSpec) config.getPathSpec()).getPaths()); List rows = (List) config.getParser().parseBatch(data); - assertEquals("2018-09-01T00:00:00.000Z", rows.get(0).getTimestamp().toString()); - assertEquals("true", rows.get(0).getDimension("boolColumn").get(0)); - assertEquals("0", rows.get(0).getDimension("byteColumn").get(0)); - assertEquals("1", rows.get(0).getDimension("shortColumn").get(0)); - assertEquals("2", rows.get(0).getDimension("intColumn").get(0)); - assertEquals("0", rows.get(0).getDimension("longColumn").get(0)); - assertEquals("0.2", rows.get(0).getDimension("doubleColumn").get(0)); - assertEquals("val_0", rows.get(0).getDimension("binaryColumn").get(0)); - assertEquals("val_0", rows.get(0).getDimension("stringColumn").get(0)); - assertEquals("SPADES", rows.get(0).getDimension("enumColumn").get(0)); - assertTrue(rows.get(0).getDimension("maybeBoolColumn").isEmpty()); - assertTrue(rows.get(0).getDimension("maybeByteColumn").isEmpty()); - assertTrue(rows.get(0).getDimension("maybeShortColumn").isEmpty()); - assertTrue(rows.get(0).getDimension("maybeIntColumn").isEmpty()); - assertTrue(rows.get(0).getDimension("maybeLongColumn").isEmpty()); - assertTrue(rows.get(0).getDimension("maybeDoubleColumn").isEmpty()); - assertTrue(rows.get(0).getDimension("maybeBinaryColumn").isEmpty()); - assertTrue(rows.get(0).getDimension("maybeStringColumn").isEmpty()); - assertTrue(rows.get(0).getDimension("maybeEnumColumn").isEmpty()); - assertEquals("arr_0", rows.get(0).getDimension("stringsColumn").get(0)); - assertEquals("arr_1", rows.get(0).getDimension("stringsColumn").get(1)); - assertEquals("0", rows.get(0).getDimension("intSetColumn").get(0)); - assertEquals("val_1", rows.get(0).getDimension("extractByLogicalMap").get(0)); - assertEquals("1", 
rows.get(0).getDimension("extractByComplexLogicalMap").get(0)); + Assert.assertEquals("2018-09-01T00:00:00.000Z", rows.get(0).getTimestamp().toString()); + Assert.assertEquals("true", rows.get(0).getDimension("boolColumn").get(0)); + Assert.assertEquals("0", rows.get(0).getDimension("byteColumn").get(0)); + Assert.assertEquals("1", rows.get(0).getDimension("shortColumn").get(0)); + Assert.assertEquals("2", rows.get(0).getDimension("intColumn").get(0)); + Assert.assertEquals("0", rows.get(0).getDimension("longColumn").get(0)); + Assert.assertEquals("0.2", rows.get(0).getDimension("doubleColumn").get(0)); + Assert.assertEquals("val_0", rows.get(0).getDimension("binaryColumn").get(0)); + Assert.assertEquals("val_0", rows.get(0).getDimension("stringColumn").get(0)); + Assert.assertEquals("SPADES", rows.get(0).getDimension("enumColumn").get(0)); + Assert.assertTrue(rows.get(0).getDimension("maybeBoolColumn").isEmpty()); + Assert.assertTrue(rows.get(0).getDimension("maybeByteColumn").isEmpty()); + Assert.assertTrue(rows.get(0).getDimension("maybeShortColumn").isEmpty()); + Assert.assertTrue(rows.get(0).getDimension("maybeIntColumn").isEmpty()); + Assert.assertTrue(rows.get(0).getDimension("maybeLongColumn").isEmpty()); + Assert.assertTrue(rows.get(0).getDimension("maybeDoubleColumn").isEmpty()); + Assert.assertTrue(rows.get(0).getDimension("maybeBinaryColumn").isEmpty()); + Assert.assertTrue(rows.get(0).getDimension("maybeStringColumn").isEmpty()); + Assert.assertTrue(rows.get(0).getDimension("maybeEnumColumn").isEmpty()); + Assert.assertEquals("arr_0", rows.get(0).getDimension("stringsColumn").get(0)); + Assert.assertEquals("arr_1", rows.get(0).getDimension("stringsColumn").get(1)); + Assert.assertEquals("0", rows.get(0).getDimension("intSetColumn").get(0)); + Assert.assertEquals("val_1", rows.get(0).getDimension("extractByLogicalMap").get(0)); + Assert.assertEquals("1", rows.get(0).getDimension("extractByComplexLogicalMap").get(0)); } @Test @@ -199,10 +197,10 @@ public class CompatParquetInputTest extends BaseParquetInputTest ); config.intoConfiguration(job); List rows = getAllRows(parserType, config); - assertEquals("2018-09-01T00:00:00.000Z", rows.get(0).getTimestamp().toString()); - assertEquals("1", rows.get(0).getDimension("repeatedInt").get(0)); - assertEquals("2", rows.get(0).getDimension("repeatedInt").get(1)); - assertEquals("3", rows.get(0).getDimension("repeatedInt").get(2)); + Assert.assertEquals("2018-09-01T00:00:00.000Z", rows.get(0).getTimestamp().toString()); + Assert.assertEquals("1", rows.get(0).getDimension("repeatedInt").get(0)); + Assert.assertEquals("2", rows.get(0).getDimension("repeatedInt").get(1)); + Assert.assertEquals("3", rows.get(0).getDimension("repeatedInt").get(2)); } @Test @@ -223,10 +221,10 @@ public class CompatParquetInputTest extends BaseParquetInputTest config.intoConfiguration(job); List rows = getAllRows(parserType, config); - assertEquals("2018-09-01T00:00:00.000Z", rows.get(0).getTimestamp().toString()); - assertEquals("5", rows.get(0).getDimension("primitive").get(0)); - assertEquals("4", rows.get(0).getDimension("extracted1").get(0)); - assertEquals("6", rows.get(0).getDimension("extracted2").get(0)); + Assert.assertEquals("2018-09-01T00:00:00.000Z", rows.get(0).getTimestamp().toString()); + Assert.assertEquals("5", rows.get(0).getDimension("primitive").get(0)); + Assert.assertEquals("4", rows.get(0).getDimension("extracted1").get(0)); + Assert.assertEquals("6", rows.get(0).getDimension("extracted2").get(0)); } @Test @@ -245,13 +243,13 @@ public 
class CompatParquetInputTest extends BaseParquetInputTest ); config.intoConfiguration(job); List rows = getAllRows(parserType, config); - assertEquals("2018-09-01T00:00:00.000Z", rows.get(0).getTimestamp().toString()); - assertEquals("10", rows.get(0).getDimension("optionalPrimitive").get(0)); - assertEquals("9", rows.get(0).getDimension("requiredPrimitive").get(0)); - assertTrue(rows.get(0).getDimension("repeatedPrimitive").isEmpty()); - assertTrue(rows.get(0).getDimension("extractedOptional").isEmpty()); - assertEquals("9", rows.get(0).getDimension("extractedRequired").get(0)); - assertEquals("9", rows.get(0).getDimension("extractedRepeated").get(0)); - assertEquals("10", rows.get(0).getDimension("extractedRepeated").get(1)); + Assert.assertEquals("2018-09-01T00:00:00.000Z", rows.get(0).getTimestamp().toString()); + Assert.assertEquals("10", rows.get(0).getDimension("optionalPrimitive").get(0)); + Assert.assertEquals("9", rows.get(0).getDimension("requiredPrimitive").get(0)); + Assert.assertTrue(rows.get(0).getDimension("repeatedPrimitive").isEmpty()); + Assert.assertTrue(rows.get(0).getDimension("extractedOptional").isEmpty()); + Assert.assertEquals("9", rows.get(0).getDimension("extractedRequired").get(0)); + Assert.assertEquals("9", rows.get(0).getDimension("extractedRepeated").get(0)); + Assert.assertEquals("10", rows.get(0).getDimension("extractedRepeated").get(1)); } } diff --git a/extensions-core/parquet-extensions/src/test/java/org/apache/druid/data/input/parquet/DecimalParquetInputTest.java b/extensions-core/parquet-extensions/src/test/java/org/apache/druid/data/input/parquet/DecimalParquetInputTest.java index a587789fb48..de2f3f2cfca 100644 --- a/extensions-core/parquet-extensions/src/test/java/org/apache/druid/data/input/parquet/DecimalParquetInputTest.java +++ b/extensions-core/parquet-extensions/src/test/java/org/apache/druid/data/input/parquet/DecimalParquetInputTest.java @@ -22,8 +22,7 @@ package org.apache.druid.data.input.parquet; import com.google.common.collect.ImmutableList; import org.apache.druid.data.input.InputRow; import org.apache.druid.indexer.HadoopDruidIndexerConfig; -import org.apache.hadoop.conf.Configuration; -import org.apache.hadoop.mapreduce.Job; +import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; @@ -32,8 +31,6 @@ import java.io.IOException; import java.math.BigDecimal; import java.util.List; -import static org.junit.Assert.assertEquals; - @RunWith(Parameterized.class) public class DecimalParquetInputTest extends BaseParquetInputTest { @@ -47,12 +44,10 @@ public class DecimalParquetInputTest extends BaseParquetInputTest } private final String parserType; - private final Job job; public DecimalParquetInputTest(String parserType) throws IOException { this.parserType = parserType; - this.job = Job.getInstance(new Configuration()); } @Test @@ -68,9 +63,9 @@ public class DecimalParquetInputTest extends BaseParquetInputTest true ); List rows = getAllRows(parserType, config); - assertEquals("2018-09-01T00:00:00.000Z", rows.get(0).getTimestamp().toString()); - assertEquals("1.0", rows.get(0).getDimension("fixed_len_dec").get(0)); - assertEquals(new BigDecimal("1.0"), rows.get(0).getMetric("metric1")); + Assert.assertEquals("2018-09-01T00:00:00.000Z", rows.get(0).getTimestamp().toString()); + Assert.assertEquals("1.0", rows.get(0).getDimension("fixed_len_dec").get(0)); + Assert.assertEquals(new BigDecimal("1.0"), rows.get(0).getMetric("metric1")); } @Test @@ -86,9 +81,9 @@ public class 
DecimalParquetInputTest extends BaseParquetInputTest true ); List rows = getAllRows(parserType, config); - assertEquals("2018-09-01T00:00:00.000Z", rows.get(0).getTimestamp().toString()); - assertEquals("100", rows.get(0).getDimension("i32_dec").get(0)); - assertEquals(new BigDecimal(100), rows.get(0).getMetric("metric1")); + Assert.assertEquals("2018-09-01T00:00:00.000Z", rows.get(0).getTimestamp().toString()); + Assert.assertEquals("100", rows.get(0).getDimension("i32_dec").get(0)); + Assert.assertEquals(new BigDecimal(100), rows.get(0).getMetric("metric1")); } @Test @@ -104,8 +99,8 @@ public class DecimalParquetInputTest extends BaseParquetInputTest true ); List rows = getAllRows(parserType, config); - assertEquals("2018-09-01T00:00:00.000Z", rows.get(0).getTimestamp().toString()); - assertEquals("100", rows.get(0).getDimension("i64_dec").get(0)); - assertEquals(new BigDecimal(100), rows.get(0).getMetric("metric1")); + Assert.assertEquals("2018-09-01T00:00:00.000Z", rows.get(0).getTimestamp().toString()); + Assert.assertEquals("100", rows.get(0).getDimension("i64_dec").get(0)); + Assert.assertEquals(new BigDecimal(100), rows.get(0).getMetric("metric1")); } } diff --git a/extensions-core/parquet-extensions/src/test/java/org/apache/druid/data/input/parquet/FlattenSpecParquetInputTest.java b/extensions-core/parquet-extensions/src/test/java/org/apache/druid/data/input/parquet/FlattenSpecParquetInputTest.java index 28fa8bb265a..748086dbff7 100644 --- a/extensions-core/parquet-extensions/src/test/java/org/apache/druid/data/input/parquet/FlattenSpecParquetInputTest.java +++ b/extensions-core/parquet-extensions/src/test/java/org/apache/druid/data/input/parquet/FlattenSpecParquetInputTest.java @@ -33,13 +33,10 @@ import org.junit.runners.Parameterized; import java.io.IOException; import java.util.List; -import static org.junit.Assert.assertEquals; - @RunWith(Parameterized.class) public class FlattenSpecParquetInputTest extends BaseParquetInputTest { - private static String TS1 = "2018-09-18T00:18:00.023Z"; - + private static final String TS1 = "2018-09-18T00:18:00.023Z"; @Parameterized.Parameters(name = "type = {0}") public static Iterable constructorFeeder() @@ -71,13 +68,13 @@ public class FlattenSpecParquetInputTest extends BaseParquetInputTest Object data = getFirstRow(job, parserType, ((StaticPathSpec) config.getPathSpec()).getPaths()); List rows = (List) config.getParser().parseBatch(data); - assertEquals(TS1, rows.get(0).getTimestamp().toString()); - assertEquals("d1v1", rows.get(0).getDimension("dim1").get(0)); - assertEquals("d2v1", rows.get(0).getDimension("dim2").get(0)); - assertEquals("1", rows.get(0).getDimension("dim3").get(0)); - assertEquals("listDim1v1", rows.get(0).getDimension("listDim").get(0)); - assertEquals("listDim1v2", rows.get(0).getDimension("listDim").get(1)); - assertEquals(1, rows.get(0).getMetric("metric1").longValue()); + Assert.assertEquals(TS1, rows.get(0).getTimestamp().toString()); + Assert.assertEquals("d1v1", rows.get(0).getDimension("dim1").get(0)); + Assert.assertEquals("d2v1", rows.get(0).getDimension("dim2").get(0)); + Assert.assertEquals("1", rows.get(0).getDimension("dim3").get(0)); + Assert.assertEquals("listDim1v1", rows.get(0).getDimension("listDim").get(0)); + Assert.assertEquals("listDim1v2", rows.get(0).getDimension("listDim").get(1)); + Assert.assertEquals(1, rows.get(0).getMetric("metric1").longValue()); } @Test @@ -92,13 +89,13 @@ public class FlattenSpecParquetInputTest extends BaseParquetInputTest Object data = getFirstRow(job, parserType, 
((StaticPathSpec) config.getPathSpec()).getPaths()); List rows = (List) config.getParser().parseBatch(data); - assertEquals(TS1, rows.get(0).getTimestamp().toString()); - assertEquals("d1v1", rows.get(0).getDimension("dim1").get(0)); - assertEquals("d2v1", rows.get(0).getDimension("dim2").get(0)); - assertEquals("1", rows.get(0).getDimension("dim3").get(0)); - assertEquals("listDim1v1", rows.get(0).getDimension("listDim").get(0)); - assertEquals("listDim1v2", rows.get(0).getDimension("listDim").get(1)); - assertEquals(1, rows.get(0).getMetric("metric1").longValue()); + Assert.assertEquals(TS1, rows.get(0).getTimestamp().toString()); + Assert.assertEquals("d1v1", rows.get(0).getDimension("dim1").get(0)); + Assert.assertEquals("d2v1", rows.get(0).getDimension("dim2").get(0)); + Assert.assertEquals("1", rows.get(0).getDimension("dim3").get(0)); + Assert.assertEquals("listDim1v1", rows.get(0).getDimension("listDim").get(0)); + Assert.assertEquals("listDim1v2", rows.get(0).getDimension("listDim").get(1)); + Assert.assertEquals(1, rows.get(0).getMetric("metric1").longValue()); } @Test @@ -113,13 +110,13 @@ public class FlattenSpecParquetInputTest extends BaseParquetInputTest Object data = getFirstRow(job, parserType, ((StaticPathSpec) config.getPathSpec()).getPaths()); List rows = (List) config.getParser().parseBatch(data); - assertEquals(TS1, rows.get(0).getTimestamp().toString()); - assertEquals("d1v1", rows.get(0).getDimension("dim1").get(0)); - assertEquals("d2v1", rows.get(0).getDimension("dim2").get(0)); - assertEquals("1", rows.get(0).getDimension("dim3").get(0)); - assertEquals("listDim1v1", rows.get(0).getDimension("list").get(0)); - assertEquals("listDim1v2", rows.get(0).getDimension("list").get(1)); - assertEquals(1, rows.get(0).getMetric("metric1").longValue()); + Assert.assertEquals(TS1, rows.get(0).getTimestamp().toString()); + Assert.assertEquals("d1v1", rows.get(0).getDimension("dim1").get(0)); + Assert.assertEquals("d2v1", rows.get(0).getDimension("dim2").get(0)); + Assert.assertEquals("1", rows.get(0).getDimension("dim3").get(0)); + Assert.assertEquals("listDim1v1", rows.get(0).getDimension("list").get(0)); + Assert.assertEquals("listDim1v2", rows.get(0).getDimension("list").get(1)); + Assert.assertEquals(1, rows.get(0).getMetric("metric1").longValue()); } @Test @@ -134,11 +131,11 @@ public class FlattenSpecParquetInputTest extends BaseParquetInputTest Object data = getFirstRow(job, parserType, ((StaticPathSpec) config.getPathSpec()).getPaths()); List rows = (List) config.getParser().parseBatch(data); - assertEquals(TS1, rows.get(0).getTimestamp().toString()); - assertEquals("d1v1", rows.get(0).getDimension("dim1").get(0)); - assertEquals("d2v1", rows.get(0).getDimension("dim2").get(0)); - assertEquals("listDim1v2", rows.get(0).getDimension("listextracted").get(0)); - assertEquals(1, rows.get(0).getMetric("metric1").longValue()); + Assert.assertEquals(TS1, rows.get(0).getTimestamp().toString()); + Assert.assertEquals("d1v1", rows.get(0).getDimension("dim1").get(0)); + Assert.assertEquals("d2v1", rows.get(0).getDimension("dim2").get(0)); + Assert.assertEquals("listDim1v2", rows.get(0).getDimension("listextracted").get(0)); + Assert.assertEquals(1, rows.get(0).getMetric("metric1").longValue()); } @@ -154,14 +151,14 @@ public class FlattenSpecParquetInputTest extends BaseParquetInputTest Object data = getFirstRow(job, parserType, ((StaticPathSpec) config.getPathSpec()).getPaths()); List rows = (List) config.getParser().parseBatch(data); - assertEquals(TS1, 
rows.get(0).getTimestamp().toString()); - assertEquals("d1v1", rows.get(0).getDimension("dim1").get(0)); + Assert.assertEquals(TS1, rows.get(0).getTimestamp().toString()); + Assert.assertEquals("d1v1", rows.get(0).getDimension("dim1").get(0)); List dims = rows.get(0).getDimensions(); Assert.assertFalse(dims.contains("dim2")); Assert.assertFalse(dims.contains("dim3")); Assert.assertFalse(dims.contains("listDim")); Assert.assertFalse(dims.contains("nestedData")); - assertEquals(1, rows.get(0).getMetric("metric1").longValue()); + Assert.assertEquals(1, rows.get(0).getMetric("metric1").longValue()); } @Test @@ -176,13 +173,13 @@ public class FlattenSpecParquetInputTest extends BaseParquetInputTest Object data = getFirstRow(job, parserType, ((StaticPathSpec) config.getPathSpec()).getPaths()); List rows = (List) config.getParser().parseBatch(data); - assertEquals(TS1, rows.get(0).getTimestamp().toString()); - assertEquals("d1v1", rows.get(0).getDimension("dim1").get(0)); + Assert.assertEquals(TS1, rows.get(0).getTimestamp().toString()); + Assert.assertEquals("d1v1", rows.get(0).getDimension("dim1").get(0)); List dims = rows.get(0).getDimensions(); Assert.assertFalse(dims.contains("dim2")); Assert.assertFalse(dims.contains("dim3")); Assert.assertFalse(dims.contains("listDim")); - assertEquals(1, rows.get(0).getMetric("metric1").longValue()); + Assert.assertEquals(1, rows.get(0).getMetric("metric1").longValue()); } @Test @@ -197,14 +194,14 @@ public class FlattenSpecParquetInputTest extends BaseParquetInputTest Object data = getFirstRow(job, parserType, ((StaticPathSpec) config.getPathSpec()).getPaths()); List rows = (List) config.getParser().parseBatch(data); - assertEquals(TS1, rows.get(0).getTimestamp().toString()); - assertEquals("d1v1", rows.get(0).getDimension("dim1").get(0)); - assertEquals("d2v1", rows.get(0).getDimension("dim2").get(0)); - assertEquals("1", rows.get(0).getDimension("dim3").get(0)); - assertEquals("listDim1v1", rows.get(0).getDimension("listDim").get(0)); - assertEquals("listDim1v2", rows.get(0).getDimension("listDim").get(1)); - assertEquals(1, rows.get(0).getMetric("metric1").longValue()); - assertEquals(2, rows.get(0).getMetric("metric2").longValue()); + Assert.assertEquals(TS1, rows.get(0).getTimestamp().toString()); + Assert.assertEquals("d1v1", rows.get(0).getDimension("dim1").get(0)); + Assert.assertEquals("d2v1", rows.get(0).getDimension("dim2").get(0)); + Assert.assertEquals("1", rows.get(0).getDimension("dim3").get(0)); + Assert.assertEquals("listDim1v1", rows.get(0).getDimension("listDim").get(0)); + Assert.assertEquals("listDim1v2", rows.get(0).getDimension("listDim").get(1)); + Assert.assertEquals(1, rows.get(0).getMetric("metric1").longValue()); + Assert.assertEquals(2, rows.get(0).getMetric("metric2").longValue()); } @Test @@ -219,12 +216,11 @@ public class FlattenSpecParquetInputTest extends BaseParquetInputTest Object data = getFirstRow(job, parserType, ((StaticPathSpec) config.getPathSpec()).getPaths()); List rows = (List) config.getParser().parseBatch(data); - assertEquals(TS1, rows.get(0).getTimestamp().toString()); - assertEquals("d1v1", rows.get(0).getDimension("dim1").get(0)); - assertEquals("d2v1", rows.get(0).getDimension("dim2").get(0)); - assertEquals("1", rows.get(0).getDimension("dim3").get(0)); - assertEquals("listDim1v2", rows.get(0).getDimension("listextracted").get(0)); - assertEquals(1, rows.get(0).getMetric("metric1").longValue()); + Assert.assertEquals(TS1, rows.get(0).getTimestamp().toString()); + Assert.assertEquals("d1v1", 
rows.get(0).getDimension("dim1").get(0));
+ Assert.assertEquals("d2v1", rows.get(0).getDimension("dim2").get(0));
+ Assert.assertEquals("1", rows.get(0).getDimension("dim3").get(0));
+ Assert.assertEquals("listDim1v2", rows.get(0).getDimension("listextracted").get(0));
+ Assert.assertEquals(1, rows.get(0).getMetric("metric1").longValue());
}
-
}
diff --git a/extensions-core/parquet-extensions/src/test/java/org/apache/druid/data/input/parquet/TimestampsParquetInputTest.java b/extensions-core/parquet-extensions/src/test/java/org/apache/druid/data/input/parquet/TimestampsParquetInputTest.java
index a451da700d7..8303723966f 100644
--- a/extensions-core/parquet-extensions/src/test/java/org/apache/druid/data/input/parquet/TimestampsParquetInputTest.java
+++ b/extensions-core/parquet-extensions/src/test/java/org/apache/druid/data/input/parquet/TimestampsParquetInputTest.java
@@ -25,6 +25,7 @@ import org.apache.druid.indexer.HadoopDruidIndexerConfig;
import org.apache.druid.indexer.path.StaticPathSpec;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;
+import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
@@ -32,8 +33,6 @@ import org.junit.runners.Parameterized;
import java.io.IOException;
import java.util.List;
-import static org.junit.Assert.assertEquals;
-
@RunWith(Parameterized.class)
public class TimestampsParquetInputTest extends BaseParquetInputTest
{
@@ -70,10 +69,10 @@ public class TimestampsParquetInputTest extends BaseParquetInputTest
);
List rowsWithString = getAllRows(parserType, configTimeAsString);
List rowsWithDate = getAllRows(parserType, configTimeAsDate);
- assertEquals(rowsWithDate.size(), rowsWithString.size());
+ Assert.assertEquals(rowsWithDate.size(), rowsWithString.size());
for (int i = 0; i < rowsWithDate.size(); i++) {
- assertEquals(rowsWithString.get(i).getTimestamp(), rowsWithDate.get(i).getTimestamp());
+ Assert.assertEquals(rowsWithString.get(i).getTimestamp(), rowsWithDate.get(i).getTimestamp());
}
}
@@ -96,7 +95,7 @@ public class TimestampsParquetInputTest extends BaseParquetInputTest
Object data = getFirstRow(job, parserType, ((StaticPathSpec) config.getPathSpec()).getPaths());
List rows = (List) config.getParser().parseBatch(data);
- assertEquals("2001-01-01T01:01:01.000Z", rows.get(0).getTimestamp().toString());
+ Assert.assertEquals("2001-01-01T01:01:01.000Z", rows.get(0).getTimestamp().toString());
}
@Test
@@ -109,6 +108,6 @@ public class TimestampsParquetInputTest extends BaseParquetInputTest
);
config.intoConfiguration(job);
List rows = getAllRows(parserType, config);
- assertEquals("1970-01-01T00:00:00.010Z", rows.get(0).getTimestamp().toString());
+ Assert.assertEquals("1970-01-01T00:00:00.010Z", rows.get(0).getTimestamp().toString());
}
}
diff --git a/extensions-core/parquet-extensions/src/test/java/org/apache/druid/data/input/parquet/WikiParquetInputTest.java b/extensions-core/parquet-extensions/src/test/java/org/apache/druid/data/input/parquet/WikiParquetInputTest.java
index 07ae255406f..79a59a6e61b 100644
--- a/extensions-core/parquet-extensions/src/test/java/org/apache/druid/data/input/parquet/WikiParquetInputTest.java
+++ b/extensions-core/parquet-extensions/src/test/java/org/apache/druid/data/input/parquet/WikiParquetInputTest.java
@@ -25,6 +25,7 @@ import org.apache.druid.indexer.HadoopDruidIndexerConfig;
import org.apache.druid.indexer.path.StaticPathSpec;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;
+import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
@@ -32,8 +33,6 @@ import org.junit.runners.Parameterized;
import java.io.IOException;
import java.util.List;
-import static org.junit.Assert.assertEquals;
-
@RunWith(Parameterized.class)
public class WikiParquetInputTest extends BaseParquetInputTest
{
@@ -67,10 +66,10 @@ public class WikiParquetInputTest extends BaseParquetInputTest
Object data = getFirstRow(job, parserType, ((StaticPathSpec) config.getPathSpec()).getPaths());
List rows = (List) config.getParser().parseBatch(data);
- assertEquals(rows.get(0).getDimension("page").get(0), "Gypsy Danger");
+ Assert.assertEquals("Gypsy Danger", rows.get(0).getDimension("page").get(0));
String s1 = rows.get(0).getDimension("language").get(0);
String s2 = rows.get(0).getDimension("language").get(1);
- assertEquals("en", s1);
- assertEquals("zh", s2);
+ Assert.assertEquals("en", s1);
+ Assert.assertEquals("zh", s2);
}
}
diff --git a/extensions-core/protobuf-extensions/src/test/java/org/apache/druid/data/input/protobuf/ProtobufInputRowParserTest.java b/extensions-core/protobuf-extensions/src/test/java/org/apache/druid/data/input/protobuf/ProtobufInputRowParserTest.java
index 081fc23a24f..a2c8e9c8ba8 100644
--- a/extensions-core/protobuf-extensions/src/test/java/org/apache/druid/data/input/protobuf/ProtobufInputRowParserTest.java
+++ b/extensions-core/protobuf-extensions/src/test/java/org/apache/druid/data/input/protobuf/ProtobufInputRowParserTest.java
@@ -36,6 +36,7 @@ import org.apache.druid.js.JavaScriptConfig;
import org.hamcrest.CoreMatchers;
import org.joda.time.DateTime;
import org.joda.time.chrono.ISOChronology;
+import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
@@ -45,8 +46,6 @@ import java.io.ByteArrayOutputStream;
import java.nio.ByteBuffer;
import java.util.List;
-import static org.junit.Assert.assertEquals;
-
public class ProtobufInputRowParserTest
{
@Rule
@@ -127,12 +126,11 @@ public class ProtobufInputRowParserTest
@Test
public void testParse() throws Exception
{
- //configure parser with desc file
ProtobufInputRowParser parser = new ProtobufInputRowParser(parseSpec, "prototest.desc", "ProtoTestEvent");
//create binary of proto test event
- DateTime dateTime = new DateTime(2012, 07, 12, 9, 30, ISOChronology.getInstanceUTC());
+ DateTime dateTime = new DateTime(2012, 7, 12, 9, 30, ISOChronology.getInstanceUTC());
ProtoTestEventWrapper.ProtoTestEvent event = ProtoTestEventWrapper.ProtoTestEvent.newBuilder()
.setDescription("description")
.setEventType(ProtoTestEventWrapper.ProtoTestEvent.EventCategory.CATEGORY_ONE)
@@ -160,7 +158,7 @@ public class ProtobufInputRowParserTest
InputRow row = parser.parseBatch(ByteBuffer.wrap(out.toByteArray())).get(0);
System.out.println(row);
- assertEquals(dateTime.getMillis(), row.getTimestampFromEpoch());
+ Assert.assertEquals(dateTime.getMillis(), row.getTimestampFromEpoch());
assertDimensionEquals(row, "id", "4711");
assertDimensionEquals(row, "isValid", "true");
@@ -172,9 +170,9 @@ public class ProtobufInputRowParserTest
assertDimensionEquals(row, "bar0", "bar0");
- assertEquals(47.11F, row.getMetric("someFloatColumn").floatValue(), 0.0);
- assertEquals(815.0F, row.getMetric("someIntColumn").floatValue(), 0.0);
- assertEquals(816.0F, row.getMetric("someLongColumn").floatValue(), 0.0);
+ Assert.assertEquals(47.11F, row.getMetric("someFloatColumn").floatValue(), 0.0);
+ Assert.assertEquals(815.0F,
row.getMetric("someIntColumn").floatValue(), 0.0); + Assert.assertEquals(816.0F, row.getMetric("someLongColumn").floatValue(), 0.0); } @Test @@ -200,13 +198,14 @@ public class ProtobufInputRowParserTest expectedException.expect(CoreMatchers.instanceOf(IllegalStateException.class)); expectedException.expectMessage("JavaScript is disabled"); + //noinspection ResultOfMethodCallIgnored (this method call will trigger the expected exception) parser.parseBatch(ByteBuffer.allocate(1)).get(0); } private void assertDimensionEquals(InputRow row, String dimension, Object expected) { List values = row.getDimension(dimension); - assertEquals(1, values.size()); - assertEquals(expected, values.get(0)); + Assert.assertEquals(1, values.size()); + Assert.assertEquals(expected, values.get(0)); } } diff --git a/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/TestAWSCredentialsProvider.java b/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/TestAWSCredentialsProvider.java index d2ec870fc6f..7f8e2671573 100644 --- a/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/TestAWSCredentialsProvider.java +++ b/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/TestAWSCredentialsProvider.java @@ -30,6 +30,7 @@ import org.apache.druid.common.aws.AWSModule; import org.apache.druid.common.aws.AWSProxyConfig; import org.apache.druid.metadata.DefaultPasswordProvider; import org.easymock.EasyMock; +import org.junit.Assert; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; @@ -39,9 +40,6 @@ import java.io.File; import java.io.IOException; import java.nio.charset.StandardCharsets; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertTrue; - public class TestAWSCredentialsProvider { @Rule @@ -60,8 +58,8 @@ public class TestAWSCredentialsProvider AWSCredentialsProvider provider = awsModule.getAWSCredentialsProvider(config); AWSCredentials credentials = provider.getCredentials(); - assertEquals(credentials.getAWSAccessKeyId(), "accessKeySample"); - assertEquals(credentials.getAWSSecretKey(), "secretKeySample"); + Assert.assertEquals("accessKeySample", credentials.getAWSAccessKeyId()); + Assert.assertEquals("secretKeySample", credentials.getAWSSecretKey()); // try to create s3Module.getAmazonS3Client( @@ -88,11 +86,11 @@ public class TestAWSCredentialsProvider AWSCredentialsProvider provider = awsModule.getAWSCredentialsProvider(config); AWSCredentials credentials = provider.getCredentials(); - assertTrue(credentials instanceof AWSSessionCredentials); + Assert.assertTrue(credentials instanceof AWSSessionCredentials); AWSSessionCredentials sessionCredentials = (AWSSessionCredentials) credentials; - assertEquals(sessionCredentials.getAWSAccessKeyId(), "accessKeySample"); - assertEquals(sessionCredentials.getAWSSecretKey(), "secretKeySample"); - assertEquals(sessionCredentials.getSessionToken(), "sessionTokenSample"); + Assert.assertEquals("accessKeySample", sessionCredentials.getAWSAccessKeyId()); + Assert.assertEquals("secretKeySample", sessionCredentials.getAWSSecretKey()); + Assert.assertEquals("sessionTokenSample", sessionCredentials.getSessionToken()); // try to create s3Module.getAmazonS3Client( diff --git a/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/TestFileSessionCredentialsProvider.java b/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/TestFileSessionCredentialsProvider.java index cfa94188b8d..3e29c75e6d7 100644 --- 
a/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/TestFileSessionCredentialsProvider.java +++ b/extensions-core/s3-extensions/src/test/java/org/apache/druid/storage/s3/TestFileSessionCredentialsProvider.java @@ -22,6 +22,7 @@ package org.apache.druid.storage.s3; import com.amazonaws.auth.AWSSessionCredentials; import com.google.common.io.Files; import org.apache.druid.common.aws.FileSessionCredentialsProvider; +import org.junit.Assert; import org.junit.Rule; import org.junit.Test; import org.junit.rules.TemporaryFolder; @@ -31,8 +32,6 @@ import java.io.File; import java.io.IOException; import java.nio.charset.StandardCharsets; -import static org.junit.Assert.assertEquals; - public class TestFileSessionCredentialsProvider { @Rule @@ -48,8 +47,8 @@ public class TestFileSessionCredentialsProvider FileSessionCredentialsProvider provider = new FileSessionCredentialsProvider(file.getAbsolutePath()); AWSSessionCredentials sessionCredentials = (AWSSessionCredentials) provider.getCredentials(); - assertEquals(sessionCredentials.getSessionToken(), "sessionTokenSample"); - assertEquals(sessionCredentials.getAWSAccessKeyId(), "accessKeySample"); - assertEquals(sessionCredentials.getAWSSecretKey(), "secretKeySample"); + Assert.assertEquals("sessionTokenSample", sessionCredentials.getSessionToken()); + Assert.assertEquals("accessKeySample", sessionCredentials.getAWSAccessKeyId()); + Assert.assertEquals("secretKeySample", sessionCredentials.getAWSSecretKey()); } } diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/common/actions/RemoteTaskActionClientTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/common/actions/RemoteTaskActionClientTest.java index ec28edfb455..10d24a9872b 100644 --- a/indexing-service/src/test/java/org/apache/druid/indexing/common/actions/RemoteTaskActionClientTest.java +++ b/indexing-service/src/test/java/org/apache/druid/indexing/common/actions/RemoteTaskActionClientTest.java @@ -48,16 +48,13 @@ import java.util.HashMap; import java.util.List; import java.util.Map; -import static org.easymock.EasyMock.expect; -import static org.easymock.EasyMock.replay; - public class RemoteTaskActionClientTest { @Rule public ExpectedException expectedException = ExpectedException.none(); private DruidLeaderClient druidLeaderClient; - private ObjectMapper objectMapper = new DefaultObjectMapper(); + private final ObjectMapper objectMapper = new DefaultObjectMapper(); @Before public void setUp() @@ -69,11 +66,11 @@ public class RemoteTaskActionClientTest public void testSubmitSimple() throws Exception { Request request = new Request(HttpMethod.POST, new URL("http://localhost:1234/xx")); - expect(druidLeaderClient.makeRequest(HttpMethod.POST, "/druid/indexer/v1/action")) - .andReturn(request); + EasyMock.expect(druidLeaderClient.makeRequest(HttpMethod.POST, "/druid/indexer/v1/action")) + .andReturn(request); // return status code 200 and a list with size equals 1 - Map responseBody = new HashMap(); + Map responseBody = new HashMap<>(); final List expectedLocks = Collections.singletonList(new TaskLock( TaskLockType.SHARED, "groupId", @@ -91,8 +88,8 @@ public class RemoteTaskActionClientTest ); // set up mocks - expect(druidLeaderClient.go(request)).andReturn(responseHolder); - replay(druidLeaderClient); + EasyMock.expect(druidLeaderClient.go(request)).andReturn(responseHolder); + EasyMock.replay(druidLeaderClient); Task task = new NoopTask("id", null, 0, 0, null, null, null); RemoteTaskActionClient client = new RemoteTaskActionClient( @@ 
-112,8 +109,8 @@ public class RemoteTaskActionClientTest { // return status code 400 and a list with size equals 1 Request request = new Request(HttpMethod.POST, new URL("http://localhost:1234/xx")); - expect(druidLeaderClient.makeRequest(HttpMethod.POST, "/druid/indexer/v1/action")) - .andReturn(request); + EasyMock.expect(druidLeaderClient.makeRequest(HttpMethod.POST, "/druid/indexer/v1/action")) + .andReturn(request); // return status code 200 and a list with size equals 1 FullResponseHolder responseHolder = new FullResponseHolder( @@ -123,8 +120,8 @@ public class RemoteTaskActionClientTest ); // set up mocks - expect(druidLeaderClient.go(request)).andReturn(responseHolder); - replay(druidLeaderClient); + EasyMock.expect(druidLeaderClient.go(request)).andReturn(responseHolder); + EasyMock.replay(druidLeaderClient); Task task = new NoopTask("id", null, 0, 0, null, null, null); RemoteTaskActionClient client = new RemoteTaskActionClient( diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/overlord/supervisor/SupervisorManagerTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/overlord/supervisor/SupervisorManagerTest.java index 2bdddb87cef..6f62947d556 100644 --- a/indexing-service/src/test/java/org/apache/druid/indexing/overlord/supervisor/SupervisorManagerTest.java +++ b/indexing-service/src/test/java/org/apache/druid/indexing/overlord/supervisor/SupervisorManagerTest.java @@ -40,10 +40,6 @@ import java.util.ArrayList; import java.util.List; import java.util.Map; -import static org.easymock.EasyMock.anyObject; -import static org.easymock.EasyMock.capture; -import static org.easymock.EasyMock.eq; - @RunWith(EasyMockRunner.class) public class SupervisorManagerTest extends EasyMockSupport { @@ -107,7 +103,7 @@ public class SupervisorManagerTest extends EasyMockSupport verifyAll(); resetAll(); - metadataSupervisorManager.insert(eq("id1"), anyObject(NoopSupervisorSpec.class)); + metadataSupervisorManager.insert(EasyMock.eq("id1"), EasyMock.anyObject(NoopSupervisorSpec.class)); supervisor2.stop(true); replayAll(); @@ -314,7 +310,7 @@ public class SupervisorManagerTest extends EasyMockSupport // mock suspend, which stops supervisor1 and sets suspended state in metadata, flipping to supervisor2 // in TestSupervisorSpec implementation of createSuspendedSpec resetAll(); - metadataSupervisorManager.insert(eq("id1"), capture(capturedInsert)); + metadataSupervisorManager.insert(EasyMock.eq("id1"), EasyMock.capture(capturedInsert)); supervisor2.start(); supervisor1.stop(true); replayAll(); @@ -328,7 +324,7 @@ public class SupervisorManagerTest extends EasyMockSupport // mock resume, which stops supervisor2 and sets suspended to false in metadata, flipping to supervisor1 // in TestSupervisorSpec implementation of createRunningSpec resetAll(); - metadataSupervisorManager.insert(eq("id1"), capture(capturedInsert)); + metadataSupervisorManager.insert(EasyMock.eq("id1"), EasyMock.capture(capturedInsert)); supervisor2.stop(true); supervisor1.start(); replayAll(); @@ -341,7 +337,7 @@ public class SupervisorManagerTest extends EasyMockSupport // mock stop of suspended then resumed supervisor resetAll(); - metadataSupervisorManager.insert(eq("id1"), anyObject(NoopSupervisorSpec.class)); + metadataSupervisorManager.insert(EasyMock.eq("id1"), EasyMock.anyObject(NoopSupervisorSpec.class)); supervisor1.stop(true); replayAll(); @@ -371,12 +367,12 @@ public class SupervisorManagerTest extends EasyMockSupport private final Supervisor suspendedSupervisor; - public 
TestSupervisorSpec(String id, Supervisor supervisor) + TestSupervisorSpec(String id, Supervisor supervisor) { this(id, supervisor, false, null); } - public TestSupervisorSpec(String id, Supervisor supervisor, boolean suspended, Supervisor suspendedSupervisor) + TestSupervisorSpec(String id, Supervisor supervisor, boolean suspended, Supervisor suspendedSupervisor) { this.id = id; this.supervisor = supervisor; diff --git a/indexing-service/src/test/java/org/apache/druid/indexing/seekablestream/supervisor/SeekableStreamSupervisorStateTest.java b/indexing-service/src/test/java/org/apache/druid/indexing/seekablestream/supervisor/SeekableStreamSupervisorStateTest.java index 1058eb01e07..ae55fffc637 100644 --- a/indexing-service/src/test/java/org/apache/druid/indexing/seekablestream/supervisor/SeekableStreamSupervisorStateTest.java +++ b/indexing-service/src/test/java/org/apache/druid/indexing/seekablestream/supervisor/SeekableStreamSupervisorStateTest.java @@ -69,6 +69,7 @@ import org.apache.druid.segment.indexing.DataSchema; import org.apache.druid.segment.indexing.granularity.UniformGranularitySpec; import org.apache.druid.segment.realtime.firehose.ChatHandlerProvider; import org.apache.druid.server.security.AuthorizerMapper; +import org.easymock.EasyMock; import org.easymock.EasyMockSupport; import org.joda.time.DateTime; import org.joda.time.Duration; @@ -89,12 +90,6 @@ import java.util.TreeMap; import java.util.concurrent.Executor; import java.util.concurrent.ScheduledExecutorService; -import static org.easymock.EasyMock.anyInt; -import static org.easymock.EasyMock.anyLong; -import static org.easymock.EasyMock.anyObject; -import static org.easymock.EasyMock.anyString; -import static org.easymock.EasyMock.expect; - public class SeekableStreamSupervisorStateTest extends EasyMockSupport { private static final ObjectMapper objectMapper = TestHelper.makeJsonMapper(); @@ -134,31 +129,37 @@ public class SeekableStreamSupervisorStateTest extends EasyMockSupport supervisorConfig = new SupervisorStateManagerConfig(); - expect(spec.getSupervisorStateManagerConfig()).andReturn(supervisorConfig).anyTimes(); + EasyMock.expect(spec.getSupervisorStateManagerConfig()).andReturn(supervisorConfig).anyTimes(); - expect(spec.getDataSchema()).andReturn(getDataSchema()).anyTimes(); - expect(spec.getIoConfig()).andReturn(getIOConfig()).anyTimes(); - expect(spec.getTuningConfig()).andReturn(getTuningConfig()).anyTimes(); + EasyMock.expect(spec.getDataSchema()).andReturn(getDataSchema()).anyTimes(); + EasyMock.expect(spec.getIoConfig()).andReturn(getIOConfig()).anyTimes(); + EasyMock.expect(spec.getTuningConfig()).andReturn(getTuningConfig()).anyTimes(); - expect(taskClientFactory.build(anyObject(), anyString(), anyInt(), anyObject(), anyLong())).andReturn( + EasyMock.expect(taskClientFactory.build( + EasyMock.anyObject(), + EasyMock.anyString(), + EasyMock.anyInt(), + EasyMock.anyObject(), + EasyMock.anyLong() + )).andReturn( indexTaskClient).anyTimes(); - expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); - expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); + EasyMock.expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes(); + EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes(); - taskRunner.registerListener(anyObject(TaskRunnerListener.class), anyObject(Executor.class)); + taskRunner.registerListener(EasyMock.anyObject(TaskRunnerListener.class), EasyMock.anyObject(Executor.class)); - 
expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn(null).anyTimes(); - expect(recordSupplier.getAssignment()).andReturn(ImmutableSet.of(shard0Partition)).anyTimes(); - expect(recordSupplier.getLatestSequenceNumber(anyObject())).andReturn("10").anyTimes(); + EasyMock.expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn(null).anyTimes(); + EasyMock.expect(recordSupplier.getAssignment()).andReturn(ImmutableSet.of(shard0Partition)).anyTimes(); + EasyMock.expect(recordSupplier.getLatestSequenceNumber(EasyMock.anyObject())).andReturn("10").anyTimes(); } @Test public void testRunning() throws Exception { - expect(spec.isSuspended()).andReturn(false).anyTimes(); - expect(recordSupplier.getPartitionIds(STREAM)).andReturn(ImmutableSet.of(SHARD_ID)).anyTimes(); - expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); - expect(taskQueue.add(anyObject())).andReturn(true).anyTimes(); + EasyMock.expect(spec.isSuspended()).andReturn(false).anyTimes(); + EasyMock.expect(recordSupplier.getPartitionIds(STREAM)).andReturn(ImmutableSet.of(SHARD_ID)).anyTimes(); + EasyMock.expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); + EasyMock.expect(taskQueue.add(EasyMock.anyObject())).andReturn(true).anyTimes(); replayAll(); @@ -194,11 +195,12 @@ public class SeekableStreamSupervisorStateTest extends EasyMockSupport @Test public void testConnectingToStreamFail() throws Exception { - expect(spec.isSuspended()).andReturn(false).anyTimes(); - expect(recordSupplier.getPartitionIds(STREAM)).andThrow(new StreamException(new IllegalStateException(EXCEPTION_MSG))) - .anyTimes(); - expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); - expect(taskQueue.add(anyObject())).andReturn(true).anyTimes(); + EasyMock.expect(spec.isSuspended()).andReturn(false).anyTimes(); + EasyMock.expect(recordSupplier.getPartitionIds(STREAM)) + .andThrow(new StreamException(new IllegalStateException(EXCEPTION_MSG))) + .anyTimes(); + EasyMock.expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); + EasyMock.expect(taskQueue.add(EasyMock.anyObject())).andReturn(true).anyTimes(); replayAll(); @@ -248,13 +250,17 @@ public class SeekableStreamSupervisorStateTest extends EasyMockSupport @Test public void testConnectingToStreamFailRecoveryFailRecovery() throws Exception { - expect(spec.isSuspended()).andReturn(false).anyTimes(); - expect(recordSupplier.getPartitionIds(STREAM)).andThrow(new StreamException(new IllegalStateException())).times(3); - expect(recordSupplier.getPartitionIds(STREAM)).andReturn(ImmutableSet.of(SHARD_ID)).times(3); - expect(recordSupplier.getPartitionIds(STREAM)).andThrow(new StreamException(new IllegalStateException())).times(3); - expect(recordSupplier.getPartitionIds(STREAM)).andReturn(ImmutableSet.of(SHARD_ID)).times(3); - expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); - expect(taskQueue.add(anyObject())).andReturn(true).anyTimes(); + EasyMock.expect(spec.isSuspended()).andReturn(false).anyTimes(); + EasyMock.expect(recordSupplier.getPartitionIds(STREAM)) + .andThrow(new StreamException(new IllegalStateException())) + .times(3); + EasyMock.expect(recordSupplier.getPartitionIds(STREAM)).andReturn(ImmutableSet.of(SHARD_ID)).times(3); + EasyMock.expect(recordSupplier.getPartitionIds(STREAM)) + .andThrow(new StreamException(new IllegalStateException())) + .times(3); + 
EasyMock.expect(recordSupplier.getPartitionIds(STREAM)).andReturn(ImmutableSet.of(SHARD_ID)).times(3); + EasyMock.expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); + EasyMock.expect(taskQueue.add(EasyMock.anyObject())).andReturn(true).anyTimes(); replayAll(); @@ -317,12 +323,12 @@ public class SeekableStreamSupervisorStateTest extends EasyMockSupport @Test public void testDiscoveringInitialTasksFailRecoveryFail() throws Exception { - expect(spec.isSuspended()).andReturn(false).anyTimes(); - expect(recordSupplier.getPartitionIds(STREAM)).andReturn(ImmutableSet.of(SHARD_ID)).anyTimes(); - expect(taskStorage.getActiveTasks()).andThrow(new IllegalStateException(EXCEPTION_MSG)).times(3); - expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).times(3); - expect(taskStorage.getActiveTasks()).andThrow(new IllegalStateException(EXCEPTION_MSG)).times(3); - expect(taskQueue.add(anyObject())).andReturn(true).anyTimes(); + EasyMock.expect(spec.isSuspended()).andReturn(false).anyTimes(); + EasyMock.expect(recordSupplier.getPartitionIds(STREAM)).andReturn(ImmutableSet.of(SHARD_ID)).anyTimes(); + EasyMock.expect(taskStorage.getActiveTasks()).andThrow(new IllegalStateException(EXCEPTION_MSG)).times(3); + EasyMock.expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).times(3); + EasyMock.expect(taskStorage.getActiveTasks()).andThrow(new IllegalStateException(EXCEPTION_MSG)).times(3); + EasyMock.expect(taskQueue.add(EasyMock.anyObject())).andReturn(true).anyTimes(); replayAll(); @@ -393,12 +399,12 @@ public class SeekableStreamSupervisorStateTest extends EasyMockSupport @Test public void testCreatingTasksFailRecoveryFail() throws Exception { - expect(spec.isSuspended()).andReturn(false).anyTimes(); - expect(recordSupplier.getPartitionIds(STREAM)).andReturn(ImmutableSet.of(SHARD_ID)).anyTimes(); - expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); - expect(taskQueue.add(anyObject())).andThrow(new IllegalStateException(EXCEPTION_MSG)).times(3); - expect(taskQueue.add(anyObject())).andReturn(true).times(3); - expect(taskQueue.add(anyObject())).andThrow(new IllegalStateException(EXCEPTION_MSG)).times(3); + EasyMock.expect(spec.isSuspended()).andReturn(false).anyTimes(); + EasyMock.expect(recordSupplier.getPartitionIds(STREAM)).andReturn(ImmutableSet.of(SHARD_ID)).anyTimes(); + EasyMock.expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); + EasyMock.expect(taskQueue.add(EasyMock.anyObject())).andThrow(new IllegalStateException(EXCEPTION_MSG)).times(3); + EasyMock.expect(taskQueue.add(EasyMock.anyObject())).andReturn(true).times(3); + EasyMock.expect(taskQueue.add(EasyMock.anyObject())).andThrow(new IllegalStateException(EXCEPTION_MSG)).times(3); replayAll(); @@ -470,10 +476,10 @@ public class SeekableStreamSupervisorStateTest extends EasyMockSupport @Test public void testSuspended() throws Exception { - expect(spec.isSuspended()).andReturn(true).anyTimes(); - expect(recordSupplier.getPartitionIds(STREAM)).andReturn(ImmutableSet.of(SHARD_ID)).anyTimes(); - expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); - expect(taskQueue.add(anyObject())).andReturn(true).anyTimes(); + EasyMock.expect(spec.isSuspended()).andReturn(true).anyTimes(); + EasyMock.expect(recordSupplier.getPartitionIds(STREAM)).andReturn(ImmutableSet.of(SHARD_ID)).anyTimes(); + EasyMock.expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); + 
EasyMock.expect(taskQueue.add(EasyMock.anyObject())).andReturn(true).anyTimes(); replayAll(); @@ -509,10 +515,10 @@ public class SeekableStreamSupervisorStateTest extends EasyMockSupport @Test public void testStopping() throws Exception { - expect(spec.isSuspended()).andReturn(false).anyTimes(); - expect(recordSupplier.getPartitionIds(STREAM)).andReturn(ImmutableSet.of(SHARD_ID)).anyTimes(); - expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); - expect(taskQueue.add(anyObject())).andReturn(true).anyTimes(); + EasyMock.expect(spec.isSuspended()).andReturn(false).anyTimes(); + EasyMock.expect(recordSupplier.getPartitionIds(STREAM)).andReturn(ImmutableSet.of(SHARD_ID)).anyTimes(); + EasyMock.expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes(); + EasyMock.expect(taskQueue.add(EasyMock.anyObject())).andReturn(true).anyTimes(); taskRunner.unregisterListener("testSupervisorId"); indexTaskClient.close(); diff --git a/processing/src/test/java/org/apache/druid/query/aggregation/AggregatorUtilTest.java b/processing/src/test/java/org/apache/druid/query/aggregation/AggregatorUtilTest.java index f6ff6db55c5..49f92b75a4b 100644 --- a/processing/src/test/java/org/apache/druid/query/aggregation/AggregatorUtilTest.java +++ b/processing/src/test/java/org/apache/druid/query/aggregation/AggregatorUtilTest.java @@ -34,8 +34,6 @@ import java.util.Arrays; import java.util.Collections; import java.util.List; -import static org.apache.druid.query.QueryRunnerTestHelper.dependentPostAggMetric; - public class AggregatorUtilTest { @@ -130,7 +128,7 @@ public class AggregatorUtilTest Pair, List> aggregatorsPair = AggregatorUtil.condensedAggregators( aggregatorFactories, postAggregatorList, - dependentPostAggMetric + QueryRunnerTestHelper.dependentPostAggMetric ); // verify aggregators Assert.assertEquals( diff --git a/processing/src/test/java/org/apache/druid/query/cache/CacheKeyBuilderTest.java b/processing/src/test/java/org/apache/druid/query/cache/CacheKeyBuilderTest.java index 3133044fea9..13077b9d978 100644 --- a/processing/src/test/java/org/apache/druid/query/cache/CacheKeyBuilderTest.java +++ b/processing/src/test/java/org/apache/druid/query/cache/CacheKeyBuilderTest.java @@ -23,6 +23,7 @@ import com.google.common.collect.ImmutableList; import com.google.common.collect.Lists; import org.apache.druid.java.util.common.Cacheable; import org.apache.druid.java.util.common.StringUtils; +import org.junit.Assert; import org.junit.Test; import java.nio.ByteBuffer; @@ -31,23 +32,12 @@ import java.util.Arrays; import java.util.Collections; import java.util.List; -import static org.junit.Assert.assertArrayEquals; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; - public class CacheKeyBuilderTest { @Test public void testCacheKeyBuilder() { - final Cacheable cacheable = new Cacheable() - { - @Override - public byte[] getCacheKey() - { - return new byte[]{10, 20}; - } - }; + final Cacheable cacheable = () -> new byte[]{10, 20}; final byte[] actual = new CacheKeyBuilder((byte) 10) .appendBoolean(false) @@ -75,7 +65,7 @@ public class CacheKeyBuilderTest + cacheable.getCacheKey().length // cacheable + Integer.BYTES + 4 // cacheable list + 11; // type keys - assertEquals(expectedSize, actual.length); + Assert.assertEquals(expectedSize, actual.length); final byte[] expected = ByteBuffer.allocate(expectedSize) .put((byte) 10) @@ -108,7 +98,7 @@ public class CacheKeyBuilderTest .put(cacheable.getCacheKey()) .array(); - 
assertArrayEquals(expected, actual); + Assert.assertArrayEquals(expected, actual); } @Test @@ -122,25 +112,11 @@ public class CacheKeyBuilderTest .appendStringsIgnoringOrder(Lists.newArrayList("BA", "AB")) .build(); - assertArrayEquals(key1, key2); + Assert.assertArrayEquals(key1, key2); - final Cacheable cacheable1 = new Cacheable() - { - @Override - public byte[] getCacheKey() - { - return new byte[]{1}; - } - }; + final Cacheable cacheable1 = () -> new byte[]{1}; - final Cacheable cacheable2 = new Cacheable() - { - @Override - public byte[] getCacheKey() - { - return new byte[]{2}; - } - }; + final Cacheable cacheable2 = () -> new byte[]{2}; key1 = new CacheKeyBuilder((byte) 10) .appendCacheablesIgnoringOrder(Lists.newArrayList(cacheable1, cacheable2)) @@ -150,7 +126,7 @@ public class CacheKeyBuilderTest .appendCacheablesIgnoringOrder(Lists.newArrayList(cacheable2, cacheable1)) .build(); - assertArrayEquals(key1, key2); + Assert.assertArrayEquals(key1, key2); } @Test @@ -222,23 +198,9 @@ public class CacheKeyBuilderTest @Test public void testNotEqualCacheables() { - final Cacheable test = new Cacheable() - { - @Override - public byte[] getCacheKey() - { - return StringUtils.toUtf8("test"); - } - }; + final Cacheable test = () -> StringUtils.toUtf8("test"); - final Cacheable testtest = new Cacheable() - { - @Override - public byte[] getCacheKey() - { - return StringUtils.toUtf8("testtest"); - } - }; + final Cacheable testtest = () -> StringUtils.toUtf8("testtest"); final List keys = new ArrayList<>(); keys.add( @@ -287,7 +249,7 @@ public class CacheKeyBuilderTest { for (int i = 0; i < keys.size(); i++) { for (int j = i + 1; j < keys.size(); j++) { - assertFalse(Arrays.equals(keys.get(i), keys.get(j))); + Assert.assertFalse(Arrays.equals(keys.get(i), keys.get(j))); } } } @@ -303,17 +265,17 @@ public class CacheKeyBuilderTest .appendStrings(Collections.singletonList("")) .build(); - assertFalse(Arrays.equals(key1, key2)); + Assert.assertFalse(Arrays.equals(key1, key2)); key1 = new CacheKeyBuilder((byte) 10) .appendStrings(Collections.singletonList("")) .build(); key2 = new CacheKeyBuilder((byte) 10) - .appendStrings(Collections.singletonList((String) null)) + .appendStrings(Collections.singletonList(null)) .build(); - assertArrayEquals(key1, key2); + Assert.assertArrayEquals(key1, key2); } @Test @@ -324,10 +286,10 @@ public class CacheKeyBuilderTest .build(); final byte[] key2 = new CacheKeyBuilder((byte) 10) - .appendCacheables(Collections.singletonList((Cacheable) null)) + .appendCacheables(Collections.singletonList(null)) .build(); - assertFalse(Arrays.equals(key1, key2)); + Assert.assertFalse(Arrays.equals(key1, key2)); } @Test @@ -348,34 +310,13 @@ public class CacheKeyBuilderTest .put(StringUtils.toUtf8("test2")) .array(); - assertArrayEquals(expected, actual); + Assert.assertArrayEquals(expected, actual); - final Cacheable c1 = new Cacheable() - { - @Override - public byte[] getCacheKey() - { - return StringUtils.toUtf8("te"); - } - }; + final Cacheable c1 = () -> StringUtils.toUtf8("te"); - final Cacheable c2 = new Cacheable() - { - @Override - public byte[] getCacheKey() - { - return StringUtils.toUtf8("test1"); - } - }; + final Cacheable c2 = () -> StringUtils.toUtf8("test1"); - final Cacheable c3 = new Cacheable() - { - @Override - public byte[] getCacheKey() - { - return StringUtils.toUtf8("test2"); - } - }; + final Cacheable c3 = () -> StringUtils.toUtf8("test2"); actual = new CacheKeyBuilder((byte) 10) .appendCacheablesIgnoringOrder(Lists.newArrayList(c3, c2, c1)) @@ -390,6 
+331,6 @@ public class CacheKeyBuilderTest .put(c3.getCacheKey()) .array(); - assertArrayEquals(expected, actual); + Assert.assertArrayEquals(expected, actual); } } diff --git a/processing/src/test/java/org/apache/druid/query/groupby/GroupByQueryMergeBufferTest.java b/processing/src/test/java/org/apache/druid/query/groupby/GroupByQueryMergeBufferTest.java index 36e55051e5f..ed170d78fab 100644 --- a/processing/src/test/java/org/apache/druid/query/groupby/GroupByQueryMergeBufferTest.java +++ b/processing/src/test/java/org/apache/druid/query/groupby/GroupByQueryMergeBufferTest.java @@ -42,6 +42,7 @@ import org.apache.druid.query.groupby.strategy.GroupByStrategySelector; import org.apache.druid.query.groupby.strategy.GroupByStrategyV1; import org.apache.druid.query.groupby.strategy.GroupByStrategyV2; import org.junit.AfterClass; +import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.junit.runner.RunWith; @@ -54,8 +55,6 @@ import java.util.Collection; import java.util.Collections; import java.util.List; -import static org.junit.Assert.assertEquals; - @RunWith(Parameterized.class) public class GroupByQueryMergeBufferTest { @@ -65,7 +64,7 @@ public class GroupByQueryMergeBufferTest { private int minRemainBufferNum; - public TestBlockingPool(Supplier generator, int limit) + TestBlockingPool(Supplier generator, int limit) { super(generator, limit); minRemainBufferNum = limit; @@ -93,18 +92,18 @@ public class GroupByQueryMergeBufferTest return holder; } - public void resetMinRemainBufferNum() + void resetMinRemainBufferNum() { minRemainBufferNum = PROCESSING_CONFIG.getNumMergeBuffers(); } - public int getMinRemainBufferNum() + int getMinRemainBufferNum() { return minRemainBufferNum; } } - public static final DruidProcessingConfig PROCESSING_CONFIG = new DruidProcessingConfig() + private static final DruidProcessingConfig PROCESSING_CONFIG = new DruidProcessingConfig() { @Override public String getFormatString() @@ -164,25 +163,11 @@ public class GroupByQueryMergeBufferTest private static final CloseableStupidPool bufferPool = new CloseableStupidPool<>( "GroupByQueryEngine-bufferPool", - new Supplier() - { - @Override - public ByteBuffer get() - { - return ByteBuffer.allocateDirect(PROCESSING_CONFIG.intermediateComputeSizeBytes()); - } - } + () -> ByteBuffer.allocateDirect(PROCESSING_CONFIG.intermediateComputeSizeBytes()) ); private static final TestBlockingPool mergeBufferPool = new TestBlockingPool( - new Supplier() - { - @Override - public ByteBuffer get() - { - return ByteBuffer.allocateDirect(PROCESSING_CONFIG.intermediateComputeSizeBytes()); - } - }, + () -> ByteBuffer.allocateDirect(PROCESSING_CONFIG.intermediateComputeSizeBytes()), PROCESSING_CONFIG.getNumMergeBuffers() ); @@ -198,7 +183,7 @@ public class GroupByQueryMergeBufferTest } ); - private QueryRunner runner; + private final QueryRunner runner; @AfterClass public static void teardownClass() @@ -242,8 +227,8 @@ public class GroupByQueryMergeBufferTest GroupByQueryRunnerTestHelper.runQuery(factory, runner, query); - assertEquals(2, mergeBufferPool.getMinRemainBufferNum()); - assertEquals(3, mergeBufferPool.getPoolSize()); + Assert.assertEquals(2, mergeBufferPool.getMinRemainBufferNum()); + Assert.assertEquals(3, mergeBufferPool.getPoolSize()); } @Test @@ -270,8 +255,8 @@ public class GroupByQueryMergeBufferTest GroupByQueryRunnerTestHelper.runQuery(factory, runner, query); - assertEquals(1, mergeBufferPool.getMinRemainBufferNum()); - assertEquals(3, mergeBufferPool.getPoolSize()); + Assert.assertEquals(1, 
mergeBufferPool.getMinRemainBufferNum()); + Assert.assertEquals(3, mergeBufferPool.getPoolSize()); } @Test @@ -310,8 +295,8 @@ public class GroupByQueryMergeBufferTest GroupByQueryRunnerTestHelper.runQuery(factory, runner, query); // This should be 0 because the broker needs 2 buffers and the queryable node needs one. - assertEquals(0, mergeBufferPool.getMinRemainBufferNum()); - assertEquals(3, mergeBufferPool.getPoolSize()); + Assert.assertEquals(0, mergeBufferPool.getMinRemainBufferNum()); + Assert.assertEquals(3, mergeBufferPool.getPoolSize()); } @Test @@ -363,7 +348,7 @@ public class GroupByQueryMergeBufferTest GroupByQueryRunnerTestHelper.runQuery(factory, runner, query); // This should be 0 because the broker needs 2 buffers and the queryable node needs one. - assertEquals(0, mergeBufferPool.getMinRemainBufferNum()); - assertEquals(3, mergeBufferPool.getPoolSize()); + Assert.assertEquals(0, mergeBufferPool.getMinRemainBufferNum()); + Assert.assertEquals(3, mergeBufferPool.getPoolSize()); } } diff --git a/processing/src/test/java/org/apache/druid/query/groupby/having/DimensionSelectorHavingSpecTest.java b/processing/src/test/java/org/apache/druid/query/groupby/having/DimensionSelectorHavingSpecTest.java index 3e6236a283f..b03be7bf386 100644 --- a/processing/src/test/java/org/apache/druid/query/groupby/having/DimensionSelectorHavingSpecTest.java +++ b/processing/src/test/java/org/apache/druid/query/groupby/having/DimensionSelectorHavingSpecTest.java @@ -32,12 +32,6 @@ import org.junit.Test; import java.util.Map; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertNotEquals; -import static org.junit.Assert.assertTrue; - - public class DimensionSelectorHavingSpecTest { @@ -58,7 +52,7 @@ public class DimensionSelectorHavingSpecTest ); ObjectMapper mapper = new DefaultObjectMapper(); - assertEquals(dimHavingSpec, mapper.convertValue(dimSelectMap, DimensionSelectorHavingSpec.class)); + Assert.assertEquals(dimHavingSpec, mapper.convertValue(dimSelectMap, DimensionSelectorHavingSpec.class)); } @Test @@ -83,14 +77,13 @@ public class DimensionSelectorHavingSpecTest HavingSpec dimHavingSpec13 = new DimensionSelectorHavingSpec("dim", "value", extractionFn1); HavingSpec dimHavingSpec14 = new DimensionSelectorHavingSpec("dim", "value", extractionFn2); - assertEquals(dimHavingSpec1, dimHavingSpec2); - assertNotEquals(dimHavingSpec3, dimHavingSpec4); - assertNotEquals(dimHavingSpec5, dimHavingSpec6); - assertEquals(dimHavingSpec7, dimHavingSpec8); - assertNotEquals(dimHavingSpec9, dimHavingSpec10); - assertNotEquals(dimHavingSpec11, dimHavingSpec12); - assertNotEquals(dimHavingSpec13, dimHavingSpec14); - + Assert.assertEquals(dimHavingSpec1, dimHavingSpec2); + Assert.assertNotEquals(dimHavingSpec3, dimHavingSpec4); + Assert.assertNotEquals(dimHavingSpec5, dimHavingSpec6); + Assert.assertEquals(dimHavingSpec7, dimHavingSpec8); + Assert.assertNotEquals(dimHavingSpec9, dimHavingSpec10); + Assert.assertNotEquals(dimHavingSpec11, dimHavingSpec12); + Assert.assertNotEquals(dimHavingSpec13, dimHavingSpec14); } @Test @@ -98,22 +91,23 @@ public class DimensionSelectorHavingSpecTest { ExtractionFn extractionFn = new RegexDimExtractionFn("^([^,]*),", false, ""); String expected = "DimensionSelectorHavingSpec{" + - "dimension='gender'," + - " value='m'," + - " extractionFn=regex(/^([^,]*),/, 1)}"; + "dimension='gender'," + + " value='m'," + + " extractionFn=regex(/^([^,]*),/, 1)}"; Assert.assertEquals(expected, new 
DimensionSelectorHavingSpec("gender", "m", extractionFn).toString()); - + expected = "DimensionSelectorHavingSpec{" + - "dimension='gender'," + - " value='m'," + - " extractionFn=Identity}"; - + "dimension='gender'," + + " value='m'," + + " extractionFn=Identity}"; + Assert.assertEquals(expected, new DimensionSelectorHavingSpec("gender", "m", null).toString()); } @Test(expected = NullPointerException.class) public void testNullDimension() { + //noinspection ResultOfObjectAllocationIgnored (result is not needed) new DimensionSelectorHavingSpec(null, "value", null); } @@ -121,34 +115,33 @@ public class DimensionSelectorHavingSpecTest public void testDimensionFilterSpec() { DimensionSelectorHavingSpec spec = new DimensionSelectorHavingSpec("dimension", "v", null); - assertTrue(spec.eval(getTestRow("v"))); - assertTrue(spec.eval(getTestRow(ImmutableList.of("v", "v1")))); - assertFalse(spec.eval(getTestRow(ImmutableList.of()))); - assertFalse(spec.eval(getTestRow("v1"))); + Assert.assertTrue(spec.eval(getTestRow("v"))); + Assert.assertTrue(spec.eval(getTestRow(ImmutableList.of("v", "v1")))); + Assert.assertFalse(spec.eval(getTestRow(ImmutableList.of()))); + Assert.assertFalse(spec.eval(getTestRow("v1"))); spec = new DimensionSelectorHavingSpec("dimension", null, null); - assertTrue(spec.eval(getTestRow(ImmutableList.of()))); - assertTrue(spec.eval(getTestRow(ImmutableList.of("")))); - assertFalse(spec.eval(getTestRow(ImmutableList.of("v")))); - assertFalse(spec.eval(getTestRow(ImmutableList.of("v", "v1")))); + Assert.assertTrue(spec.eval(getTestRow(ImmutableList.of()))); + Assert.assertTrue(spec.eval(getTestRow(ImmutableList.of("")))); + Assert.assertFalse(spec.eval(getTestRow(ImmutableList.of("v")))); + Assert.assertFalse(spec.eval(getTestRow(ImmutableList.of("v", "v1")))); spec = new DimensionSelectorHavingSpec("dimension", "", null); - assertTrue(spec.eval(getTestRow(ImmutableList.of()))); - assertTrue(spec.eval(getTestRow(ImmutableList.of("")))); - assertTrue(spec.eval(getTestRow(ImmutableList.of("v", "v1", "")))); - assertFalse(spec.eval(getTestRow(ImmutableList.of("v")))); - assertFalse(spec.eval(getTestRow(ImmutableList.of("v", "v1")))); + Assert.assertTrue(spec.eval(getTestRow(ImmutableList.of()))); + Assert.assertTrue(spec.eval(getTestRow(ImmutableList.of("")))); + Assert.assertTrue(spec.eval(getTestRow(ImmutableList.of("v", "v1", "")))); + Assert.assertFalse(spec.eval(getTestRow(ImmutableList.of("v")))); + Assert.assertFalse(spec.eval(getTestRow(ImmutableList.of("v", "v1")))); ExtractionFn extractionFn = new RegexDimExtractionFn("^([^,]*),", true, "default"); spec = new DimensionSelectorHavingSpec("dimension", "v", extractionFn); - assertTrue(spec.eval(getTestRow(ImmutableList.of("v,v1", "v2,v3")))); - assertFalse(spec.eval(getTestRow(ImmutableList.of("v1,v4")))); - assertFalse(spec.eval(getTestRow(ImmutableList.of("v")))); - assertFalse(spec.eval(getTestRow(ImmutableList.of("v1", "default")))); - assertTrue(spec.eval(getTestRow(ImmutableList.of("v,default", "none")))); - - spec = new DimensionSelectorHavingSpec("dimension", "default", extractionFn); - assertTrue(spec.eval(getTestRow(ImmutableList.of("v1,v2", "none")))); + Assert.assertTrue(spec.eval(getTestRow(ImmutableList.of("v,v1", "v2,v3")))); + Assert.assertFalse(spec.eval(getTestRow(ImmutableList.of("v1,v4")))); + Assert.assertFalse(spec.eval(getTestRow(ImmutableList.of("v")))); + Assert.assertFalse(spec.eval(getTestRow(ImmutableList.of("v1", "default")))); + Assert.assertTrue(spec.eval(getTestRow(ImmutableList.of("v,default", 
"none")))); + spec = new DimensionSelectorHavingSpec("dimension", "default", extractionFn); + Assert.assertTrue(spec.eval(getTestRow(ImmutableList.of("v1,v2", "none")))); } } diff --git a/processing/src/test/java/org/apache/druid/query/groupby/having/HavingSpecTest.java b/processing/src/test/java/org/apache/druid/query/groupby/having/HavingSpecTest.java index a0e1d745711..752126e1799 100644 --- a/processing/src/test/java/org/apache/druid/query/groupby/having/HavingSpecTest.java +++ b/processing/src/test/java/org/apache/druid/query/groupby/having/HavingSpecTest.java @@ -27,6 +27,7 @@ import org.apache.druid.data.input.Row; import org.apache.druid.jackson.DefaultObjectMapper; import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.query.cache.CacheKeyBuilder; +import org.junit.Assert; import org.junit.Test; import java.util.ArrayList; @@ -35,28 +36,23 @@ import java.util.List; import java.util.Map; import java.util.concurrent.atomic.AtomicInteger; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; - - public class HavingSpecTest { private static final Row ROW = new MapBasedInputRow( 0, new ArrayList<>(), - ImmutableMap.of("metric", Float.valueOf(10)) + ImmutableMap.of("metric", 10f) ); @Test public void testHavingClauseSerde() { List havings = Arrays.asList( - new GreaterThanHavingSpec("agg", Double.valueOf(1.3)), + new GreaterThanHavingSpec("agg", 1.3), new OrHavingSpec( Arrays.asList( - new LessThanHavingSpec("lessAgg", Long.valueOf(1L)), - new NotHavingSpec(new EqualToHavingSpec("equalAgg", Double.valueOf(2))) + new LessThanHavingSpec("lessAgg", 1L), + new NotHavingSpec(new EqualToHavingSpec("equalAgg", 2.0)) ) ) ); @@ -91,7 +87,7 @@ public class HavingSpecTest ); ObjectMapper mapper = new DefaultObjectMapper(); - assertEquals(andHavingSpec, mapper.convertValue(payloadMap, AndHavingSpec.class)); + Assert.assertEquals(andHavingSpec, mapper.convertValue(payloadMap, AndHavingSpec.class)); } @Test(expected = IllegalArgumentException.class) @@ -110,111 +106,111 @@ public class HavingSpecTest @Test public void testGreaterThanHavingSpec() { - GreaterThanHavingSpec spec = new GreaterThanHavingSpec("metric", Long.valueOf(Long.MAX_VALUE - 10)); - assertFalse(spec.eval(getTestRow(Long.valueOf(Long.MAX_VALUE - 10)))); - assertFalse(spec.eval(getTestRow(Long.valueOf(Long.MAX_VALUE - 15)))); - assertTrue(spec.eval(getTestRow(Long.valueOf(Long.MAX_VALUE - 5)))); - assertTrue(spec.eval(getTestRow(String.valueOf(Long.MAX_VALUE - 5)))); - assertFalse(spec.eval(getTestRow(100.05f))); + GreaterThanHavingSpec spec = new GreaterThanHavingSpec("metric", Long.MAX_VALUE - 10); + Assert.assertFalse(spec.eval(getTestRow(Long.MAX_VALUE - 10))); + Assert.assertFalse(spec.eval(getTestRow(Long.MAX_VALUE - 15))); + Assert.assertTrue(spec.eval(getTestRow(Long.MAX_VALUE - 5))); + Assert.assertTrue(spec.eval(getTestRow(String.valueOf(Long.MAX_VALUE - 5)))); + Assert.assertFalse(spec.eval(getTestRow(100.05f))); spec = new GreaterThanHavingSpec("metric", 100.56f); - assertFalse(spec.eval(getTestRow(100.56f))); - assertFalse(spec.eval(getTestRow(90.53f))); - assertFalse(spec.eval(getTestRow("90.53f"))); - assertTrue(spec.eval(getTestRow(101.34f))); - assertTrue(spec.eval(getTestRow(Long.MAX_VALUE))); + Assert.assertFalse(spec.eval(getTestRow(100.56f))); + Assert.assertFalse(spec.eval(getTestRow(90.53f))); + Assert.assertFalse(spec.eval(getTestRow("90.53f"))); + Assert.assertTrue(spec.eval(getTestRow(101.34f))); + 
Assert.assertTrue(spec.eval(getTestRow(Long.MAX_VALUE))); } @Test public void testLessThanHavingSpec() { - LessThanHavingSpec spec = new LessThanHavingSpec("metric", Long.valueOf(Long.MAX_VALUE - 10)); - assertFalse(spec.eval(getTestRow(Long.valueOf(Long.MAX_VALUE - 10)))); - assertTrue(spec.eval(getTestRow(Long.valueOf(Long.MAX_VALUE - 15)))); - assertTrue(spec.eval(getTestRow(String.valueOf(Long.MAX_VALUE - 15)))); - assertFalse(spec.eval(getTestRow(Long.valueOf(Long.MAX_VALUE - 5)))); - assertTrue(spec.eval(getTestRow(100.05f))); + LessThanHavingSpec spec = new LessThanHavingSpec("metric", Long.MAX_VALUE - 10); + Assert.assertFalse(spec.eval(getTestRow(Long.MAX_VALUE - 10))); + Assert.assertTrue(spec.eval(getTestRow(Long.MAX_VALUE - 15))); + Assert.assertTrue(spec.eval(getTestRow(String.valueOf(Long.MAX_VALUE - 15)))); + Assert.assertFalse(spec.eval(getTestRow(Long.MAX_VALUE - 5))); + Assert.assertTrue(spec.eval(getTestRow(100.05f))); spec = new LessThanHavingSpec("metric", 100.56f); - assertFalse(spec.eval(getTestRow(100.56f))); - assertTrue(spec.eval(getTestRow(90.53f))); - assertFalse(spec.eval(getTestRow(101.34f))); - assertFalse(spec.eval(getTestRow("101.34f"))); - assertFalse(spec.eval(getTestRow(Long.MAX_VALUE))); + Assert.assertFalse(spec.eval(getTestRow(100.56f))); + Assert.assertTrue(spec.eval(getTestRow(90.53f))); + Assert.assertFalse(spec.eval(getTestRow(101.34f))); + Assert.assertFalse(spec.eval(getTestRow("101.34f"))); + Assert.assertFalse(spec.eval(getTestRow(Long.MAX_VALUE))); } private Row getTestRow(Object metricValue) { - return new MapBasedInputRow(0, new ArrayList(), ImmutableMap.of("metric", metricValue)); + return new MapBasedInputRow(0, new ArrayList<>(), ImmutableMap.of("metric", metricValue)); } @Test public void testEqualHavingSpec() { - EqualToHavingSpec spec = new EqualToHavingSpec("metric", Long.valueOf(Long.MAX_VALUE - 10)); - assertTrue(spec.eval(getTestRow(Long.valueOf(Long.MAX_VALUE - 10)))); - assertFalse(spec.eval(getTestRow(Long.valueOf(Long.MAX_VALUE - 5)))); - assertFalse(spec.eval(getTestRow(100.05f))); + EqualToHavingSpec spec = new EqualToHavingSpec("metric", Long.MAX_VALUE - 10); + Assert.assertTrue(spec.eval(getTestRow(Long.MAX_VALUE - 10))); + Assert.assertFalse(spec.eval(getTestRow(Long.MAX_VALUE - 5))); + Assert.assertFalse(spec.eval(getTestRow(100.05f))); spec = new EqualToHavingSpec("metric", 100.56f); - assertFalse(spec.eval(getTestRow(100L))); - assertFalse(spec.eval(getTestRow(100.0))); - assertFalse(spec.eval(getTestRow(100d))); - assertFalse(spec.eval(getTestRow(100.56d))); // False since 100.56d != (double) 100.56f - assertFalse(spec.eval(getTestRow(90.53d))); - assertTrue(spec.eval(getTestRow(100.56f))); - assertFalse(spec.eval(getTestRow(90.53f))); - assertFalse(spec.eval(getTestRow(Long.MAX_VALUE))); + Assert.assertFalse(spec.eval(getTestRow(100L))); + Assert.assertFalse(spec.eval(getTestRow(100.0))); + Assert.assertFalse(spec.eval(getTestRow(100d))); + Assert.assertFalse(spec.eval(getTestRow(100.56d))); // False since 100.56d != (double) 100.56f + Assert.assertFalse(spec.eval(getTestRow(90.53d))); + Assert.assertTrue(spec.eval(getTestRow(100.56f))); + Assert.assertFalse(spec.eval(getTestRow(90.53f))); + Assert.assertFalse(spec.eval(getTestRow(Long.MAX_VALUE))); spec = new EqualToHavingSpec("metric", 100.56d); - assertFalse(spec.eval(getTestRow(100L))); - assertFalse(spec.eval(getTestRow(100.0))); - assertFalse(spec.eval(getTestRow(100d))); - assertTrue(spec.eval(getTestRow(100.56d))); - 
assertFalse(spec.eval(getTestRow(90.53d))); - assertFalse(spec.eval(getTestRow(100.56f))); // False since 100.56d != (double) 100.56f - assertFalse(spec.eval(getTestRow(90.53f))); - assertFalse(spec.eval(getTestRow(Long.MAX_VALUE))); + Assert.assertFalse(spec.eval(getTestRow(100L))); + Assert.assertFalse(spec.eval(getTestRow(100.0))); + Assert.assertFalse(spec.eval(getTestRow(100d))); + Assert.assertTrue(spec.eval(getTestRow(100.56d))); + Assert.assertFalse(spec.eval(getTestRow(90.53d))); + Assert.assertFalse(spec.eval(getTestRow(100.56f))); // False since 100.56d != (double) 100.56f + Assert.assertFalse(spec.eval(getTestRow(90.53f))); + Assert.assertFalse(spec.eval(getTestRow(Long.MAX_VALUE))); spec = new EqualToHavingSpec("metric", 100.0f); - assertTrue(spec.eval(getTestRow(100L))); - assertTrue(spec.eval(getTestRow(100.0))); - assertTrue(spec.eval(getTestRow(100d))); - assertFalse(spec.eval(getTestRow(100.56d))); - assertFalse(spec.eval(getTestRow(90.53d))); - assertFalse(spec.eval(getTestRow(100.56f))); - assertFalse(spec.eval(getTestRow(90.53f))); - assertFalse(spec.eval(getTestRow(Long.MAX_VALUE))); + Assert.assertTrue(spec.eval(getTestRow(100L))); + Assert.assertTrue(spec.eval(getTestRow(100.0))); + Assert.assertTrue(spec.eval(getTestRow(100d))); + Assert.assertFalse(spec.eval(getTestRow(100.56d))); + Assert.assertFalse(spec.eval(getTestRow(90.53d))); + Assert.assertFalse(spec.eval(getTestRow(100.56f))); + Assert.assertFalse(spec.eval(getTestRow(90.53f))); + Assert.assertFalse(spec.eval(getTestRow(Long.MAX_VALUE))); spec = new EqualToHavingSpec("metric", 100.0d); - assertTrue(spec.eval(getTestRow(100L))); - assertTrue(spec.eval(getTestRow(100.0))); - assertTrue(spec.eval(getTestRow(100d))); - assertFalse(spec.eval(getTestRow(100.56d))); - assertFalse(spec.eval(getTestRow(90.53d))); - assertFalse(spec.eval(getTestRow(100.56f))); - assertFalse(spec.eval(getTestRow(90.53f))); - assertFalse(spec.eval(getTestRow(Long.MAX_VALUE))); + Assert.assertTrue(spec.eval(getTestRow(100L))); + Assert.assertTrue(spec.eval(getTestRow(100.0))); + Assert.assertTrue(spec.eval(getTestRow(100d))); + Assert.assertFalse(spec.eval(getTestRow(100.56d))); + Assert.assertFalse(spec.eval(getTestRow(90.53d))); + Assert.assertFalse(spec.eval(getTestRow(100.56f))); + Assert.assertFalse(spec.eval(getTestRow(90.53f))); + Assert.assertFalse(spec.eval(getTestRow(Long.MAX_VALUE))); spec = new EqualToHavingSpec("metric", 100); - assertTrue(spec.eval(getTestRow(100L))); - assertTrue(spec.eval(getTestRow(100.0))); - assertTrue(spec.eval(getTestRow(100d))); - assertFalse(spec.eval(getTestRow(100.56d))); - assertFalse(spec.eval(getTestRow(90.53d))); - assertFalse(spec.eval(getTestRow(100.56f))); - assertFalse(spec.eval(getTestRow(90.53f))); - assertFalse(spec.eval(getTestRow(Long.MAX_VALUE))); + Assert.assertTrue(spec.eval(getTestRow(100L))); + Assert.assertTrue(spec.eval(getTestRow(100.0))); + Assert.assertTrue(spec.eval(getTestRow(100d))); + Assert.assertFalse(spec.eval(getTestRow(100.56d))); + Assert.assertFalse(spec.eval(getTestRow(90.53d))); + Assert.assertFalse(spec.eval(getTestRow(100.56f))); + Assert.assertFalse(spec.eval(getTestRow(90.53f))); + Assert.assertFalse(spec.eval(getTestRow(Long.MAX_VALUE))); spec = new EqualToHavingSpec("metric", 100L); - assertTrue(spec.eval(getTestRow(100L))); - assertTrue(spec.eval(getTestRow(100.0))); - assertTrue(spec.eval(getTestRow(100d))); - assertFalse(spec.eval(getTestRow(100.56d))); - assertFalse(spec.eval(getTestRow(90.53d))); - assertFalse(spec.eval(getTestRow(100.56f))); - 
assertFalse(spec.eval(getTestRow(90.53f))); - assertFalse(spec.eval(getTestRow(Long.MAX_VALUE))); + Assert.assertTrue(spec.eval(getTestRow(100L))); + Assert.assertTrue(spec.eval(getTestRow(100.0))); + Assert.assertTrue(spec.eval(getTestRow(100d))); + Assert.assertFalse(spec.eval(getTestRow(100.56d))); + Assert.assertFalse(spec.eval(getTestRow(90.53d))); + Assert.assertFalse(spec.eval(getTestRow(100.56f))); + Assert.assertFalse(spec.eval(getTestRow(90.53f))); + Assert.assertFalse(spec.eval(getTestRow(Long.MAX_VALUE))); } private static class CountingHavingSpec extends BaseHavingSpec @@ -251,7 +247,7 @@ public class HavingSpecTest { AtomicInteger counter = new AtomicInteger(0); AndHavingSpec spec = new AndHavingSpec(ImmutableList.of( - (HavingSpec) new CountingHavingSpec(counter, true), + new CountingHavingSpec(counter, true), new CountingHavingSpec(counter, false), new CountingHavingSpec(counter, true), new CountingHavingSpec(counter, false) @@ -259,7 +255,7 @@ public class HavingSpecTest spec.eval(ROW); - assertEquals(2, counter.get()); + Assert.assertEquals(2, counter.get()); } @Test @@ -267,7 +263,7 @@ public class HavingSpecTest { AtomicInteger counter = new AtomicInteger(0); AndHavingSpec spec = new AndHavingSpec(ImmutableList.of( - (HavingSpec) new CountingHavingSpec(counter, true), + new CountingHavingSpec(counter, true), new CountingHavingSpec(counter, true), new CountingHavingSpec(counter, true), new CountingHavingSpec(counter, true) @@ -275,11 +271,11 @@ public class HavingSpecTest spec.eval(ROW); - assertEquals(4, counter.get()); + Assert.assertEquals(4, counter.get()); counter.set(0); spec = new AndHavingSpec(ImmutableList.of( - (HavingSpec) new CountingHavingSpec(counter, false), + new CountingHavingSpec(counter, false), new CountingHavingSpec(counter, true), new CountingHavingSpec(counter, true), new CountingHavingSpec(counter, true) @@ -287,7 +283,7 @@ public class HavingSpecTest spec.eval(ROW); - assertEquals(1, counter.get()); + Assert.assertEquals(1, counter.get()); } @Test @@ -295,7 +291,7 @@ public class HavingSpecTest { AtomicInteger counter = new AtomicInteger(0); OrHavingSpec spec = new OrHavingSpec(ImmutableList.of( - (HavingSpec) new CountingHavingSpec(counter, true), + new CountingHavingSpec(counter, true), new CountingHavingSpec(counter, true), new CountingHavingSpec(counter, true), new CountingHavingSpec(counter, false) @@ -303,7 +299,7 @@ public class HavingSpecTest spec.eval(ROW); - assertEquals(1, counter.get()); + Assert.assertEquals(1, counter.get()); } @Test @@ -311,7 +307,7 @@ public class HavingSpecTest { AtomicInteger counter = new AtomicInteger(0); OrHavingSpec spec = new OrHavingSpec(ImmutableList.of( - (HavingSpec) new CountingHavingSpec(counter, false), + new CountingHavingSpec(counter, false), new CountingHavingSpec(counter, false), new CountingHavingSpec(counter, false), new CountingHavingSpec(counter, false) @@ -319,11 +315,11 @@ public class HavingSpecTest spec.eval(ROW); - assertEquals(4, counter.get()); + Assert.assertEquals(4, counter.get()); counter.set(0); spec = new OrHavingSpec(ImmutableList.of( - (HavingSpec) new CountingHavingSpec(counter, false), + new CountingHavingSpec(counter, false), new CountingHavingSpec(counter, false), new CountingHavingSpec(counter, false), new CountingHavingSpec(counter, true) @@ -331,17 +327,16 @@ public class HavingSpecTest spec.eval(ROW); - assertEquals(4, counter.get()); + Assert.assertEquals(4, counter.get()); } @Test public void testNotHavingSepc() { NotHavingSpec spec = new 
NotHavingSpec(HavingSpec.NEVER); - assertTrue(spec.eval(ROW)); + Assert.assertTrue(spec.eval(ROW)); spec = new NotHavingSpec(HavingSpec.ALWAYS); - assertFalse(spec.eval(ROW)); - + Assert.assertFalse(spec.eval(ROW)); } } diff --git a/processing/src/test/java/org/apache/druid/query/search/SearchQueryRunnerWithCaseTest.java b/processing/src/test/java/org/apache/druid/query/search/SearchQueryRunnerWithCaseTest.java index b80253890ea..f954949dbfa 100644 --- a/processing/src/test/java/org/apache/druid/query/search/SearchQueryRunnerWithCaseTest.java +++ b/processing/src/test/java/org/apache/druid/query/search/SearchQueryRunnerWithCaseTest.java @@ -28,6 +28,7 @@ import org.apache.druid.query.Druids; import org.apache.druid.query.Druids.SearchQueryBuilder; import org.apache.druid.query.QueryPlus; import org.apache.druid.query.QueryRunner; +import org.apache.druid.query.QueryRunnerTestHelper; import org.apache.druid.query.Result; import org.apache.druid.segment.IncrementalIndexSegment; import org.apache.druid.segment.QueryableIndex; @@ -50,20 +51,6 @@ import java.util.Map; import java.util.Set; import java.util.TreeMap; -import static org.apache.druid.query.QueryRunnerTestHelper.NOOP_QUERYWATCHER; -import static org.apache.druid.query.QueryRunnerTestHelper.allGran; -import static org.apache.druid.query.QueryRunnerTestHelper.dataSource; -import static org.apache.druid.query.QueryRunnerTestHelper.fullOnIntervalSpec; -import static org.apache.druid.query.QueryRunnerTestHelper.makeQueryRunner; -import static org.apache.druid.query.QueryRunnerTestHelper.marketDimension; -import static org.apache.druid.query.QueryRunnerTestHelper.noopIntervalChunkingQueryRunnerDecorator; -import static org.apache.druid.query.QueryRunnerTestHelper.placementDimension; -import static org.apache.druid.query.QueryRunnerTestHelper.placementishDimension; -import static org.apache.druid.query.QueryRunnerTestHelper.qualityDimension; -import static org.apache.druid.query.QueryRunnerTestHelper.transformToConstructionFeeder; - -/** - */ @RunWith(Parameterized.class) public class SearchQueryRunnerWithCaseTest { @@ -94,25 +81,25 @@ public class SearchQueryRunnerWithCaseTest final List>> runners = new ArrayList<>(); for (SearchQueryConfig config : configs) { runners.addAll(Arrays.asList( - makeQueryRunner( + QueryRunnerTestHelper.makeQueryRunner( makeRunnerFactory(config), SegmentId.dummy("index1"), new IncrementalIndexSegment(index1, SegmentId.dummy("index1")), "index1" ), - makeQueryRunner( + QueryRunnerTestHelper.makeQueryRunner( makeRunnerFactory(config), SegmentId.dummy("index2"), new IncrementalIndexSegment(index2, SegmentId.dummy("index2")), "index2" ), - makeQueryRunner( + QueryRunnerTestHelper.makeQueryRunner( makeRunnerFactory(config), SegmentId.dummy("index3"), new QueryableIndexSegment(index3, SegmentId.dummy("index3")), "index3" ), - makeQueryRunner( + QueryRunnerTestHelper.makeQueryRunner( makeRunnerFactory(config), SegmentId.dummy("index4"), new QueryableIndexSegment(index4, SegmentId.dummy("index4")), @@ -121,7 +108,7 @@ public class SearchQueryRunnerWithCaseTest )); } - return transformToConstructionFeeder(runners); + return QueryRunnerTestHelper.transformToConstructionFeeder(runners); } static SearchQueryRunnerFactory makeRunnerFactory(final SearchQueryConfig config) @@ -130,9 +117,9 @@ public class SearchQueryRunnerWithCaseTest new SearchStrategySelector(Suppliers.ofInstance(config)), new SearchQueryQueryToolChest( config, - noopIntervalChunkingQueryRunnerDecorator() + 
QueryRunnerTestHelper.noopIntervalChunkingQueryRunnerDecorator() ), - NOOP_QUERYWATCHER + QueryRunnerTestHelper.NOOP_QUERYWATCHER ); } @@ -148,9 +135,9 @@ public class SearchQueryRunnerWithCaseTest private Druids.SearchQueryBuilder testBuilder() { return Druids.newSearchQueryBuilder() - .dataSource(dataSource) - .granularity(allGran) - .intervals(fullOnIntervalSpec); + .dataSource(QueryRunnerTestHelper.dataSource) + .granularity(QueryRunnerTestHelper.allGran) + .intervals(QueryRunnerTestHelper.fullOnIntervalSpec); } @Test @@ -161,15 +148,15 @@ public class SearchQueryRunnerWithCaseTest SearchQuery searchQuery; searchQuery = builder.query("SPOT").build(); - expectedResults.put(marketDimension, Sets.newHashSet("spot", "SPot")); + expectedResults.put(QueryRunnerTestHelper.marketDimension, Sets.newHashSet("spot", "SPot")); checkSearchQuery(searchQuery, expectedResults); searchQuery = builder.query("spot", true).build(); - expectedResults.put(marketDimension, Sets.newHashSet("spot")); + expectedResults.put(QueryRunnerTestHelper.marketDimension, Sets.newHashSet("spot")); checkSearchQuery(searchQuery, expectedResults); searchQuery = builder.query("SPot", true).build(); - expectedResults.put(marketDimension, Sets.newHashSet("SPot")); + expectedResults.put(QueryRunnerTestHelper.marketDimension, Sets.newHashSet("SPot")); checkSearchQuery(searchQuery, expectedResults); } @@ -178,17 +165,23 @@ public class SearchQueryRunnerWithCaseTest { SearchQuery searchQuery; Druids.SearchQueryBuilder builder = testBuilder() - .dimensions(Arrays.asList(placementDimension, placementishDimension)); + .dimensions(Arrays.asList( + QueryRunnerTestHelper.placementDimension, + QueryRunnerTestHelper.placementishDimension + )); Map> expectedResults = new TreeMap<>(String.CASE_INSENSITIVE_ORDER); searchQuery = builder.query("PREFERRED").build(); - expectedResults.put(placementDimension, Sets.newHashSet("PREFERRED", "preferred", "PREFERRed")); - expectedResults.put(placementishDimension, Sets.newHashSet("preferred", "Preferred")); + expectedResults.put( + QueryRunnerTestHelper.placementDimension, + Sets.newHashSet("PREFERRED", "preferred", "PREFERRed") + ); + expectedResults.put(QueryRunnerTestHelper.placementishDimension, Sets.newHashSet("preferred", "Preferred")); checkSearchQuery(searchQuery, expectedResults); searchQuery = builder.query("preferred", true).build(); - expectedResults.put(placementDimension, Sets.newHashSet("preferred")); - expectedResults.put(placementishDimension, Sets.newHashSet("preferred")); + expectedResults.put(QueryRunnerTestHelper.placementDimension, Sets.newHashSet("preferred")); + expectedResults.put(QueryRunnerTestHelper.placementishDimension, Sets.newHashSet("preferred")); checkSearchQuery(searchQuery, expectedResults); } @@ -197,12 +190,12 @@ public class SearchQueryRunnerWithCaseTest { SearchQuery searchQuery; Druids.SearchQueryBuilder builder = testBuilder() - .dimensions(Collections.singletonList(qualityDimension)) + .dimensions(Collections.singletonList(QueryRunnerTestHelper.qualityDimension)) .intervals("2011-01-12T00:00:00.000Z/2011-01-13T00:00:00.000Z"); Map> expectedResults = new TreeMap<>(String.CASE_INSENSITIVE_ORDER); searchQuery = builder.query("otive").build(); - expectedResults.put(qualityDimension, Sets.newHashSet("AutoMotive")); + expectedResults.put(QueryRunnerTestHelper.qualityDimension, Sets.newHashSet("AutoMotive")); checkSearchQuery(searchQuery, expectedResults); } @@ -211,12 +204,12 @@ public class SearchQueryRunnerWithCaseTest { SearchQuery searchQuery; 
Druids.SearchQueryBuilder builder = testBuilder() - .dimensions(Collections.singletonList(qualityDimension)) + .dimensions(Collections.singletonList(QueryRunnerTestHelper.qualityDimension)) .intervals("2011-01-10T00:00:00.000Z/2011-01-11T00:00:00.000Z"); Map> expectedResults = new TreeMap<>(String.CASE_INSENSITIVE_ORDER); searchQuery = builder.query("business").build(); - expectedResults.put(qualityDimension, new HashSet<>()); + expectedResults.put(QueryRunnerTestHelper.qualityDimension, new HashSet<>()); checkSearchQuery(searchQuery, expectedResults); } @@ -228,11 +221,11 @@ public class SearchQueryRunnerWithCaseTest SearchQuery searchQuery; searchQuery = builder.fragments(Arrays.asList("auto", "ve")).build(); - expectedResults.put(qualityDimension, Sets.newHashSet("automotive", "AutoMotive")); + expectedResults.put(QueryRunnerTestHelper.qualityDimension, Sets.newHashSet("automotive", "AutoMotive")); checkSearchQuery(searchQuery, expectedResults); searchQuery = builder.fragments(Arrays.asList("auto", "ve"), true).build(); - expectedResults.put(qualityDimension, Sets.newHashSet("automotive")); + expectedResults.put(QueryRunnerTestHelper.qualityDimension, Sets.newHashSet("automotive")); checkSearchQuery(searchQuery, expectedResults); } diff --git a/processing/src/test/java/org/apache/druid/query/topn/TopNMetricSpecOptimizationsTest.java b/processing/src/test/java/org/apache/druid/query/topn/TopNMetricSpecOptimizationsTest.java index 91a00e80b05..ad215914a5d 100644 --- a/processing/src/test/java/org/apache/druid/query/topn/TopNMetricSpecOptimizationsTest.java +++ b/processing/src/test/java/org/apache/druid/query/topn/TopNMetricSpecOptimizationsTest.java @@ -28,6 +28,7 @@ import org.apache.druid.java.util.common.Pair; import org.apache.druid.java.util.common.granularity.Granularity; import org.apache.druid.java.util.common.guava.Sequence; import org.apache.druid.query.QueryMetrics; +import org.apache.druid.query.QueryRunnerTestHelper; import org.apache.druid.query.aggregation.DoubleMaxAggregatorFactory; import org.apache.druid.query.aggregation.DoubleMinAggregatorFactory; import org.apache.druid.query.filter.Filter; @@ -51,14 +52,6 @@ import org.junit.Test; import javax.annotation.Nullable; import java.util.Collections; -import static org.apache.druid.query.QueryRunnerTestHelper.addRowsIndexConstant; -import static org.apache.druid.query.QueryRunnerTestHelper.allGran; -import static org.apache.druid.query.QueryRunnerTestHelper.commonDoubleAggregators; -import static org.apache.druid.query.QueryRunnerTestHelper.dataSource; -import static org.apache.druid.query.QueryRunnerTestHelper.indexMetric; -import static org.apache.druid.query.QueryRunnerTestHelper.marketDimension; -import static org.apache.druid.query.QueryRunnerTestHelper.qualityDimension; - public class TopNMetricSpecOptimizationsTest { @Test @@ -68,16 +61,16 @@ public class TopNMetricSpecOptimizationsTest int cardinality = 1234; int threshold = 4; TopNQuery query = new TopNQueryBuilder() - .dataSource(dataSource) - .granularity(allGran) - .dimension(marketDimension) - .metric(indexMetric) + .dataSource(QueryRunnerTestHelper.dataSource) + .granularity(QueryRunnerTestHelper.allGran) + .dimension(QueryRunnerTestHelper.marketDimension) + .metric(QueryRunnerTestHelper.indexMetric) .threshold(threshold) .intervals("2018-05-30T00:00:00Z/2018-05-31T00:00:00Z") .aggregators( Lists.newArrayList( Iterables.concat( - commonDoubleAggregators, + QueryRunnerTestHelper.commonDoubleAggregators, Lists.newArrayList( new 
DoubleMaxAggregatorFactory("maxIndex", "index"), new DoubleMinAggregatorFactory("minIndex", "index") @@ -85,7 +78,7 @@ public class TopNMetricSpecOptimizationsTest ) ) ) - .postAggregators(Collections.singletonList(addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); StorageAdapter adapter = @@ -112,16 +105,16 @@ public class TopNMetricSpecOptimizationsTest int cardinality = 1234; int threshold = 4; TopNQuery query = new TopNQueryBuilder() - .dataSource(dataSource) - .granularity(allGran) - .dimension(marketDimension) - .metric(indexMetric) + .dataSource(QueryRunnerTestHelper.dataSource) + .granularity(QueryRunnerTestHelper.allGran) + .dimension(QueryRunnerTestHelper.marketDimension) + .metric(QueryRunnerTestHelper.indexMetric) .threshold(threshold) .intervals("2018-05-30T00:00:00Z/2018-05-30T01:00:00Z") .aggregators( Lists.newArrayList( Iterables.concat( - commonDoubleAggregators, + QueryRunnerTestHelper.commonDoubleAggregators, Lists.newArrayList( new DoubleMaxAggregatorFactory("maxIndex", "index"), new DoubleMinAggregatorFactory("minIndex", "index") @@ -129,7 +122,7 @@ public class TopNMetricSpecOptimizationsTest ) ) ) - .postAggregators(Collections.singletonList(addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); StorageAdapter adapter = @@ -157,16 +150,16 @@ public class TopNMetricSpecOptimizationsTest int cardinality = 1234; int threshold = 4; TopNQuery query = new TopNQueryBuilder() - .dataSource(dataSource) - .granularity(allGran) - .dimension(marketDimension) - .metric(indexMetric) + .dataSource(QueryRunnerTestHelper.dataSource) + .granularity(QueryRunnerTestHelper.allGran) + .dimension(QueryRunnerTestHelper.marketDimension) + .metric(QueryRunnerTestHelper.indexMetric) .threshold(threshold) .intervals("2018-05-30T00:00:00Z/2018-05-30T01:00:00Z") .aggregators( Lists.newArrayList( Iterables.concat( - commonDoubleAggregators, + QueryRunnerTestHelper.commonDoubleAggregators, Lists.newArrayList( new DoubleMaxAggregatorFactory("maxIndex", "index"), new DoubleMinAggregatorFactory("minIndex", "index") @@ -174,7 +167,7 @@ public class TopNMetricSpecOptimizationsTest ) ) ) - .postAggregators(Collections.singletonList(addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); StorageAdapter adapter = @@ -202,17 +195,17 @@ public class TopNMetricSpecOptimizationsTest int cardinality = 1234; int threshold = 4; TopNQuery query = new TopNQueryBuilder() - .dataSource(dataSource) - .granularity(allGran) - .dimension(marketDimension) - .filters(qualityDimension, "entertainment") - .metric(indexMetric) + .dataSource(QueryRunnerTestHelper.dataSource) + .granularity(QueryRunnerTestHelper.allGran) + .dimension(QueryRunnerTestHelper.marketDimension) + .filters(QueryRunnerTestHelper.qualityDimension, "entertainment") + .metric(QueryRunnerTestHelper.indexMetric) .threshold(threshold) .intervals("2018-05-30T00:00:00Z/2018-05-31T00:00:00Z") .aggregators( Lists.newArrayList( Iterables.concat( - commonDoubleAggregators, + QueryRunnerTestHelper.commonDoubleAggregators, Lists.newArrayList( new DoubleMaxAggregatorFactory("maxIndex", "index"), new DoubleMinAggregatorFactory("minIndex", "index") @@ -220,7 +213,7 @@ public class TopNMetricSpecOptimizationsTest ) ) ) - .postAggregators(Collections.singletonList(addRowsIndexConstant)) + 
.postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); StorageAdapter adapter = @@ -247,16 +240,16 @@ public class TopNMetricSpecOptimizationsTest int cardinality = 1234; int threshold = 4; TopNQuery query = new TopNQueryBuilder() - .dataSource(dataSource) - .granularity(allGran) - .dimension(marketDimension) - .metric(indexMetric) + .dataSource(QueryRunnerTestHelper.dataSource) + .granularity(QueryRunnerTestHelper.allGran) + .dimension(QueryRunnerTestHelper.marketDimension) + .metric(QueryRunnerTestHelper.indexMetric) .threshold(threshold) .intervals("2018-05-30T00:00:00Z/2018-05-31T00:00:00Z") .aggregators( Lists.newArrayList( Iterables.concat( - commonDoubleAggregators, + QueryRunnerTestHelper.commonDoubleAggregators, Lists.newArrayList( new DoubleMaxAggregatorFactory("maxIndex", "index"), new DoubleMinAggregatorFactory("minIndex", "index") @@ -264,7 +257,7 @@ public class TopNMetricSpecOptimizationsTest ) ) ) - .postAggregators(Collections.singletonList(addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); diff --git a/processing/src/test/java/org/apache/druid/query/topn/TopNQueryTest.java b/processing/src/test/java/org/apache/druid/query/topn/TopNQueryTest.java index c86f3cb5b54..2fc0db7a1c9 100644 --- a/processing/src/test/java/org/apache/druid/query/topn/TopNQueryTest.java +++ b/processing/src/test/java/org/apache/druid/query/topn/TopNQueryTest.java @@ -24,6 +24,7 @@ import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import org.apache.druid.query.Query; +import org.apache.druid.query.QueryRunnerTestHelper; import org.apache.druid.query.aggregation.DoubleMaxAggregatorFactory; import org.apache.druid.query.aggregation.DoubleMinAggregatorFactory; import org.apache.druid.query.dimension.ExtractionDimensionSpec; @@ -38,15 +39,6 @@ import org.junit.Test; import java.io.IOException; import java.util.Collections; -import static org.apache.druid.query.QueryRunnerTestHelper.addRowsIndexConstant; -import static org.apache.druid.query.QueryRunnerTestHelper.allGran; -import static org.apache.druid.query.QueryRunnerTestHelper.commonDoubleAggregators; -import static org.apache.druid.query.QueryRunnerTestHelper.dataSource; -import static org.apache.druid.query.QueryRunnerTestHelper.fullOnIntervalSpec; -import static org.apache.druid.query.QueryRunnerTestHelper.indexMetric; -import static org.apache.druid.query.QueryRunnerTestHelper.marketDimension; -import static org.apache.druid.query.QueryRunnerTestHelper.rowsCount; - public class TopNQueryTest { private static final ObjectMapper jsonMapper = TestHelper.makeJsonMapper(); @@ -55,16 +47,16 @@ public class TopNQueryTest public void testQuerySerialization() throws IOException { Query query = new TopNQueryBuilder() - .dataSource(dataSource) - .granularity(allGran) - .dimension(marketDimension) - .metric(indexMetric) + .dataSource(QueryRunnerTestHelper.dataSource) + .granularity(QueryRunnerTestHelper.allGran) + .dimension(QueryRunnerTestHelper.marketDimension) + .metric(QueryRunnerTestHelper.indexMetric) .threshold(4) - .intervals(fullOnIntervalSpec) + .intervals(QueryRunnerTestHelper.fullOnIntervalSpec) .aggregators( Lists.newArrayList( Iterables.concat( - commonDoubleAggregators, + QueryRunnerTestHelper.commonDoubleAggregators, Lists.newArrayList( new DoubleMaxAggregatorFactory("maxIndex", "index"), new DoubleMinAggregatorFactory("minIndex", "index") @@ -72,7 
+64,7 @@ public class TopNQueryTest ) ) ) - .postAggregators(Collections.singletonList(addRowsIndexConstant)) + .postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant)) .build(); String json = jsonMapper.writeValueAsString(query); @@ -86,22 +78,28 @@ public class TopNQueryTest public void testQuerySerdeWithLookupExtractionFn() throws IOException { final TopNQuery expectedQuery = new TopNQueryBuilder() - .dataSource(dataSource) - .granularity(allGran) + .dataSource(QueryRunnerTestHelper.dataSource) + .granularity(QueryRunnerTestHelper.allGran) .dimension( new ExtractionDimensionSpec( - marketDimension, - marketDimension, - new LookupExtractionFn(new MapLookupExtractor(ImmutableMap.of("foo", "bar"), false), true, null, false, false) + QueryRunnerTestHelper.marketDimension, + QueryRunnerTestHelper.marketDimension, + new LookupExtractionFn( + new MapLookupExtractor(ImmutableMap.of("foo", "bar"), false), + true, + null, + false, + false + ) ) ) - .metric(new NumericTopNMetricSpec(indexMetric)) + .metric(new NumericTopNMetricSpec(QueryRunnerTestHelper.indexMetric)) .threshold(2) - .intervals(fullOnIntervalSpec.getIntervals()) + .intervals(QueryRunnerTestHelper.fullOnIntervalSpec.getIntervals()) .aggregators( Lists.newArrayList( Iterables.concat( - commonDoubleAggregators, + QueryRunnerTestHelper.commonDoubleAggregators, Lists.newArrayList( new DoubleMaxAggregatorFactory("maxIndex", "index"), new DoubleMinAggregatorFactory("minIndex", "index") @@ -118,13 +116,13 @@ public class TopNQueryTest public void testQuerySerdeWithAlphaNumericTopNMetricSpec() throws IOException { TopNQuery expectedQuery = new TopNQueryBuilder() - .dataSource(dataSource) - .granularity(allGran) - .dimension(new LegacyDimensionSpec(marketDimension)) + .dataSource(QueryRunnerTestHelper.dataSource) + .granularity(QueryRunnerTestHelper.allGran) + .dimension(new LegacyDimensionSpec(QueryRunnerTestHelper.marketDimension)) .metric(new DimensionTopNMetricSpec(null, StringComparators.ALPHANUMERIC)) .threshold(2) - .intervals(fullOnIntervalSpec.getIntervals()) - .aggregators(Collections.singletonList(rowsCount)) + .intervals(QueryRunnerTestHelper.fullOnIntervalSpec.getIntervals()) + .aggregators(Collections.singletonList(QueryRunnerTestHelper.rowsCount)) .build(); String jsonQuery = "{\n" + " \"queryType\": \"topN\",\n" @@ -156,5 +154,4 @@ public class TopNQueryTest ); Assert.assertEquals(expectedQuery, actualQuery); } - } diff --git a/processing/src/test/java/org/apache/druid/segment/IntIteratorUtilsTest.java b/processing/src/test/java/org/apache/druid/segment/IntIteratorUtilsTest.java index 3d195e3cfe7..24b143565e6 100644 --- a/processing/src/test/java/org/apache/druid/segment/IntIteratorUtilsTest.java +++ b/processing/src/test/java/org/apache/druid/segment/IntIteratorUtilsTest.java @@ -21,32 +21,29 @@ package org.apache.druid.segment; import it.unimi.dsi.fastutil.ints.IntIterators; import it.unimi.dsi.fastutil.ints.IntListIterator; +import org.junit.Assert; import org.junit.Test; -import static org.apache.druid.segment.IntIteratorUtils.skip; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; - public class IntIteratorUtilsTest { @Test public void testSkip() { - assertEquals(0, skip(IntIterators.EMPTY_ITERATOR, 5)); - assertEquals(0, skip(IntIterators.EMPTY_ITERATOR, 0)); + Assert.assertEquals(0, IntIteratorUtils.skip(IntIterators.EMPTY_ITERATOR, 5)); + Assert.assertEquals(0, IntIteratorUtils.skip(IntIterators.EMPTY_ITERATOR, 0)); IntListIterator it = 
IntIterators.fromTo(0, 10); - assertEquals(3, skip(it, 3)); - assertEquals(3, it.nextInt()); - assertEquals(6, skip(it, 100)); - assertEquals(0, skip(it, 100)); - assertFalse(it.hasNext()); + Assert.assertEquals(3, IntIteratorUtils.skip(it, 3)); + Assert.assertEquals(3, it.nextInt()); + Assert.assertEquals(6, IntIteratorUtils.skip(it, 100)); + Assert.assertEquals(0, IntIteratorUtils.skip(it, 100)); + Assert.assertFalse(it.hasNext()); } @Test(expected = IllegalArgumentException.class) public void testNegativeSkipArgument() { - skip(IntIterators.fromTo(0, 10), -1); + IntIteratorUtils.skip(IntIterators.fromTo(0, 10), -1); } } diff --git a/processing/src/test/java/org/apache/druid/segment/IntListUtilsTest.java b/processing/src/test/java/org/apache/druid/segment/IntListUtilsTest.java index 83006953fc2..dc1a6381506 100644 --- a/processing/src/test/java/org/apache/druid/segment/IntListUtilsTest.java +++ b/processing/src/test/java/org/apache/druid/segment/IntListUtilsTest.java @@ -20,17 +20,16 @@ package org.apache.druid.segment; import it.unimi.dsi.fastutil.ints.IntList; +import org.junit.Assert; import org.junit.Test; -import static org.junit.Assert.assertEquals; - public class IntListUtilsTest { @Test(expected = IndexOutOfBoundsException.class) public void testEmptyRangeIntList() { final IntList list = IntListUtils.fromTo(10, 10); - assertEquals(0, list.size()); + Assert.assertEquals(0, list.size()); list.getInt(0); } @@ -45,7 +44,7 @@ public class IntListUtilsTest { final IntList list = IntListUtils.fromTo(20, 120); for (int i = 0; i < 100; i++) { - assertEquals(i + 20, list.getInt(i)); + Assert.assertEquals(i + 20, list.getInt(i)); } } } diff --git a/processing/src/test/java/org/apache/druid/segment/MergeIntIteratorTest.java b/processing/src/test/java/org/apache/druid/segment/MergeIntIteratorTest.java index 9f532fef4e0..f0678b86cea 100644 --- a/processing/src/test/java/org/apache/druid/segment/MergeIntIteratorTest.java +++ b/processing/src/test/java/org/apache/druid/segment/MergeIntIteratorTest.java @@ -22,6 +22,7 @@ package org.apache.druid.segment; import com.google.common.collect.Lists; import it.unimi.dsi.fastutil.ints.IntArrayList; import it.unimi.dsi.fastutil.ints.IntIterator; +import it.unimi.dsi.fastutil.ints.IntIterators; import it.unimi.dsi.fastutil.ints.IntList; import it.unimi.dsi.fastutil.ints.IntLists; import org.junit.Assert; @@ -34,34 +35,32 @@ import java.util.List; import java.util.NoSuchElementException; import java.util.concurrent.ThreadLocalRandom; -import static it.unimi.dsi.fastutil.ints.IntIterators.EMPTY_ITERATOR; -import static java.lang.Integer.MAX_VALUE; -import static org.apache.druid.segment.IntIteratorUtils.mergeAscending; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.fail; - public class MergeIntIteratorTest { @Test(expected = NoSuchElementException.class) public void testNoIterators() { - IntIterator it = mergeAscending(Collections.emptyList()); + IntIterator it = IntIteratorUtils.mergeAscending(Collections.emptyList()); assertEmpty(it); } @Test(expected = NoSuchElementException.class) public void testMergeEmptyIterators() { - IntIterator it = mergeAscending(Arrays.asList(EMPTY_ITERATOR, EMPTY_ITERATOR)); + IntIterator it = IntIteratorUtils.mergeAscending(Arrays.asList( + IntIterators.EMPTY_ITERATOR, + IntIterators.EMPTY_ITERATOR + )); assertEmpty(it); } private static void assertEmpty(IntIterator it) { - assertFalse(it.hasNext()); + Assert.assertFalse(it.hasNext()); try { + //noinspection deprecation it.next(); - fail("expected 
NoSuchElementException on it.next() after it.hasNext() = false"); + Assert.fail("expected NoSuchElementException on it.next() after it.hasNext() = false"); } catch (NoSuchElementException ignore) { // expected @@ -82,14 +81,14 @@ public class MergeIntIteratorTest IntLists.singleton(Integer.MIN_VALUE), IntLists.singleton(-1), IntLists.singleton(0), - IntLists.singleton(MAX_VALUE) + IntLists.singleton(Integer.MAX_VALUE) ); for (int i = 0; i < lists.size() + 1; i++) { - assertAscending(mergeAscending(iteratorsFromLists(lists))); + assertAscending(IntIteratorUtils.mergeAscending(iteratorsFromLists(lists))); Collections.rotate(lists, 1); } Collections.shuffle(lists); - assertAscending(mergeAscending(iteratorsFromLists(lists))); + assertAscending(IntIteratorUtils.mergeAscending(iteratorsFromLists(lists))); } private static List iteratorsFromLists(List lists) @@ -115,12 +114,12 @@ public class MergeIntIteratorTest lists.get(r.nextInt(numIterators)).add(j); } for (int j = 0; j < lists.size() + 1; j++) { - assertAscending(mergeAscending(iteratorsFromLists(lists))); + assertAscending(IntIteratorUtils.mergeAscending(iteratorsFromLists(lists))); Collections.rotate(lists, 1); } for (int j = 0; j < 10; j++) { Collections.shuffle(lists); - assertAscending(mergeAscending(iteratorsFromLists(lists))); + assertAscending(IntIteratorUtils.mergeAscending(iteratorsFromLists(lists))); } } } diff --git a/processing/src/test/java/org/apache/druid/segment/data/CompressedColumnarIntsSerializerTest.java b/processing/src/test/java/org/apache/druid/segment/data/CompressedColumnarIntsSerializerTest.java index b8520031fdb..fefd2a7685d 100644 --- a/processing/src/test/java/org/apache/druid/segment/data/CompressedColumnarIntsSerializerTest.java +++ b/processing/src/test/java/org/apache/druid/segment/data/CompressedColumnarIntsSerializerTest.java @@ -34,6 +34,7 @@ import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMedium; import org.apache.druid.segment.writeout.SegmentWriteOutMedium; import org.apache.druid.segment.writeout.WriteOutBytes; import org.junit.After; +import org.junit.Assert; import org.junit.Before; import org.junit.Rule; import org.junit.Test; @@ -49,8 +50,6 @@ import java.util.List; import java.util.Random; import java.util.Set; -import static org.junit.Assert.assertEquals; - @RunWith(Parameterized.class) public class CompressedColumnarIntsSerializerTest { @@ -135,7 +134,7 @@ public class CompressedColumnarIntsSerializerTest writer.writeTo(writeOutBytes, smoosher); smoosher.close(); - assertEquals(writtenLength, supplierFromList.getSerializedSize()); + Assert.assertEquals(writtenLength, supplierFromList.getSerializedSize()); // read from ByteBuffer and check values CompressedColumnarIntsSupplier supplierFromByteBuffer = CompressedColumnarIntsSupplier.fromByteBuffer( @@ -143,9 +142,9 @@ public class CompressedColumnarIntsSerializerTest byteOrder ); ColumnarInts columnarInts = supplierFromByteBuffer.get(); - assertEquals(vals.length, columnarInts.size()); + Assert.assertEquals(vals.length, columnarInts.size()); for (int i = 0; i < vals.length; ++i) { - assertEquals(vals[i], columnarInts.get(i)); + Assert.assertEquals(vals[i], columnarInts.get(i)); } CloseQuietly.close(columnarInts); } @@ -221,9 +220,9 @@ public class CompressedColumnarIntsSerializerTest byteOrder ); ColumnarInts columnarInts = supplierFromByteBuffer.get(); - assertEquals(vals.length, columnarInts.size()); + Assert.assertEquals(vals.length, columnarInts.size()); for (int i = 0; i < vals.length; ++i) { - assertEquals(vals[i], 
columnarInts.get(i)); + Assert.assertEquals(vals[i], columnarInts.get(i)); } CloseQuietly.close(columnarInts); mapper.close(); diff --git a/processing/src/test/java/org/apache/druid/segment/data/CompressedVSizeColumnarIntsSerializerTest.java b/processing/src/test/java/org/apache/druid/segment/data/CompressedVSizeColumnarIntsSerializerTest.java index 0c5ffa84200..150dd0d8955 100644 --- a/processing/src/test/java/org/apache/druid/segment/data/CompressedVSizeColumnarIntsSerializerTest.java +++ b/processing/src/test/java/org/apache/druid/segment/data/CompressedVSizeColumnarIntsSerializerTest.java @@ -34,6 +34,7 @@ import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMedium; import org.apache.druid.segment.writeout.SegmentWriteOutMedium; import org.apache.druid.segment.writeout.WriteOutBytes; import org.junit.After; +import org.junit.Assert; import org.junit.Before; import org.junit.Rule; import org.junit.Test; @@ -48,8 +49,6 @@ import java.util.List; import java.util.Random; import java.util.Set; -import static org.junit.Assert.assertEquals; - @RunWith(Parameterized.class) public class CompressedVSizeColumnarIntsSerializerTest { @@ -135,7 +134,7 @@ public class CompressedVSizeColumnarIntsSerializerTest writer.writeTo(writeOutBytes, smoosher); smoosher.close(); - assertEquals(writtenLength, supplierFromList.getSerializedSize()); + Assert.assertEquals(writtenLength, supplierFromList.getSerializedSize()); // read from ByteBuffer and check values CompressedVSizeColumnarIntsSupplier supplierFromByteBuffer = CompressedVSizeColumnarIntsSupplier.fromByteBuffer( @@ -144,7 +143,7 @@ public class CompressedVSizeColumnarIntsSerializerTest ); ColumnarInts columnarInts = supplierFromByteBuffer.get(); for (int i = 0; i < vals.length; ++i) { - assertEquals(vals[i], columnarInts.get(i)); + Assert.assertEquals(vals[i], columnarInts.get(i)); } CloseQuietly.close(columnarInts); } @@ -219,7 +218,7 @@ public class CompressedVSizeColumnarIntsSerializerTest ColumnarInts columnarInts = supplierFromByteBuffer.get(); for (int i = 0; i < vals.length; ++i) { - assertEquals(vals[i], columnarInts.get(i)); + Assert.assertEquals(vals[i], columnarInts.get(i)); } CloseQuietly.close(columnarInts); mapper.close(); diff --git a/processing/src/test/java/org/apache/druid/segment/data/V3CompressedVSizeColumnarMultiIntsSerializerTest.java b/processing/src/test/java/org/apache/druid/segment/data/V3CompressedVSizeColumnarMultiIntsSerializerTest.java index e92b87e9012..9398ed4c331 100644 --- a/processing/src/test/java/org/apache/druid/segment/data/V3CompressedVSizeColumnarMultiIntsSerializerTest.java +++ b/processing/src/test/java/org/apache/druid/segment/data/V3CompressedVSizeColumnarMultiIntsSerializerTest.java @@ -32,6 +32,7 @@ import org.apache.druid.java.util.common.io.smoosh.SmooshedWriter; import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMedium; import org.apache.druid.segment.writeout.SegmentWriteOutMedium; import org.apache.druid.segment.writeout.WriteOutBytes; +import org.junit.Assert; import org.junit.Before; import org.junit.Rule; import org.junit.Test; @@ -50,8 +51,6 @@ import java.util.Random; import java.util.Set; import java.util.stream.IntStream; -import static org.junit.Assert.assertEquals; - @RunWith(Parameterized.class) public class V3CompressedVSizeColumnarMultiIntsSerializerTest { @@ -147,7 +146,7 @@ public class V3CompressedVSizeColumnarMultiIntsSerializerTest writer.writeTo(writeOutBytes, smoosher); smoosher.close(); - assertEquals(writtenLength, 
supplierFromIterable.getSerializedSize()); + Assert.assertEquals(writtenLength, supplierFromIterable.getSerializedSize()); // read from ByteBuffer and check values V3CompressedVSizeColumnarMultiIntsSupplier supplierFromByteBuffer = V3CompressedVSizeColumnarMultiIntsSupplier.fromByteBuffer( @@ -156,19 +155,19 @@ public class V3CompressedVSizeColumnarMultiIntsSerializerTest ); try (final ColumnarMultiInts columnarMultiInts = supplierFromByteBuffer.get()) { - assertEquals(columnarMultiInts.size(), vals.size()); + Assert.assertEquals(columnarMultiInts.size(), vals.size()); for (int i = 0; i < vals.size(); ++i) { IndexedInts subVals = columnarMultiInts.get(i); - assertEquals(subVals.size(), vals.get(i).length); + Assert.assertEquals(subVals.size(), vals.get(i).length); for (int j = 0, size = subVals.size(); j < size; ++j) { - assertEquals(subVals.get(j), vals.get(i)[j]); + Assert.assertEquals(subVals.get(j), vals.get(i)[j]); } } } } } - int getMaxValue(final List vals) + private int getMaxValue(final List vals) { return vals .stream() @@ -270,12 +269,12 @@ public class V3CompressedVSizeColumnarMultiIntsSerializerTest V3CompressedVSizeColumnarMultiIntsSupplier supplierFromByteBuffer = V3CompressedVSizeColumnarMultiIntsSupplier.fromByteBuffer(mapper.mapFile("test"), byteOrder); ColumnarMultiInts columnarMultiInts = supplierFromByteBuffer.get(); - assertEquals(columnarMultiInts.size(), vals.size()); + Assert.assertEquals(columnarMultiInts.size(), vals.size()); for (int i = 0; i < vals.size(); ++i) { IndexedInts subVals = columnarMultiInts.get(i); - assertEquals(subVals.size(), vals.get(i).length); + Assert.assertEquals(subVals.size(), vals.get(i).length); for (int j = 0, size = subVals.size(); j < size; ++j) { - assertEquals(subVals.get(j), vals.get(i)[j]); + Assert.assertEquals(subVals.get(j), vals.get(i)[j]); } } CloseQuietly.close(columnarMultiInts); diff --git a/processing/src/test/java/org/apache/druid/segment/data/VSizeColumnarIntsSerializerTest.java b/processing/src/test/java/org/apache/druid/segment/data/VSizeColumnarIntsSerializerTest.java index 29d7cb6f2a0..29f18016236 100644 --- a/processing/src/test/java/org/apache/druid/segment/data/VSizeColumnarIntsSerializerTest.java +++ b/processing/src/test/java/org/apache/druid/segment/data/VSizeColumnarIntsSerializerTest.java @@ -25,14 +25,13 @@ import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMedium; import org.apache.druid.segment.writeout.SegmentWriteOutMedium; import org.apache.druid.segment.writeout.WriteOutBytes; import org.junit.After; +import org.junit.Assert; import org.junit.Before; import org.junit.Test; import java.nio.ByteBuffer; import java.util.Random; -import static org.junit.Assert.assertEquals; - public class VSizeColumnarIntsSerializerTest { private static final int[] MAX_VALUES = new int[]{0xFF, 0xFFFF, 0xFFFFFF, 0x0FFFFFFF}; @@ -75,15 +74,15 @@ public class VSizeColumnarIntsSerializerTest WriteOutBytes writeOutBytes = segmentWriteOutMedium.makeWriteOutBytes(); writer.writeTo(writeOutBytes, null); - assertEquals(writtenLength, intsFromList.getSerializedSize()); + Assert.assertEquals(writtenLength, intsFromList.getSerializedSize()); // read from ByteBuffer and check values VSizeColumnarInts intsFromByteBuffer = VSizeColumnarInts.readFromByteBuffer( ByteBuffer.wrap(IOUtils.toByteArray(writeOutBytes.asInputStream())) ); - assertEquals(vals.length, intsFromByteBuffer.size()); + Assert.assertEquals(vals.length, intsFromByteBuffer.size()); for (int i = 0; i < vals.length; ++i) { - assertEquals(vals[i], 
intsFromByteBuffer.get(i)); + Assert.assertEquals(vals[i], intsFromByteBuffer.get(i)); } } diff --git a/processing/src/test/java/org/apache/druid/segment/filter/FiltersTest.java b/processing/src/test/java/org/apache/druid/segment/filter/FiltersTest.java index 3d27f4d6c5f..56b993cf537 100644 --- a/processing/src/test/java/org/apache/druid/segment/filter/FiltersTest.java +++ b/processing/src/test/java/org/apache/druid/segment/filter/FiltersTest.java @@ -27,12 +27,11 @@ import org.apache.druid.collections.bitmap.ImmutableBitmap; import org.apache.druid.collections.bitmap.MutableBitmap; import org.apache.druid.segment.IntIteratorUtils; import org.apache.druid.segment.column.BitmapIndex; +import org.junit.Assert; import org.junit.Test; import java.util.List; -import static org.junit.Assert.assertEquals; - public class FiltersTest { @Test @@ -48,7 +47,7 @@ public class FiltersTest 10000 ); final double expected = 0.1; - assertEquals(expected, estimated, 0.00001); + Assert.assertEquals(expected, estimated, 0.00001); } private static BitmapIndex getBitmapIndex(final List bitmapList) diff --git a/server/src/test/java/org/apache/druid/client/selector/ServerSelectorTest.java b/server/src/test/java/org/apache/druid/client/selector/ServerSelectorTest.java index 290b18470a9..61587d16536 100644 --- a/server/src/test/java/org/apache/druid/client/selector/ServerSelectorTest.java +++ b/server/src/test/java/org/apache/druid/client/selector/ServerSelectorTest.java @@ -32,19 +32,13 @@ import org.junit.Assert; import org.junit.Before; import org.junit.Test; -import static org.easymock.EasyMock.expect; - -/** - */ public class ServerSelectorTest { - TierSelectorStrategy tierSelectorStrategy; - @Before public void setUp() { - tierSelectorStrategy = EasyMock.createMock(TierSelectorStrategy.class); - expect(tierSelectorStrategy.getComparator()).andReturn(Integer::compare).anyTimes(); + TierSelectorStrategy tierSelectorStrategy = EasyMock.createMock(TierSelectorStrategy.class); + EasyMock.expect(tierSelectorStrategy.getComparator()).andReturn(Integer::compare).anyTimes(); } @Test diff --git a/server/src/test/java/org/apache/druid/query/lookup/LookupReferencesManagerTest.java b/server/src/test/java/org/apache/druid/query/lookup/LookupReferencesManagerTest.java index 5c0f16815e7..fbcc7d98acb 100644 --- a/server/src/test/java/org/apache/druid/query/lookup/LookupReferencesManagerTest.java +++ b/server/src/test/java/org/apache/druid/query/lookup/LookupReferencesManagerTest.java @@ -43,11 +43,6 @@ import java.util.HashMap; import java.util.Map; import java.util.concurrent.TimeUnit; -import static org.easymock.EasyMock.createMock; -import static org.easymock.EasyMock.expect; -import static org.easymock.EasyMock.replay; -import static org.easymock.EasyMock.reset; - public class LookupReferencesManagerTest { private static final String LOOKUP_TIER = "lookupTier"; @@ -67,7 +62,7 @@ public class LookupReferencesManagerTest druidLeaderClient = EasyMock.createMock(DruidLeaderClient.class); - config = createMock(LookupListeningAnnouncerConfig.class); + config = EasyMock.createMock(LookupListeningAnnouncerConfig.class); lookupExtractorFactory = new MapLookupExtractorFactory( ImmutableMap.of( @@ -99,20 +94,20 @@ public class LookupReferencesManagerTest lookupMap.put("testMockForStartStop", container); String strResult = mapper.writeValueAsString(lookupMap); Request request = new Request(HttpMethod.GET, new URL("http://localhost:1234/xx")); - expect(config.getLookupTier()).andReturn(LOOKUP_TIER).anyTimes(); - replay(config); - 
expect(druidLeaderClient.makeRequest( + EasyMock.expect(config.getLookupTier()).andReturn(LOOKUP_TIER).anyTimes(); + EasyMock.replay(config); + EasyMock.expect(druidLeaderClient.makeRequest( HttpMethod.GET, "/druid/coordinator/v1/lookups/config/lookupTier?detailed=true" )) - .andReturn(request); + .andReturn(request); FullResponseHolder responseHolder = new FullResponseHolder( HttpResponseStatus.OK, EasyMock.createNiceMock(HttpResponse.class), new StringBuilder().append(strResult) ); - expect(druidLeaderClient.go(request)).andReturn(responseHolder); - replay(druidLeaderClient); + EasyMock.expect(druidLeaderClient.go(request)).andReturn(responseHolder); + EasyMock.replay(druidLeaderClient); Assert.assertFalse(lookupReferencesManager.lifecycleLock.awaitStarted(1, TimeUnit.MICROSECONDS)); Assert.assertNull(lookupReferencesManager.mainThread); Assert.assertNull(lookupReferencesManager.stateRef.get()); @@ -163,20 +158,20 @@ public class LookupReferencesManagerTest lookupMap.put("testMockForAddGetRemove", container); String strResult = mapper.writeValueAsString(lookupMap); Request request = new Request(HttpMethod.GET, new URL("http://localhost:1234/xx")); - expect(config.getLookupTier()).andReturn(LOOKUP_TIER).anyTimes(); - replay(config); - expect(druidLeaderClient.makeRequest( + EasyMock.expect(config.getLookupTier()).andReturn(LOOKUP_TIER).anyTimes(); + EasyMock.replay(config); + EasyMock.expect(druidLeaderClient.makeRequest( HttpMethod.GET, "/druid/coordinator/v1/lookups/config/lookupTier?detailed=true" )) - .andReturn(request); + .andReturn(request); FullResponseHolder responseHolder = new FullResponseHolder( HttpResponseStatus.OK, EasyMock.createNiceMock(HttpResponse.class), new StringBuilder().append(strResult) ); - expect(druidLeaderClient.go(request)).andReturn(responseHolder); - replay(druidLeaderClient); + EasyMock.expect(druidLeaderClient.go(request)).andReturn(responseHolder); + EasyMock.replay(druidLeaderClient); lookupReferencesManager.start(); Assert.assertNull(lookupReferencesManager.get("test")); @@ -204,20 +199,20 @@ public class LookupReferencesManagerTest lookupMap.put("testMockForCloseIsCalledAfterStopping", container); String strResult = mapper.writeValueAsString(lookupMap); Request request = new Request(HttpMethod.GET, new URL("http://localhost:1234/xx")); - expect(config.getLookupTier()).andReturn(LOOKUP_TIER).anyTimes(); - replay(config); - expect(druidLeaderClient.makeRequest( + EasyMock.expect(config.getLookupTier()).andReturn(LOOKUP_TIER).anyTimes(); + EasyMock.replay(config); + EasyMock.expect(druidLeaderClient.makeRequest( HttpMethod.GET, "/druid/coordinator/v1/lookups/config/lookupTier?detailed=true" )) - .andReturn(request); + .andReturn(request); FullResponseHolder responseHolder = new FullResponseHolder( HttpResponseStatus.OK, EasyMock.createNiceMock(HttpResponse.class), new StringBuilder().append(strResult) ); - expect(druidLeaderClient.go(request)).andReturn(responseHolder); - replay(druidLeaderClient); + EasyMock.expect(druidLeaderClient.go(request)).andReturn(responseHolder); + EasyMock.replay(druidLeaderClient); lookupReferencesManager.start(); lookupReferencesManager.add("testMock", new LookupExtractorFactoryContainer("0", lookupExtractorFactory)); lookupReferencesManager.handlePendingNotices(); @@ -238,20 +233,20 @@ public class LookupReferencesManagerTest lookupMap.put("testMockForDestroyIsCalledAfterRemove", container); String strResult = mapper.writeValueAsString(lookupMap); Request request = new Request(HttpMethod.GET, new 
URL("http://localhost:1234/xx")); - expect(config.getLookupTier()).andReturn(LOOKUP_TIER).anyTimes(); - replay(config); - expect(druidLeaderClient.makeRequest( + EasyMock.expect(config.getLookupTier()).andReturn(LOOKUP_TIER).anyTimes(); + EasyMock.replay(config); + EasyMock.expect(druidLeaderClient.makeRequest( HttpMethod.GET, "/druid/coordinator/v1/lookups/config/lookupTier?detailed=true" )) - .andReturn(request); + .andReturn(request); FullResponseHolder responseHolder = new FullResponseHolder( HttpResponseStatus.OK, EasyMock.createNiceMock(HttpResponse.class), new StringBuilder().append(strResult) ); - expect(druidLeaderClient.go(request)).andReturn(responseHolder); - replay(druidLeaderClient); + EasyMock.expect(druidLeaderClient.go(request)).andReturn(responseHolder); + EasyMock.replay(druidLeaderClient); lookupReferencesManager.start(); lookupReferencesManager.add("testMock", new LookupExtractorFactoryContainer("0", lookupExtractorFactory)); lookupReferencesManager.handlePendingNotices(); @@ -269,20 +264,20 @@ public class LookupReferencesManagerTest lookupMap.put("testMockForGetNotThere", container); String strResult = mapper.writeValueAsString(lookupMap); Request request = new Request(HttpMethod.GET, new URL("http://localhost:1234/xx")); - expect(config.getLookupTier()).andReturn(LOOKUP_TIER).anyTimes(); - replay(config); - expect(druidLeaderClient.makeRequest( + EasyMock.expect(config.getLookupTier()).andReturn(LOOKUP_TIER).anyTimes(); + EasyMock.replay(config); + EasyMock.expect(druidLeaderClient.makeRequest( HttpMethod.GET, "/druid/coordinator/v1/lookups/config/lookupTier?detailed=true" )) - .andReturn(request); + .andReturn(request); FullResponseHolder responseHolder = new FullResponseHolder( HttpResponseStatus.OK, EasyMock.createNiceMock(HttpResponse.class), new StringBuilder().append(strResult) ); - expect(druidLeaderClient.go(request)).andReturn(responseHolder); - replay(druidLeaderClient); + EasyMock.expect(druidLeaderClient.go(request)).andReturn(responseHolder); + EasyMock.replay(druidLeaderClient); lookupReferencesManager.start(); Assert.assertNull(lookupReferencesManager.get("notThere")); } @@ -302,20 +297,20 @@ public class LookupReferencesManagerTest lookupMap.put("testMockForUpdateWithHigherVersion", container); String strResult = mapper.writeValueAsString(lookupMap); Request request = new Request(HttpMethod.GET, new URL("http://localhost:1234/xx")); - expect(config.getLookupTier()).andReturn(LOOKUP_TIER).anyTimes(); - replay(config); - expect(druidLeaderClient.makeRequest( + EasyMock.expect(config.getLookupTier()).andReturn(LOOKUP_TIER).anyTimes(); + EasyMock.replay(config); + EasyMock.expect(druidLeaderClient.makeRequest( HttpMethod.GET, "/druid/coordinator/v1/lookups/config/lookupTier?detailed=true" )) - .andReturn(request); + .andReturn(request); FullResponseHolder responseHolder = new FullResponseHolder( HttpResponseStatus.OK, EasyMock.createNiceMock(HttpResponse.class), new StringBuilder().append(strResult) ); - expect(druidLeaderClient.go(request)).andReturn(responseHolder); - replay(druidLeaderClient); + EasyMock.expect(druidLeaderClient.go(request)).andReturn(responseHolder); + EasyMock.replay(druidLeaderClient); lookupReferencesManager.start(); lookupReferencesManager.add("testName", new LookupExtractorFactoryContainer("1", lookupExtractorFactory1)); lookupReferencesManager.handlePendingNotices(); @@ -339,20 +334,20 @@ public class LookupReferencesManagerTest lookupMap.put("testMockForUpdateWithLowerVersion", container); String strResult = 
mapper.writeValueAsString(lookupMap); Request request = new Request(HttpMethod.GET, new URL("http://localhost:1234/xx")); - expect(config.getLookupTier()).andReturn(LOOKUP_TIER).anyTimes(); - replay(config); - expect(druidLeaderClient.makeRequest( + EasyMock.expect(config.getLookupTier()).andReturn(LOOKUP_TIER).anyTimes(); + EasyMock.replay(config); + EasyMock.expect(druidLeaderClient.makeRequest( HttpMethod.GET, "/druid/coordinator/v1/lookups/config/lookupTier?detailed=true" )) - .andReturn(request); + .andReturn(request); FullResponseHolder responseHolder = new FullResponseHolder( HttpResponseStatus.OK, EasyMock.createNiceMock(HttpResponse.class), new StringBuilder().append(strResult) ); - expect(druidLeaderClient.go(request)).andReturn(responseHolder); - replay(druidLeaderClient); + EasyMock.expect(druidLeaderClient.go(request)).andReturn(responseHolder); + EasyMock.replay(druidLeaderClient); lookupReferencesManager.start(); lookupReferencesManager.add("testName", new LookupExtractorFactoryContainer("1", lookupExtractorFactory1)); lookupReferencesManager.handlePendingNotices(); @@ -370,20 +365,20 @@ public class LookupReferencesManagerTest lookupMap.put("testMockForRemoveNonExisting", container); String strResult = mapper.writeValueAsString(lookupMap); Request request = new Request(HttpMethod.GET, new URL("http://localhost:1234/xx")); - expect(config.getLookupTier()).andReturn(LOOKUP_TIER).anyTimes(); - replay(config); - expect(druidLeaderClient.makeRequest( + EasyMock.expect(config.getLookupTier()).andReturn(LOOKUP_TIER).anyTimes(); + EasyMock.replay(config); + EasyMock.expect(druidLeaderClient.makeRequest( HttpMethod.GET, "/druid/coordinator/v1/lookups/config/lookupTier?detailed=true" )) - .andReturn(request); + .andReturn(request); FullResponseHolder responseHolder = new FullResponseHolder( HttpResponseStatus.OK, EasyMock.createNiceMock(HttpResponse.class), new StringBuilder().append(strResult) ); - expect(druidLeaderClient.go(request)).andReturn(responseHolder); - replay(druidLeaderClient); + EasyMock.expect(druidLeaderClient.go(request)).andReturn(responseHolder); + EasyMock.replay(druidLeaderClient); lookupReferencesManager.start(); lookupReferencesManager.remove("test"); lookupReferencesManager.handlePendingNotices(); @@ -424,20 +419,20 @@ public class LookupReferencesManagerTest Map lookupMap = new HashMap<>(); String strResult = mapper.writeValueAsString(lookupMap); Request request = new Request(HttpMethod.GET, new URL("http://localhost:1234/xx")); - expect(config.getLookupTier()).andReturn(LOOKUP_TIER).anyTimes(); - replay(config); - expect(druidLeaderClient.makeRequest( + EasyMock.expect(config.getLookupTier()).andReturn(LOOKUP_TIER).anyTimes(); + EasyMock.replay(config); + EasyMock.expect(druidLeaderClient.makeRequest( HttpMethod.GET, "/druid/coordinator/v1/lookups/config/lookupTier?detailed=true" )) - .andReturn(request); + .andReturn(request); FullResponseHolder responseHolder = new FullResponseHolder( HttpResponseStatus.OK, EasyMock.createNiceMock(HttpResponse.class), new StringBuilder().append(strResult) ); - expect(druidLeaderClient.go(request)).andReturn(responseHolder); - replay(druidLeaderClient); + EasyMock.expect(druidLeaderClient.go(request)).andReturn(responseHolder); + EasyMock.replay(druidLeaderClient); lookupReferencesManager.start(); lookupReferencesManager.add("one", container1); lookupReferencesManager.add("two", container2); @@ -469,20 +464,20 @@ public class LookupReferencesManagerTest lookupMap.put("testMockForRealModeWithMainThread", container); String 
strResult = mapper.writeValueAsString(lookupMap); Request request = new Request(HttpMethod.GET, new URL("http://localhost:1234/xx")); - expect(config.getLookupTier()).andReturn(LOOKUP_TIER).anyTimes(); - replay(config); - expect(druidLeaderClient.makeRequest( + EasyMock.expect(config.getLookupTier()).andReturn(LOOKUP_TIER).anyTimes(); + EasyMock.replay(config); + EasyMock.expect(druidLeaderClient.makeRequest( HttpMethod.GET, "/druid/coordinator/v1/lookups/config/lookupTier?detailed=true" )) - .andReturn(request); + .andReturn(request); FullResponseHolder responseHolder = new FullResponseHolder( HttpResponseStatus.OK, EasyMock.createNiceMock(HttpResponse.class), new StringBuilder().append(strResult) ); - expect(druidLeaderClient.go(request)).andReturn(responseHolder); - replay(druidLeaderClient); + EasyMock.expect(druidLeaderClient.go(request)).andReturn(responseHolder); + EasyMock.replay(druidLeaderClient); lookupReferencesManager.start(); Assert.assertTrue(lookupReferencesManager.mainThread.isAlive()); @@ -548,20 +543,20 @@ public class LookupReferencesManagerTest lookupMap.put("testLookup3", container3); String strResult = mapper.writeValueAsString(lookupMap); Request request = new Request(HttpMethod.GET, new URL("http://localhost:1234/xx")); - expect(config.getLookupTier()).andReturn(LOOKUP_TIER).anyTimes(); - replay(config); - expect(druidLeaderClient.makeRequest( + EasyMock.expect(config.getLookupTier()).andReturn(LOOKUP_TIER).anyTimes(); + EasyMock.replay(config); + EasyMock.expect(druidLeaderClient.makeRequest( HttpMethod.GET, "/druid/coordinator/v1/lookups/config/lookupTier?detailed=true" )) - .andReturn(request); + .andReturn(request); FullResponseHolder responseHolder = new FullResponseHolder( HttpResponseStatus.OK, EasyMock.createNiceMock(HttpResponse.class), new StringBuilder().append(strResult) ); - expect(druidLeaderClient.go(request)).andReturn(responseHolder); - replay(druidLeaderClient); + EasyMock.expect(druidLeaderClient.go(request)).andReturn(responseHolder); + EasyMock.replay(druidLeaderClient); lookupReferencesManager.start(); Assert.assertEquals(container1, lookupReferencesManager.get("testLookup1")); @@ -592,21 +587,21 @@ public class LookupReferencesManagerTest lookupMap.put("testMockForLoadLookupOnCoordinatorFailure", container); String strResult = mapper.writeValueAsString(lookupMap); Request request = new Request(HttpMethod.GET, new URL("http://localhost:1234/xx")); - expect(config.getLookupTier()).andReturn(LOOKUP_TIER).anyTimes(); - replay(config); - expect(druidLeaderClient.makeRequest( + EasyMock.expect(config.getLookupTier()).andReturn(LOOKUP_TIER).anyTimes(); + EasyMock.replay(config); + EasyMock.expect(druidLeaderClient.makeRequest( HttpMethod.GET, "/druid/coordinator/v1/lookups/config/lookupTier?detailed=true" )) - .andReturn(request) - .anyTimes(); + .andReturn(request) + .anyTimes(); FullResponseHolder responseHolder = new FullResponseHolder( HttpResponseStatus.NOT_FOUND, EasyMock.createNiceMock(HttpResponse.class), new StringBuilder().append(strResult) ); - expect(druidLeaderClient.go(request)).andThrow(new IllegalStateException()).anyTimes(); - replay(druidLeaderClient); + EasyMock.expect(druidLeaderClient.go(request)).andThrow(new IllegalStateException()).anyTimes(); + EasyMock.replay(druidLeaderClient); lookupReferencesManager.start(); lookupReferencesManager.add("testMockForLoadLookupOnCoordinatorFailure", container); @@ -628,18 +623,18 @@ public class LookupReferencesManagerTest config, true ); - reset(config); - reset(druidLeaderClient); - 
expect(config.getLookupTier()).andReturn(LOOKUP_TIER).anyTimes(); - replay(config); - expect(druidLeaderClient.makeRequest( + EasyMock.reset(config); + EasyMock.reset(druidLeaderClient); + EasyMock.expect(config.getLookupTier()).andReturn(LOOKUP_TIER).anyTimes(); + EasyMock.replay(config); + EasyMock.expect(druidLeaderClient.makeRequest( HttpMethod.GET, "/druid/coordinator/v1/lookups/config/lookupTier?detailed=true" )) - .andReturn(request) - .anyTimes(); - expect(druidLeaderClient.go(request)).andThrow(new IllegalStateException()).anyTimes(); - replay(druidLeaderClient); + .andReturn(request) + .anyTimes(); + EasyMock.expect(druidLeaderClient.go(request)).andThrow(new IllegalStateException()).anyTimes(); + EasyMock.replay(druidLeaderClient); lookupReferencesManager.start(); Assert.assertEquals(container, lookupReferencesManager.get("testMockForLoadLookupOnCoordinatorFailure")); } @@ -665,19 +660,19 @@ public class LookupReferencesManagerTest lookupMap.put("testMockForDisableLookupSync", container); String strResult = mapper.writeValueAsString(lookupMap); Request request = new Request(HttpMethod.GET, new URL("http://localhost:1234/xx")); - expect(config.getLookupTier()).andReturn(LOOKUP_TIER).anyTimes(); - replay(config); - expect(druidLeaderClient.makeRequest( + EasyMock.expect(config.getLookupTier()).andReturn(LOOKUP_TIER).anyTimes(); + EasyMock.replay(config); + EasyMock.expect(druidLeaderClient.makeRequest( HttpMethod.GET, "/druid/coordinator/v1/lookups/config/lookupTier?detailed=true" )) - .andReturn(request); + .andReturn(request); FullResponseHolder responseHolder = new FullResponseHolder( HttpResponseStatus.OK, EasyMock.createNiceMock(HttpResponse.class), new StringBuilder().append(strResult) ); - expect(druidLeaderClient.go(request)).andReturn(responseHolder); + EasyMock.expect(druidLeaderClient.go(request)).andReturn(responseHolder); lookupReferencesManager.start(); Assert.assertNull(lookupReferencesManager.get("testMockForDisableLookupSync")); diff --git a/server/src/test/java/org/apache/druid/server/StatusResourceTest.java b/server/src/test/java/org/apache/druid/server/StatusResourceTest.java index 8b184aa47eb..5e238cce671 100644 --- a/server/src/test/java/org/apache/druid/server/StatusResourceTest.java +++ b/server/src/test/java/org/apache/druid/server/StatusResourceTest.java @@ -36,18 +36,14 @@ import java.util.List; import java.util.Map; import java.util.Set; -import static org.apache.druid.server.StatusResource.ModuleVersion; - -/** - */ public class StatusResourceTest { @Test public void testLoadedModules() { - Collection modules = ImmutableList.of((DruidModule) new InitializationTest.TestDruidModule()); - List statusResourceModuleList = new StatusResource.Status(modules).getModules(); + Collection modules = ImmutableList.of(new InitializationTest.TestDruidModule()); + List statusResourceModuleList = new StatusResource.Status(modules).getModules(); Assert.assertEquals("Status should have all modules loaded!", modules.size(), statusResourceModuleList.size()); @@ -55,7 +51,7 @@ public class StatusResourceTest String moduleName = module.getClass().getCanonicalName(); boolean contains = Boolean.FALSE; - for (ModuleVersion version : statusResourceModuleList) { + for (StatusResource.ModuleVersion version : statusResourceModuleList) { if (version.getName().equals(moduleName)) { contains = Boolean.TRUE; } @@ -75,4 +71,3 @@ public class StatusResourceTest hiddenProperties.forEach((property) -> Assert.assertNull(returnedProperties.get(property))); } } - diff --git 
a/server/src/test/java/org/apache/druid/server/coordinator/DruidCoordinatorBalancerTest.java b/server/src/test/java/org/apache/druid/server/coordinator/DruidCoordinatorBalancerTest.java index 0a3089808c6..e094c3a3494 100644 --- a/server/src/test/java/org/apache/druid/server/coordinator/DruidCoordinatorBalancerTest.java +++ b/server/src/test/java/org/apache/druid/server/coordinator/DruidCoordinatorBalancerTest.java @@ -30,6 +30,7 @@ import org.apache.druid.server.coordination.ServerType; import org.apache.druid.timeline.DataSegment; import org.apache.druid.timeline.partition.NoneShardSpec; import org.easymock.EasyMock; +import org.hamcrest.Matchers; import org.joda.time.DateTime; import org.joda.time.Interval; import org.junit.After; @@ -47,13 +48,6 @@ import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; import java.util.stream.IntStream; -import static org.easymock.EasyMock.anyObject; -import static org.easymock.EasyMock.replay; -import static org.hamcrest.Matchers.equalTo; -import static org.hamcrest.Matchers.is; - -/** - */ public class DruidCoordinatorBalancerTest { private static final int MAX_SEGMENTS_TO_MOVE = 5; @@ -222,14 +216,14 @@ public class DruidCoordinatorBalancerTest EasyMock.expect(strategy.pickSegmentToMove(ImmutableList.of(new ServerHolder(druidServer2, peon2, false)))) .andReturn(new BalancerSegmentHolder(druidServer2, segment3)) .andReturn(new BalancerSegmentHolder(druidServer2, segment4)); - EasyMock.expect(strategy.pickSegmentToMove(anyObject())) + EasyMock.expect(strategy.pickSegmentToMove(EasyMock.anyObject())) .andReturn(new BalancerSegmentHolder(druidServer1, segment1)) .andReturn(new BalancerSegmentHolder(druidServer1, segment2)); - EasyMock.expect(strategy.findNewSegmentHomeBalancer(anyObject(), anyObject())) + EasyMock.expect(strategy.findNewSegmentHomeBalancer(EasyMock.anyObject(), EasyMock.anyObject())) .andReturn(new ServerHolder(druidServer3, peon3)) .anyTimes(); - replay(strategy); + EasyMock.replay(strategy); DruidCoordinatorRuntimeParams params = defaultRuntimeParamsBuilder( ImmutableList.of(druidServer1, druidServer2, druidServer3), @@ -247,7 +241,10 @@ public class DruidCoordinatorBalancerTest params = new DruidCoordinatorBalancerTester(coordinator).run(params); Assert.assertEquals(3L, params.getCoordinatorStats().getTieredStat("movedCount", "normal")); - Assert.assertThat(peon3.getSegmentsToLoad(), is(equalTo(ImmutableSet.of(segment1, segment3, segment4)))); + Assert.assertThat( + peon3.getSegmentsToLoad(), + Matchers.is(Matchers.equalTo(ImmutableSet.of(segment1, segment3, segment4))) + ); } @Test @@ -256,7 +253,7 @@ public class DruidCoordinatorBalancerTest DruidCoordinatorRuntimeParams params = setupParamsForDecommissioningMaxPercentOfMaxSegmentsToMove(0); params = new DruidCoordinatorBalancerTester(coordinator).run(params); Assert.assertEquals(1L, params.getCoordinatorStats().getTieredStat("movedCount", "normal")); - Assert.assertThat(peon3.getSegmentsToLoad(), is(equalTo(ImmutableSet.of(segment1)))); + Assert.assertThat(peon3.getSegmentsToLoad(), Matchers.is(Matchers.equalTo(ImmutableSet.of(segment1)))); } @Test @@ -265,7 +262,7 @@ public class DruidCoordinatorBalancerTest DruidCoordinatorRuntimeParams params = setupParamsForDecommissioningMaxPercentOfMaxSegmentsToMove(10); params = new DruidCoordinatorBalancerTester(coordinator).run(params); Assert.assertEquals(1L, params.getCoordinatorStats().getTieredStat("movedCount", "normal")); - Assert.assertThat(peon3.getSegmentsToLoad(), 
is(equalTo(ImmutableSet.of(segment2)))); + Assert.assertThat(peon3.getSegmentsToLoad(), Matchers.is(Matchers.equalTo(ImmutableSet.of(segment2)))); } /** @@ -283,16 +280,16 @@ public class DruidCoordinatorBalancerTest mockCoordinator(coordinator); BalancerStrategy strategy = EasyMock.createMock(BalancerStrategy.class); - EasyMock.expect(strategy.pickSegmentToMove(anyObject())) - .andReturn(new BalancerSegmentHolder(druidServer1, segment1)) - .andReturn(new BalancerSegmentHolder(druidServer1, segment2)) - .andReturn(new BalancerSegmentHolder(druidServer2, segment3)) - .andReturn(new BalancerSegmentHolder(druidServer2, segment4)); + EasyMock.expect(strategy.pickSegmentToMove(EasyMock.anyObject())) + .andReturn(new BalancerSegmentHolder(druidServer1, segment1)) + .andReturn(new BalancerSegmentHolder(druidServer1, segment2)) + .andReturn(new BalancerSegmentHolder(druidServer2, segment3)) + .andReturn(new BalancerSegmentHolder(druidServer2, segment4)); - EasyMock.expect(strategy.findNewSegmentHomeBalancer(anyObject(), anyObject())) + EasyMock.expect(strategy.findNewSegmentHomeBalancer(EasyMock.anyObject(), EasyMock.anyObject())) .andReturn(new ServerHolder(druidServer3, peon3)) .anyTimes(); - replay(strategy); + EasyMock.replay(strategy); DruidCoordinatorRuntimeParams params = defaultRuntimeParamsBuilder( ImmutableList.of(druidServer1, druidServer2, druidServer3), @@ -300,14 +297,20 @@ public class DruidCoordinatorBalancerTest ImmutableList.of(false, false, false) ) .withDynamicConfigs( - CoordinatorDynamicConfig.builder().withMaxSegmentsToMove(3).withDecommissioningMaxPercentOfMaxSegmentsToMove(9).build() + CoordinatorDynamicConfig.builder() + .withMaxSegmentsToMove(3) + .withDecommissioningMaxPercentOfMaxSegmentsToMove(9) + .build() ) .withBalancerStrategy(strategy) .build(); params = new DruidCoordinatorBalancerTester(coordinator).run(params); Assert.assertEquals(3L, params.getCoordinatorStats().getTieredStat("movedCount", "normal")); - Assert.assertThat(peon3.getSegmentsToLoad(), is(equalTo(ImmutableSet.of(segment1, segment2, segment3)))); + Assert.assertThat( + peon3.getSegmentsToLoad(), + Matchers.is(Matchers.equalTo(ImmutableSet.of(segment1, segment2, segment3))) + ); } /** @@ -319,20 +322,20 @@ public class DruidCoordinatorBalancerTest mockDruidServer(druidServer1, "1", "normal", 30L, 100L, segments); mockDruidServer(druidServer2, "2", "normal", 0L, 100L, Collections.emptyList()); - replay(druidServer3); - replay(druidServer4); + EasyMock.replay(druidServer3); + EasyMock.replay(druidServer4); mockCoordinator(coordinator); BalancerStrategy strategy = EasyMock.createMock(BalancerStrategy.class); - EasyMock.expect(strategy.pickSegmentToMove(anyObject())) + EasyMock.expect(strategy.pickSegmentToMove(EasyMock.anyObject())) .andReturn(new BalancerSegmentHolder(druidServer1, segment1)) .anyTimes(); - EasyMock.expect(strategy.findNewSegmentHomeBalancer(anyObject(), anyObject())).andAnswer(() -> { + EasyMock.expect(strategy.findNewSegmentHomeBalancer(EasyMock.anyObject(), EasyMock.anyObject())).andAnswer(() -> { List holders = (List) EasyMock.getCurrentArguments()[1]; return holders.get(0); }).anyTimes(); - replay(strategy); + EasyMock.replay(strategy); DruidCoordinatorRuntimeParams params = defaultRuntimeParamsBuilder( ImmutableList.of(druidServer1, druidServer2), @@ -352,18 +355,20 @@ public class DruidCoordinatorBalancerTest mockDruidServer(druidServer1, "1", "normal", 30L, 100L, segments); mockDruidServer(druidServer2, "2", "normal", 0L, 100L, Collections.emptyList()); - replay(druidServer3); 
- replay(druidServer4); + EasyMock.replay(druidServer3); + EasyMock.replay(druidServer4); mockCoordinator(coordinator); ServerHolder holder2 = new ServerHolder(druidServer2, peon2, false); BalancerStrategy strategy = EasyMock.createMock(BalancerStrategy.class); - EasyMock.expect(strategy.pickSegmentToMove(anyObject())) + EasyMock.expect(strategy.pickSegmentToMove(EasyMock.anyObject())) .andReturn(new BalancerSegmentHolder(druidServer1, segment1)) .once(); - EasyMock.expect(strategy.findNewSegmentHomeBalancer(anyObject(), anyObject())).andReturn(holder2).once(); - replay(strategy); + EasyMock.expect(strategy.findNewSegmentHomeBalancer(EasyMock.anyObject(), EasyMock.anyObject())) + .andReturn(holder2) + .once(); + EasyMock.replay(strategy); DruidCoordinatorRuntimeParams params = defaultRuntimeParamsBuilder( ImmutableList.of(druidServer1, druidServer2), @@ -566,8 +571,8 @@ public class DruidCoordinatorBalancerTest s -> EasyMock.expect(druidServer.getSegment(s.getId())).andReturn(s).anyTimes() ); } - EasyMock.expect(druidServer.getSegment(anyObject())).andReturn(null).anyTimes(); - replay(druidServer); + EasyMock.expect(druidServer.getSegment(EasyMock.anyObject())).andReturn(null).anyTimes(); + EasyMock.replay(druidServer); } private static void mockCoordinator(DruidCoordinator coordinator) @@ -588,7 +593,7 @@ public class DruidCoordinatorBalancerTest private final List pickOrder; private final AtomicInteger pickCounter = new AtomicInteger(0); - public PredefinedPickOrderBalancerStrategy( + PredefinedPickOrderBalancerStrategy( BalancerStrategy delegate, List pickOrder ) @@ -636,12 +641,12 @@ public class DruidCoordinatorBalancerTest BalancerStrategy strategy = EasyMock.createMock(BalancerStrategy.class); EasyMock.expect(strategy.pickSegmentToMove(ImmutableList.of(new ServerHolder(druidServer2, peon2, true)))) .andReturn(new BalancerSegmentHolder(druidServer2, segment2)); - EasyMock.expect(strategy.pickSegmentToMove(anyObject())) + EasyMock.expect(strategy.pickSegmentToMove(EasyMock.anyObject())) .andReturn(new BalancerSegmentHolder(druidServer1, segment1)); - EasyMock.expect(strategy.findNewSegmentHomeBalancer(anyObject(), anyObject())) + EasyMock.expect(strategy.findNewSegmentHomeBalancer(EasyMock.anyObject(), EasyMock.anyObject())) .andReturn(new ServerHolder(druidServer3, peon3)) .anyTimes(); - replay(strategy); + EasyMock.replay(strategy); return defaultRuntimeParamsBuilder( ImmutableList.of(druidServer1, druidServer2, druidServer3), diff --git a/server/src/test/java/org/apache/druid/server/coordinator/cost/SegmentsCostCacheTest.java b/server/src/test/java/org/apache/druid/server/coordinator/cost/SegmentsCostCacheTest.java index 4a647f39821..f0ae22094fe 100644 --- a/server/src/test/java/org/apache/druid/server/coordinator/cost/SegmentsCostCacheTest.java +++ b/server/src/test/java/org/apache/druid/server/coordinator/cost/SegmentsCostCacheTest.java @@ -23,6 +23,7 @@ import org.apache.druid.java.util.common.DateTimes; import org.apache.druid.timeline.DataSegment; import org.joda.time.DateTime; import org.joda.time.Interval; +import org.junit.Assert; import org.junit.Test; import java.util.ArrayList; @@ -32,10 +33,6 @@ import java.util.UUID; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.TimeUnit; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; - public class SegmentsCostCacheTest { @@ -49,7 +46,7 @@ public class SegmentsCostCacheTest SegmentsCostCache.Builder cacheBuilder = 
SegmentsCostCache.builder(); cacheBuilder.addSegment(createSegment(DATA_SOURCE, shifted1HInterval(REFERENCE_TIME, 0), 100)); SegmentsCostCache cache = cacheBuilder.build(); - assertEquals( + Assert.assertEquals( 7.8735899489011E-4, cache.cost(createSegment(DATA_SOURCE, shifted1HInterval(REFERENCE_TIME, -2), 100)), EPSILON @@ -64,7 +61,7 @@ public class SegmentsCostCacheTest createSegment(DATA_SOURCE, shifted1HInterval(REFERENCE_TIME, 0), 100) ); SegmentsCostCache cache = cacheBuilder.build(); - assertEquals( + Assert.assertEquals( 0, cache.cost(createSegment(DATA_SOURCE, shifted1HInterval(REFERENCE_TIME, (int) TimeUnit.DAYS.toHours(50)), 100)), EPSILON @@ -86,7 +83,7 @@ public class SegmentsCostCacheTest SegmentsCostCache.Bucket bucket = prototype.build(); double segmentCost = bucket.cost(segmentB); - assertEquals(7.8735899489011E-4, segmentCost, EPSILON); + Assert.assertEquals(7.8735899489011E-4, segmentCost, EPSILON); } @Test @@ -105,8 +102,8 @@ public class SegmentsCostCacheTest prototype.addSegment(segmentA); SegmentsCostCache.Bucket bucket = prototype.build(); - assertTrue(bucket.inCalculationInterval(segmentA)); - assertFalse(bucket.inCalculationInterval(segmentB)); + Assert.assertTrue(bucket.inCalculationInterval(segmentA)); + Assert.assertFalse(bucket.inCalculationInterval(segmentB)); } @Test @@ -124,7 +121,7 @@ public class SegmentsCostCacheTest SegmentsCostCache.Bucket bucket = prototype.build(); double segmentCost = bucket.cost(segmentB); - assertEquals(8.26147353873985E-4, segmentCost, EPSILON); + Assert.assertEquals(8.26147353873985E-4, segmentCost, EPSILON); } @Test @@ -145,7 +142,7 @@ public class SegmentsCostCacheTest double segmentCost = bucket.cost(segmentB); - assertEquals(0.001574717989780039, segmentCost, EPSILON); + Assert.assertEquals(0.001574717989780039, segmentCost, EPSILON); } @Test @@ -167,10 +164,10 @@ public class SegmentsCostCacheTest SegmentsCostCache.Bucket bucket = prototype.build(); double cost = bucket.cost(referenceSegment); - assertEquals(0.7065117101966677, cost, EPSILON); + Assert.assertEquals(0.7065117101966677, cost, EPSILON); } - public static Interval shifted1HInterval(DateTime REFERENCE_TIME, int shiftInHours) + private static Interval shifted1HInterval(DateTime REFERENCE_TIME, int shiftInHours) { return new Interval( REFERENCE_TIME.plusHours(shiftInHours), @@ -178,7 +175,7 @@ public class SegmentsCostCacheTest ); } - public static DataSegment createSegment(String dataSource, Interval interval, long size) + private static DataSegment createSegment(String dataSource, Interval interval, long size) { return new DataSegment( dataSource, diff --git a/server/src/test/java/org/apache/druid/server/coordinator/rules/BroadcastDistributionRuleSerdeTest.java b/server/src/test/java/org/apache/druid/server/coordinator/rules/BroadcastDistributionRuleSerdeTest.java index 5f6cbf53c3b..e3b51a51b02 100644 --- a/server/src/test/java/org/apache/druid/server/coordinator/rules/BroadcastDistributionRuleSerdeTest.java +++ b/server/src/test/java/org/apache/druid/server/coordinator/rules/BroadcastDistributionRuleSerdeTest.java @@ -26,6 +26,7 @@ import com.google.common.collect.Lists; import org.apache.druid.jackson.DefaultObjectMapper; import org.apache.druid.java.util.common.Intervals; import org.joda.time.Period; +import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; @@ -34,8 +35,6 @@ import java.io.IOException; import java.util.Collections; import java.util.List; -import static 
org.junit.Assert.assertEquals; - @RunWith(Parameterized.class) public class BroadcastDistributionRuleSerdeTest { @@ -44,7 +43,7 @@ public class BroadcastDistributionRuleSerdeTest @Parameterized.Parameters public static List constructorFeeder() { - final List params = Lists.newArrayList( + return Lists.newArrayList( new Object[]{new ForeverBroadcastDistributionRule(ImmutableList.of("large_source1", "large_source2"))}, new Object[]{new ForeverBroadcastDistributionRule(ImmutableList.of())}, new Object[]{new ForeverBroadcastDistributionRule(null)}, @@ -55,7 +54,6 @@ public class BroadcastDistributionRuleSerdeTest new Object[]{new PeriodBroadcastDistributionRule(new Period(1000), null, ImmutableList.of())}, new Object[]{new PeriodBroadcastDistributionRule(new Period(1000), null, null)} ); - return params; } private final Rule testRule; @@ -71,6 +69,6 @@ public class BroadcastDistributionRuleSerdeTest final List rules = Collections.singletonList(testRule); final String json = MAPPER.writeValueAsString(rules); final List fromJson = MAPPER.readValue(json, new TypeReference>(){}); - assertEquals(rules, fromJson); + Assert.assertEquals(rules, fromJson); } } diff --git a/server/src/test/java/org/apache/druid/server/coordinator/rules/BroadcastDistributionRuleTest.java b/server/src/test/java/org/apache/druid/server/coordinator/rules/BroadcastDistributionRuleTest.java index adb34479272..431f6de1985 100644 --- a/server/src/test/java/org/apache/druid/server/coordinator/rules/BroadcastDistributionRuleTest.java +++ b/server/src/test/java/org/apache/druid/server/coordinator/rules/BroadcastDistributionRuleTest.java @@ -33,6 +33,7 @@ import org.apache.druid.server.coordinator.SegmentReplicantLookup; import org.apache.druid.server.coordinator.ServerHolder; import org.apache.druid.timeline.DataSegment; import org.apache.druid.timeline.partition.NoneShardSpec; +import org.junit.Assert; import org.junit.Before; import org.junit.Test; @@ -44,16 +45,12 @@ import java.util.TreeSet; import java.util.stream.Collectors; import java.util.stream.Stream; -import static org.junit.Assert.assertEquals; -import static org.junit.Assert.assertFalse; -import static org.junit.Assert.assertTrue; - public class BroadcastDistributionRuleTest { private DruidCluster druidCluster; private ServerHolder holderOfSmallSegment; - private List holdersOfLargeSegments = new ArrayList<>(); - private List holdersOfLargeSegments2 = new ArrayList<>(); + private final List holdersOfLargeSegments = new ArrayList<>(); + private final List holdersOfLargeSegments2 = new ArrayList<>(); private final List largeSegments = new ArrayList<>(); private final List largeSegments2 = new ArrayList<>(); private DataSegment smallSegment; @@ -297,20 +294,20 @@ public class BroadcastDistributionRuleTest smallSegment ); - assertEquals(3L, stats.getGlobalStat(LoadRule.ASSIGNED_COUNT)); - assertEquals(false, stats.hasPerTierStats()); + Assert.assertEquals(3L, stats.getGlobalStat(LoadRule.ASSIGNED_COUNT)); + Assert.assertFalse(stats.hasPerTierStats()); - assertTrue( + Assert.assertTrue( holdersOfLargeSegments.stream() .allMatch(holder -> holder.getPeon().getSegmentsToLoad().contains(smallSegment)) ); - assertTrue( + Assert.assertTrue( holdersOfLargeSegments2.stream() .noneMatch(holder -> holder.getPeon().getSegmentsToLoad().contains(smallSegment)) ); - assertFalse(holderOfSmallSegment.getPeon().getSegmentsToLoad().contains(smallSegment)); + Assert.assertFalse(holderOfSmallSegment.getPeon().getSegmentsToLoad().contains(smallSegment)); } /** @@ -320,7 +317,7 @@ public class 
BroadcastDistributionRuleTest * active | large segment * decommissioning1 | small segment * decommissioning2 | large segment - * + *
* After running the rule for the small segment: * active | large & small segments * decommissioning1 | @@ -345,12 +342,12 @@ public class BroadcastDistributionRuleTest smallSegment ); - assertEquals(1L, stats.getGlobalStat(LoadRule.ASSIGNED_COUNT)); - assertEquals(false, stats.hasPerTierStats()); + Assert.assertEquals(1L, stats.getGlobalStat(LoadRule.ASSIGNED_COUNT)); + Assert.assertFalse(stats.hasPerTierStats()); - assertEquals(1, activeServer.getPeon().getSegmentsToLoad().size()); - assertEquals(1, decommissioningServer1.getPeon().getSegmentsToDrop().size()); - assertEquals(0, decommissioningServer2.getPeon().getSegmentsToLoad().size()); + Assert.assertEquals(1, activeServer.getPeon().getSegmentsToLoad().size()); + Assert.assertEquals(1, decommissioningServer1.getPeon().getSegmentsToDrop().size()); + Assert.assertEquals(0, decommissioningServer2.getPeon().getSegmentsToLoad().size()); } @Test @@ -377,20 +374,20 @@ public class BroadcastDistributionRuleTest smallSegment ); - assertEquals(5L, stats.getGlobalStat(LoadRule.ASSIGNED_COUNT)); - assertEquals(false, stats.hasPerTierStats()); + Assert.assertEquals(5L, stats.getGlobalStat(LoadRule.ASSIGNED_COUNT)); + Assert.assertFalse(stats.hasPerTierStats()); - assertTrue( + Assert.assertTrue( holdersOfLargeSegments.stream() .allMatch(holder -> holder.getPeon().getSegmentsToLoad().contains(smallSegment)) ); - assertTrue( + Assert.assertTrue( holdersOfLargeSegments2.stream() .allMatch(holder -> holder.getPeon().getSegmentsToLoad().contains(smallSegment)) ); - assertFalse(holderOfSmallSegment.getPeon().getSegmentsToLoad().contains(smallSegment)); + Assert.assertFalse(holderOfSmallSegment.getPeon().getSegmentsToLoad().contains(smallSegment)); } @Test @@ -415,10 +412,10 @@ public class BroadcastDistributionRuleTest smallSegment ); - assertEquals(6L, stats.getGlobalStat(LoadRule.ASSIGNED_COUNT)); - assertEquals(false, stats.hasPerTierStats()); + Assert.assertEquals(6L, stats.getGlobalStat(LoadRule.ASSIGNED_COUNT)); + Assert.assertFalse(stats.hasPerTierStats()); - assertTrue( + Assert.assertTrue( druidCluster.getAllServers().stream() .allMatch(holder -> holder.getPeon().getSegmentsToLoad().contains(smallSegment)) );
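
For readers skimming the hunks above, the mechanical rule applied in every file is the same: static imports of org.junit.Assert, org.easymock.EasyMock, and org.hamcrest.Matchers members are replaced with calls qualified by their owning class. The sketch below shows the resulting style in isolation. It is illustrative only, not code from the Druid repository; the Calculator interface and the test class name are hypothetical and exist just to keep the example self-contained and runnable with JUnit 4, EasyMock, and Hamcrest on the classpath.

import org.easymock.EasyMock;
import org.hamcrest.Matchers;
import org.junit.Assert;
import org.junit.Test;

public class QualifiedStaticReferencesExampleTest
{
  // Hypothetical collaborator, declared here only so the sketch compiles on its own.
  public interface Calculator
  {
    int add(int a, int b);
  }

  @Test
  public void testQualifiedStyle()
  {
    // With static imports this would read:
    //   expect(calculator.add(anyInt(), anyInt())).andReturn(3);
    //   replay(calculator);
    //   assertThat(result, is(equalTo(3)));
    // The convention in the hunks above qualifies every such call instead.
    final Calculator calculator = EasyMock.createMock(Calculator.class);
    EasyMock.expect(calculator.add(EasyMock.anyInt(), EasyMock.anyInt())).andReturn(3);
    EasyMock.replay(calculator);

    final int result = calculator.add(1, 2);

    Assert.assertEquals(3, result);
    Assert.assertThat(result, Matchers.is(Matchers.equalTo(3)));
    EasyMock.verify(calculator);
  }
}

The qualified form is slightly more verbose, but it makes the owning class of each assertion, matcher, and mock-control call explicit at the call site; that is the style the test diffs above apply consistently.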