Remove static imports from tests (#8036)

Forbid static imports in tests and remove all existing occurrences, for
consistency with the non-test code.
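
For illustration, a minimal before/after sketch of the conversion applied across the test sources (hypothetical test class and values, not taken verbatim from any file in this commit):

import org.junit.Assert;
import org.junit.Test;

public class StaticImportExampleTest
{
  // Before this change, tests commonly used:
  //   import static org.junit.Assert.assertEquals;
  //   ...
  //   assertEquals(10, poolSize);
  @Test
  public void testQualifiedAssertion()
  {
    int poolSize = 10; // stand-in for a real value such as pool.getPoolSize()
    // After this change, Assert is imported normally and the call is qualified:
    Assert.assertEquals(10, poolSize);
  }
}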

Also, various changes to the files affected by the above:
- Reformat to adhere to the Druid style guide
- Fix various IntelliJ warnings
- Fix various SonarLint warnings (e.g., the expected/actual args to
  Assert.assertEquals() were flipped; see the sketch below)
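
The flipped-argument fix refers to JUnit's parameter order, Assert.assertEquals(expected, actual). A hedged sketch of the kind of call that was corrected (hypothetical test class and values, not from a specific file):

import org.junit.Assert;
import org.junit.Test;

public class AssertArgumentOrderExampleTest
{
  @Test
  public void testArgumentOrder()
  {
    int actualPoolSize = computePoolSize();
    // Flipped (wrong): Assert.assertEquals(actualPoolSize, 10) would label the
    // actual value as the expected one in the failure message.
    Assert.assertEquals(10, actualPoolSize); // expected first, actual second
  }

  private int computePoolSize()
  {
    return 10;
  }
}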
Chi Cao Minh 2019-07-06 09:33:12 -07:00 committed by Gian Merlino
parent 4f4cda3ec9
commit 1166bbcb75
99 changed files with 3986 additions and 4267 deletions

View File

@ -29,9 +29,6 @@
<!-- Code copied from TestNG to apply a bugfix -->
<suppress checks="AvoidStaticImport" files="[\\/]org[\\/]testng[\\/]" />
<!-- Our tests have many legacy usages of static imports; enforcement is impractical until those are removed. -->
<suppress checks="AvoidStaticImport" files="[\\/]src[\\/]test[\\/]" />
<suppress checks="Header" files="[\\/]target[\\/]generated-test-sources[\\/]" />
<suppress checks="NeedBraces" files="[\\/]target[\\/]generated-test-sources[\\/]" />
<suppress checks="UnusedImports" files="[\\/]target[\\/]generated-test-sources[\\/]" />

View File

@ -22,6 +22,7 @@ package org.apache.druid.collections;
import com.google.common.base.Suppliers;
import org.apache.druid.java.util.common.concurrent.Execs;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
@ -34,11 +35,6 @@ import java.util.concurrent.ExecutionException;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Future;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
public class BlockingPoolTest
{
private ExecutorService service;
@ -85,10 +81,10 @@ public class BlockingPoolTest
public void testTake()
{
final ReferenceCountingResourceHolder<Integer> holder = pool.take(100);
assertNotNull(holder);
assertEquals(9, pool.getPoolSize());
Assert.assertNotNull(holder);
Assert.assertEquals(9, pool.getPoolSize());
holder.close();
assertEquals(10, pool.getPoolSize());
Assert.assertEquals(10, pool.getPoolSize());
}
@Test(timeout = 60_000L)
@ -96,7 +92,7 @@ public class BlockingPoolTest
{
final List<ReferenceCountingResourceHolder<Integer>> batchHolder = pool.takeBatch(10, 100L);
final ReferenceCountingResourceHolder<Integer> holder = pool.take(100);
assertNull(holder);
Assert.assertNull(holder);
batchHolder.forEach(ReferenceCountingResourceHolder::close);
}
@ -104,20 +100,20 @@ public class BlockingPoolTest
public void testTakeBatch()
{
final List<ReferenceCountingResourceHolder<Integer>> holder = pool.takeBatch(6, 100L);
assertNotNull(holder);
assertEquals(6, holder.size());
assertEquals(4, pool.getPoolSize());
Assert.assertNotNull(holder);
Assert.assertEquals(6, holder.size());
Assert.assertEquals(4, pool.getPoolSize());
holder.forEach(ReferenceCountingResourceHolder::close);
assertEquals(10, pool.getPoolSize());
Assert.assertEquals(10, pool.getPoolSize());
}
@Test(timeout = 60_000L)
public void testWaitAndTakeBatch() throws InterruptedException, ExecutionException
{
List<ReferenceCountingResourceHolder<Integer>> batchHolder = pool.takeBatch(10, 10);
assertNotNull(batchHolder);
assertEquals(10, batchHolder.size());
assertEquals(0, pool.getPoolSize());
Assert.assertNotNull(batchHolder);
Assert.assertEquals(10, batchHolder.size());
Assert.assertEquals(0, pool.getPoolSize());
final Future<List<ReferenceCountingResourceHolder<Integer>>> future = service.submit(
() -> pool.takeBatch(8, 100)
@ -126,19 +122,19 @@ public class BlockingPoolTest
batchHolder.forEach(ReferenceCountingResourceHolder::close);
batchHolder = future.get();
assertNotNull(batchHolder);
assertEquals(8, batchHolder.size());
assertEquals(2, pool.getPoolSize());
Assert.assertNotNull(batchHolder);
Assert.assertEquals(8, batchHolder.size());
Assert.assertEquals(2, pool.getPoolSize());
batchHolder.forEach(ReferenceCountingResourceHolder::close);
assertEquals(10, pool.getPoolSize());
Assert.assertEquals(10, pool.getPoolSize());
}
@Test(timeout = 60_000L)
public void testTakeBatchTooManyObjects()
{
final List<ReferenceCountingResourceHolder<Integer>> holder = pool.takeBatch(100, 100L);
assertTrue(holder.isEmpty());
Assert.assertTrue(holder.isEmpty());
}
@Test(timeout = 60_000L)
@ -148,39 +144,29 @@ public class BlockingPoolTest
final int limit2 = pool.maxSize() - limit1 + 1;
final Future<List<ReferenceCountingResourceHolder<Integer>>> f1 = service.submit(
new Callable<List<ReferenceCountingResourceHolder<Integer>>>()
{
@Override
public List<ReferenceCountingResourceHolder<Integer>> call()
{
List<ReferenceCountingResourceHolder<Integer>> result = new ArrayList<>();
for (int i = 0; i < limit1; i++) {
result.add(pool.take(10));
}
return result;
() -> {
List<ReferenceCountingResourceHolder<Integer>> result = new ArrayList<>();
for (int i = 0; i < limit1; i++) {
result.add(pool.take(10));
}
return result;
}
);
final Future<List<ReferenceCountingResourceHolder<Integer>>> f2 = service.submit(
new Callable<List<ReferenceCountingResourceHolder<Integer>>>()
{
@Override
public List<ReferenceCountingResourceHolder<Integer>> call()
{
List<ReferenceCountingResourceHolder<Integer>> result = new ArrayList<>();
for (int i = 0; i < limit2; i++) {
result.add(pool.take(10));
}
return result;
() -> {
List<ReferenceCountingResourceHolder<Integer>> result = new ArrayList<>();
for (int i = 0; i < limit2; i++) {
result.add(pool.take(10));
}
return result;
}
);
final List<ReferenceCountingResourceHolder<Integer>> r1 = f1.get();
final List<ReferenceCountingResourceHolder<Integer>> r2 = f2.get();
assertEquals(0, pool.getPoolSize());
assertTrue(r1.contains(null) || r2.contains(null));
Assert.assertEquals(0, pool.getPoolSize());
Assert.assertTrue(r1.contains(null) || r2.contains(null));
int nonNullCount = 0;
for (ReferenceCountingResourceHolder<Integer> holder : r1) {
@ -194,29 +180,19 @@ public class BlockingPoolTest
nonNullCount++;
}
}
assertEquals(pool.maxSize(), nonNullCount);
Assert.assertEquals(pool.maxSize(), nonNullCount);
final Future future1 = service.submit(new Runnable()
{
@Override
public void run()
{
for (ReferenceCountingResourceHolder<Integer> holder : r1) {
if (holder != null) {
holder.close();
}
final Future future1 = service.submit(() -> {
for (ReferenceCountingResourceHolder<Integer> holder : r1) {
if (holder != null) {
holder.close();
}
}
});
final Future future2 = service.submit(new Runnable()
{
@Override
public void run()
{
for (ReferenceCountingResourceHolder<Integer> holder : r2) {
if (holder != null) {
holder.close();
}
final Future future2 = service.submit(() -> {
for (ReferenceCountingResourceHolder<Integer> holder : r2) {
if (holder != null) {
holder.close();
}
}
});
@ -224,7 +200,7 @@ public class BlockingPoolTest
future1.get();
future2.get();
assertEquals(pool.maxSize(), pool.getPoolSize());
Assert.assertEquals(pool.maxSize(), pool.getPoolSize());
}
@Test(timeout = 60_000L)
@ -243,18 +219,18 @@ public class BlockingPoolTest
final List<ReferenceCountingResourceHolder<Integer>> r2 = f2.get();
if (r1 != null) {
assertTrue(r2.isEmpty());
assertEquals(pool.maxSize() - batch1, pool.getPoolSize());
assertEquals(batch1, r1.size());
Assert.assertTrue(r2.isEmpty());
Assert.assertEquals(pool.maxSize() - batch1, pool.getPoolSize());
Assert.assertEquals(batch1, r1.size());
r1.forEach(ReferenceCountingResourceHolder::close);
} else {
assertNotNull(r2);
assertEquals(pool.maxSize() - batch2, pool.getPoolSize());
assertEquals(batch2, r2.size());
Assert.assertNotNull(r2);
Assert.assertEquals(pool.maxSize() - batch2, pool.getPoolSize());
Assert.assertEquals(batch2, r2.size());
r2.forEach(ReferenceCountingResourceHolder::close);
}
assertEquals(pool.maxSize(), pool.getPoolSize());
Assert.assertEquals(pool.maxSize(), pool.getPoolSize());
}
@Test(timeout = 60_000L)
@ -272,35 +248,22 @@ public class BlockingPoolTest
final List<ReferenceCountingResourceHolder<Integer>> r1 = f1.get();
final List<ReferenceCountingResourceHolder<Integer>> r2 = f2.get();
assertNotNull(r1);
assertNotNull(r2);
assertEquals(batch1, r1.size());
assertEquals(batch2, r2.size());
assertEquals(0, pool.getPoolSize());
Assert.assertNotNull(r1);
Assert.assertNotNull(r2);
Assert.assertEquals(batch1, r1.size());
Assert.assertEquals(batch2, r2.size());
Assert.assertEquals(0, pool.getPoolSize());
final Future future1 = service.submit(new Runnable()
{
@Override
public void run()
{
r1.forEach(ReferenceCountingResourceHolder::close);
}
});
final Future future2 = service.submit(new Runnable()
{
@Override
public void run()
{
r2.forEach(ReferenceCountingResourceHolder::close);
}
});
final Future future1 = service.submit(() -> r1.forEach(ReferenceCountingResourceHolder::close));
final Future future2 = service.submit(() -> r2.forEach(ReferenceCountingResourceHolder::close));
future1.get();
future2.get();
assertEquals(pool.maxSize(), pool.getPoolSize());
Assert.assertEquals(pool.maxSize(), pool.getPoolSize());
}
@SuppressWarnings("CatchMayIgnoreException")
@Test(timeout = 60_000L)
public void testConcurrentTakeBatchClose() throws ExecutionException, InterruptedException
{
@ -309,28 +272,23 @@ public class BlockingPoolTest
final Callable<List<ReferenceCountingResourceHolder<Integer>>> c2 = () -> pool.takeBatch(10, 100);
final Future<List<ReferenceCountingResourceHolder<Integer>>> f2 = service.submit(c2);
final Future f1 = service.submit(new Runnable()
{
@Override
public void run()
{
try {
Thread.sleep(50);
}
catch (InterruptedException e) {
// ignore
}
r1.forEach(ReferenceCountingResourceHolder::close);
final Future f1 = service.submit(() -> {
try {
Thread.sleep(50);
}
catch (InterruptedException e) {
// ignore
}
r1.forEach(ReferenceCountingResourceHolder::close);
});
final List<ReferenceCountingResourceHolder<Integer>> r2 = f2.get();
f1.get();
assertNotNull(r2);
assertEquals(10, r2.size());
assertEquals(0, pool.getPoolSize());
Assert.assertNotNull(r2);
Assert.assertEquals(10, r2.size());
Assert.assertEquals(0, pool.getPoolSize());
r2.forEach(ReferenceCountingResourceHolder::close);
assertEquals(pool.maxSize(), pool.getPoolSize());
Assert.assertEquals(pool.maxSize(), pool.getPoolSize());
}
}

View File

@ -40,9 +40,6 @@ import java.util.List;
import java.util.Map;
import java.util.Properties;
import static org.apache.druid.java.util.emitter.core.EmitterTest.okResponse;
import static org.junit.Assert.assertEquals;
public class ParametrizedUriEmitterTest
{
private static final ObjectMapper jsonMapper = new ObjectMapper();
@ -71,7 +68,7 @@ public class ParametrizedUriEmitterTest
props.setProperty("org.apache.druid.java.util.emitter.recipientBaseUrlPattern", uriPattern);
lifecycle = new Lifecycle();
Emitter emitter = Emitters.create(props, httpClient, lifecycle);
assertEquals(ParametrizedUriEmitter.class, emitter.getClass());
Assert.assertEquals(ParametrizedUriEmitter.class, emitter.getClass());
lifecycle.start();
return emitter;
}
@ -107,7 +104,7 @@ public class ParametrizedUriEmitterTest
StandardCharsets.UTF_8.decode(request.getByteBufferData().slice()).toString()
);
return GoHandlers.immediateFuture(okResponse());
return GoHandlers.immediateFuture(EmitterTest.okResponse());
}
}.times(1)
);
@ -140,7 +137,7 @@ public class ParametrizedUriEmitterTest
request.getUrl(),
StandardCharsets.UTF_8.decode(request.getByteBufferData().slice()).toString()
);
return GoHandlers.immediateFuture(okResponse());
return GoHandlers.immediateFuture(EmitterTest.okResponse());
}
}.times(2)
);
@ -152,7 +149,8 @@ public class ParametrizedUriEmitterTest
Assert.assertTrue(httpClient.succeeded());
Map<String, String> expected = ImmutableMap.of(
"http://example.com/test1", StringUtils.format("[%s]\n", jsonMapper.writeValueAsString(events.get(0))),
"http://example.com/test2", StringUtils.format("[%s]\n", jsonMapper.writeValueAsString(events.get(1))));
"http://example.com/test2", StringUtils.format("[%s]\n", jsonMapper.writeValueAsString(events.get(1)))
);
Assert.assertEquals(expected, results);
}
@ -181,7 +179,7 @@ public class ParametrizedUriEmitterTest
StandardCharsets.UTF_8.decode(request.getByteBufferData().slice()).toString()
);
return GoHandlers.immediateFuture(okResponse());
return GoHandlers.immediateFuture(EmitterTest.okResponse());
}
}.times(1)
);
@ -209,7 +207,9 @@ public class ParametrizedUriEmitterTest
Assert.assertEquals(
e.getMessage(),
StringUtils.format(
"ParametrizedUriExtractor with pattern http://example.com/{keyNotSetInEvents} requires keyNotSetInEvents to be set in event, but found %s", event.toMap())
"ParametrizedUriExtractor with pattern http://example.com/{keyNotSetInEvents} requires keyNotSetInEvents to be set in event, but found %s",
event.toMap()
)
);
}
}

View File

@ -22,61 +22,83 @@ package org.apache.druid.timeline.partition;
import org.junit.Assert;
import org.junit.Test;
import static org.apache.druid.timeline.partition.IntegerPartitionChunk.make;
/**
*/
public class IntegerPartitionChunkTest
{
@Test
public void testAbuts()
{
IntegerPartitionChunk<Integer> lhs = make(null, 10, 0, 1);
IntegerPartitionChunk<Integer> lhs = IntegerPartitionChunk.make(null, 10, 0, 1);
Assert.assertTrue(lhs.abuts(make(10, null, 1, 2)));
Assert.assertFalse(lhs.abuts(make(11, null, 2, 3)));
Assert.assertFalse(lhs.abuts(make(null, null, 3, 4)));
Assert.assertTrue(lhs.abuts(IntegerPartitionChunk.make(10, null, 1, 2)));
Assert.assertFalse(lhs.abuts(IntegerPartitionChunk.make(11, null, 2, 3)));
Assert.assertFalse(lhs.abuts(IntegerPartitionChunk.make(null, null, 3, 4)));
Assert.assertFalse(make(null, null, 0, 1).abuts(make(null, null, 1, 2)));
Assert.assertFalse(IntegerPartitionChunk.make(null, null, 0, 1)
.abuts(IntegerPartitionChunk.make(null, null, 1, 2)));
}
@Test
public void testIsStart()
{
Assert.assertTrue(make(null, 10, 0, 1).isStart());
Assert.assertFalse(make(10, null, 0, 1).isStart());
Assert.assertFalse(make(10, 11, 0, 1).isStart());
Assert.assertTrue(make(null, null, 0, 1).isStart());
Assert.assertTrue(IntegerPartitionChunk.make(null, 10, 0, 1).isStart());
Assert.assertFalse(IntegerPartitionChunk.make(10, null, 0, 1).isStart());
Assert.assertFalse(IntegerPartitionChunk.make(10, 11, 0, 1).isStart());
Assert.assertTrue(IntegerPartitionChunk.make(null, null, 0, 1).isStart());
}
@Test
public void testIsEnd()
{
Assert.assertFalse(make(null, 10, 0, 1).isEnd());
Assert.assertTrue(make(10, null, 0, 1).isEnd());
Assert.assertFalse(make(10, 11, 0, 1).isEnd());
Assert.assertTrue(make(null, null, 0, 1).isEnd());
Assert.assertFalse(IntegerPartitionChunk.make(null, 10, 0, 1).isEnd());
Assert.assertTrue(IntegerPartitionChunk.make(10, null, 0, 1).isEnd());
Assert.assertFalse(IntegerPartitionChunk.make(10, 11, 0, 1).isEnd());
Assert.assertTrue(IntegerPartitionChunk.make(null, null, 0, 1).isEnd());
}
@Test
public void testCompareTo()
{
Assert.assertEquals(0, make(null, null, 0, 1).compareTo(make(null, null, 0, 1)));
Assert.assertEquals(0, make(10, null, 0, 1).compareTo(make(10, null, 0, 2)));
Assert.assertEquals(0, make(null, 10, 0, 1).compareTo(make(null, 10, 0, 2)));
Assert.assertEquals(0, make(10, 11, 0, 1).compareTo(make(10, 11, 0, 2)));
Assert.assertEquals(-1, make(null, 10, 0, 1).compareTo(make(10, null, 1, 2)));
Assert.assertEquals(-1, make(11, 20, 0, 1).compareTo(make(20, 33, 1, 1)));
Assert.assertEquals(1, make(20, 33, 1, 1).compareTo(make(11, 20, 0, 1)));
Assert.assertEquals(1, make(10, null, 1, 1).compareTo(make(null, 10, 0, 1)));
//noinspection EqualsWithItself (the intention of this first test is specifically to call compareTo with itself)
Assert.assertEquals(
0,
IntegerPartitionChunk.make(null, null, 0, 1).compareTo(IntegerPartitionChunk.make(null, null, 0, 1))
);
Assert.assertEquals(
0,
IntegerPartitionChunk.make(10, null, 0, 1).compareTo(IntegerPartitionChunk.make(10, null, 0, 2))
);
Assert.assertEquals(
0,
IntegerPartitionChunk.make(null, 10, 0, 1).compareTo(IntegerPartitionChunk.make(null, 10, 0, 2))
);
Assert.assertEquals(
0,
IntegerPartitionChunk.make(10, 11, 0, 1).compareTo(IntegerPartitionChunk.make(10, 11, 0, 2))
);
Assert.assertEquals(
-1,
IntegerPartitionChunk.make(null, 10, 0, 1).compareTo(IntegerPartitionChunk.make(10, null, 1, 2))
);
Assert.assertEquals(
-1,
IntegerPartitionChunk.make(11, 20, 0, 1).compareTo(IntegerPartitionChunk.make(20, 33, 1, 1))
);
Assert.assertEquals(
1,
IntegerPartitionChunk.make(20, 33, 1, 1).compareTo(IntegerPartitionChunk.make(11, 20, 0, 1))
);
Assert.assertEquals(
1,
IntegerPartitionChunk.make(10, null, 1, 1).compareTo(IntegerPartitionChunk.make(null, 10, 0, 1))
);
}
@Test
public void testEquals()
{
Assert.assertEquals(make(null, null, 0, 1), make(null, null, 0, 1));
Assert.assertEquals(make(null, 10, 0, 1), make(null, 10, 0, 1));
Assert.assertEquals(make(10, null, 0, 1), make(10, null, 0, 1));
Assert.assertEquals(make(10, 11, 0, 1), make(10, 11, 0, 1));
Assert.assertEquals(IntegerPartitionChunk.make(null, null, 0, 1), IntegerPartitionChunk.make(null, null, 0, 1));
Assert.assertEquals(IntegerPartitionChunk.make(null, 10, 0, 1), IntegerPartitionChunk.make(null, 10, 0, 1));
Assert.assertEquals(IntegerPartitionChunk.make(10, null, 0, 1), IntegerPartitionChunk.make(10, null, 0, 1));
Assert.assertEquals(IntegerPartitionChunk.make(10, 11, 0, 1), IntegerPartitionChunk.make(10, 11, 0, 1));
}
}

View File

@ -22,61 +22,89 @@ package org.apache.druid.timeline.partition;
import org.junit.Assert;
import org.junit.Test;
import static org.apache.druid.timeline.partition.StringPartitionChunk.make;
/**
*/
public class StringPartitionChunkTest
{
@Test
public void testAbuts()
{
StringPartitionChunk<Integer> lhs = make(null, "10", 0, 1);
StringPartitionChunk<Integer> lhs = StringPartitionChunk.make(null, "10", 0, 1);
Assert.assertTrue(lhs.abuts(make("10", null, 1, 2)));
Assert.assertFalse(lhs.abuts(make("11", null, 2, 3)));
Assert.assertFalse(lhs.abuts(make(null, null, 3, 4)));
Assert.assertTrue(lhs.abuts(StringPartitionChunk.make("10", null, 1, 2)));
Assert.assertFalse(lhs.abuts(StringPartitionChunk.make("11", null, 2, 3)));
Assert.assertFalse(lhs.abuts(StringPartitionChunk.make(null, null, 3, 4)));
Assert.assertFalse(make(null, null, 0, 1).abuts(make(null, null, 1, 2)));
Assert.assertFalse(StringPartitionChunk.make(null, null, 0, 1).abuts(StringPartitionChunk.make(null, null, 1, 2)));
}
@Test
public void testIsStart()
{
Assert.assertTrue(make(null, "10", 0, 1).isStart());
Assert.assertFalse(make("10", null, 0, 1).isStart());
Assert.assertFalse(make("10", "11", 0, 1).isStart());
Assert.assertTrue(make(null, null, 0, 1).isStart());
Assert.assertTrue(StringPartitionChunk.make(null, "10", 0, 1).isStart());
Assert.assertFalse(StringPartitionChunk.make("10", null, 0, 1).isStart());
Assert.assertFalse(StringPartitionChunk.make("10", "11", 0, 1).isStart());
Assert.assertTrue(StringPartitionChunk.make(null, null, 0, 1).isStart());
}
@Test
public void testIsEnd()
{
Assert.assertFalse(make(null, "10", 0, 1).isEnd());
Assert.assertTrue(make("10", null, 0, 1).isEnd());
Assert.assertFalse(make("10", "11", 0, 1).isEnd());
Assert.assertTrue(make(null, null, 0, 1).isEnd());
Assert.assertFalse(StringPartitionChunk.make(null, "10", 0, 1).isEnd());
Assert.assertTrue(StringPartitionChunk.make("10", null, 0, 1).isEnd());
Assert.assertFalse(StringPartitionChunk.make("10", "11", 0, 1).isEnd());
Assert.assertTrue(StringPartitionChunk.make(null, null, 0, 1).isEnd());
}
@Test
public void testCompareTo()
{
Assert.assertEquals(0, make(null, null, 0, 1).compareTo(make(null, null, 0, 2)));
Assert.assertEquals(0, make("10", null, 0, 1).compareTo(make("10", null, 0, 2)));
Assert.assertEquals(0, make(null, "10", 1, 1).compareTo(make(null, "10", 1, 2)));
Assert.assertEquals(0, make("10", "11", 1, 1).compareTo(make("10", "11", 1, 2)));
Assert.assertEquals(-1, make(null, "10", 0, 1).compareTo(make("10", null, 1, 2)));
Assert.assertEquals(-1, make("11", "20", 0, 1).compareTo(make("20", "33", 1, 1)));
Assert.assertEquals(1, make("20", "33", 1, 1).compareTo(make("11", "20", 0, 1)));
Assert.assertEquals(1, make("10", null, 1, 1).compareTo(make(null, "10", 0, 1)));
Assert.assertEquals(
0,
StringPartitionChunk.make(null, null, 0, 1)
.compareTo(StringPartitionChunk.make(null, null, 0, 2))
);
Assert.assertEquals(
0,
StringPartitionChunk.make("10", null, 0, 1)
.compareTo(StringPartitionChunk.make("10", null, 0, 2))
);
Assert.assertEquals(
0,
StringPartitionChunk.make(null, "10", 1, 1)
.compareTo(StringPartitionChunk.make(null, "10", 1, 2))
);
Assert.assertEquals(
0,
StringPartitionChunk.make("10", "11", 1, 1)
.compareTo(StringPartitionChunk.make("10", "11", 1, 2))
);
Assert.assertEquals(
-1,
StringPartitionChunk.make(null, "10", 0, 1)
.compareTo(StringPartitionChunk.make("10", null, 1, 2))
);
Assert.assertEquals(
-1,
StringPartitionChunk.make("11", "20", 0, 1)
.compareTo(StringPartitionChunk.make("20", "33", 1, 1))
);
Assert.assertEquals(
1,
StringPartitionChunk.make("20", "33", 1, 1)
.compareTo(StringPartitionChunk.make("11", "20", 0, 1))
);
Assert.assertEquals(
1,
StringPartitionChunk.make("10", null, 1, 1)
.compareTo(StringPartitionChunk.make(null, "10", 0, 1))
);
}
@Test
public void testEquals()
{
Assert.assertEquals(make(null, null, 0, 1), make(null, null, 0, 1));
Assert.assertEquals(make(null, "10", 0, 1), make(null, "10", 0, 1));
Assert.assertEquals(make("10", null, 0, 1), make("10", null, 0, 1));
Assert.assertEquals(make("10", "11", 0, 1), make("10", "11", 0, 1));
Assert.assertEquals(StringPartitionChunk.make(null, null, 0, 1), StringPartitionChunk.make(null, null, 0, 1));
Assert.assertEquals(StringPartitionChunk.make(null, "10", 0, 1), StringPartitionChunk.make(null, "10", 0, 1));
Assert.assertEquals(StringPartitionChunk.make("10", null, 0, 1), StringPartitionChunk.make("10", null, 0, 1));
Assert.assertEquals(StringPartitionChunk.make("10", "11", 0, 1), StringPartitionChunk.make("10", "11", 0, 1));
}
}

View File

@ -20,6 +20,7 @@
package org.apache.druid.storage.azure;
import com.microsoft.azure.storage.StorageException;
import org.easymock.EasyMock;
import org.easymock.EasyMockSupport;
import org.junit.Test;
@ -27,8 +28,6 @@ import java.io.IOException;
import java.io.InputStream;
import java.net.URISyntaxException;
import static org.easymock.EasyMock.expect;
public class AzureByteSourceTest extends EasyMockSupport
{
@ -40,7 +39,7 @@ public class AzureByteSourceTest extends EasyMockSupport
AzureStorage azureStorage = createMock(AzureStorage.class);
InputStream stream = createMock(InputStream.class);
expect(azureStorage.getBlobInputStream(containerName, blobPath)).andReturn(stream);
EasyMock.expect(azureStorage.getBlobInputStream(containerName, blobPath)).andReturn(stream);
replayAll();
@ -58,7 +57,7 @@ public class AzureByteSourceTest extends EasyMockSupport
final String blobPath = "/path/to/file";
AzureStorage azureStorage = createMock(AzureStorage.class);
expect(azureStorage.getBlobInputStream(containerName, blobPath)).andThrow(
EasyMock.expect(azureStorage.getBlobInputStream(containerName, blobPath)).andThrow(
new StorageException(
"",
"",

View File

@ -25,6 +25,7 @@ import org.apache.druid.java.util.common.Intervals;
import org.apache.druid.segment.loading.SegmentLoadingException;
import org.apache.druid.timeline.DataSegment;
import org.apache.druid.timeline.partition.NoneShardSpec;
import org.easymock.EasyMock;
import org.easymock.EasyMockSupport;
import org.junit.Before;
import org.junit.Test;
@ -34,19 +35,16 @@ import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import static org.easymock.EasyMock.expect;
public class AzureDataSegmentKillerTest extends EasyMockSupport
{
private static final String CONTAINER_NAME = "container";
private static final String BLOB_PATH = "test/2015-04-12T00:00:00.000Z_2015-04-13T00:00:00.000Z/1/0/index.zip";
private static final String containerName = "container";
private static final String blobPath = "test/2015-04-12T00:00:00.000Z_2015-04-13T00:00:00.000Z/1/0/index.zip";
private static final DataSegment dataSegment = new DataSegment(
private static final DataSegment DATA_SEGMENT = new DataSegment(
"test",
Intervals.of("2015-04-12/2015-04-13"),
"1",
ImmutableMap.of("containerName", containerName, "blobPath", blobPath),
ImmutableMap.of("containerName", CONTAINER_NAME, "blobPath", BLOB_PATH),
null,
null,
NoneShardSpec.instance(),
@ -67,15 +65,15 @@ public class AzureDataSegmentKillerTest extends EasyMockSupport
{
List<String> deletedFiles = new ArrayList<>();
final String dirPath = Paths.get(blobPath).getParent().toString();
final String dirPath = Paths.get(BLOB_PATH).getParent().toString();
expect(azureStorage.emptyCloudBlobDirectory(containerName, dirPath)).andReturn(deletedFiles);
EasyMock.expect(azureStorage.emptyCloudBlobDirectory(CONTAINER_NAME, dirPath)).andReturn(deletedFiles);
replayAll();
AzureDataSegmentKiller killer = new AzureDataSegmentKiller(azureStorage);
killer.kill(dataSegment);
killer.kill(DATA_SEGMENT);
verifyAll();
}
@ -84,9 +82,9 @@ public class AzureDataSegmentKillerTest extends EasyMockSupport
public void killWithErrorTest() throws SegmentLoadingException, URISyntaxException, StorageException
{
String dirPath = Paths.get(blobPath).getParent().toString();
String dirPath = Paths.get(BLOB_PATH).getParent().toString();
expect(azureStorage.emptyCloudBlobDirectory(containerName, dirPath)).andThrow(
EasyMock.expect(azureStorage.emptyCloudBlobDirectory(CONTAINER_NAME, dirPath)).andThrow(
new StorageException(
"",
"",
@ -100,7 +98,7 @@ public class AzureDataSegmentKillerTest extends EasyMockSupport
AzureDataSegmentKiller killer = new AzureDataSegmentKiller(azureStorage);
killer.kill(dataSegment);
killer.kill(DATA_SEGMENT);
verifyAll();
}

View File

@ -22,7 +22,9 @@ package org.apache.druid.storage.azure;
import com.microsoft.azure.storage.StorageException;
import org.apache.druid.java.util.common.FileUtils;
import org.apache.druid.segment.loading.SegmentLoadingException;
import org.easymock.EasyMock;
import org.easymock.EasyMockSupport;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
@ -33,11 +35,6 @@ import java.io.InputStream;
import java.net.URISyntaxException;
import java.nio.file.Files;
import static org.easymock.EasyMock.expect;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
public class AzureDataSegmentPullerTest extends EasyMockSupport
{
@ -61,7 +58,7 @@ public class AzureDataSegmentPullerTest extends EasyMockSupport
try {
final InputStream zipStream = new FileInputStream(pulledFile);
expect(azureStorage.getBlobInputStream(containerName, blobPath)).andReturn(zipStream);
EasyMock.expect(azureStorage.getBlobInputStream(containerName, blobPath)).andReturn(zipStream);
replayAll();
@ -70,9 +67,9 @@ public class AzureDataSegmentPullerTest extends EasyMockSupport
FileUtils.FileCopyResult result = puller.getSegmentFiles(containerName, blobPath, toDir);
File expected = new File(toDir, SEGMENT_FILE_NAME);
assertEquals(value.length(), result.size());
assertTrue(expected.exists());
assertEquals(value.length(), expected.length());
Assert.assertEquals(value.length(), result.size());
Assert.assertTrue(expected.exists());
Assert.assertEquals(value.length(), expected.length());
verifyAll();
}
@ -89,7 +86,7 @@ public class AzureDataSegmentPullerTest extends EasyMockSupport
final File outDir = Files.createTempDirectory("druid").toFile();
try {
expect(azureStorage.getBlobInputStream(containerName, blobPath)).andThrow(
EasyMock.expect(azureStorage.getBlobInputStream(containerName, blobPath)).andThrow(
new URISyntaxException(
"error",
"error",
@ -103,13 +100,12 @@ public class AzureDataSegmentPullerTest extends EasyMockSupport
puller.getSegmentFiles(containerName, blobPath, outDir);
assertFalse(outDir.exists());
Assert.assertFalse(outDir.exists());
verifyAll();
}
finally {
org.apache.commons.io.FileUtils.deleteDirectory(outDir);
}
}
}

View File

@ -29,6 +29,7 @@ import org.apache.druid.java.util.common.MapUtils;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.timeline.DataSegment;
import org.apache.druid.timeline.partition.NoneShardSpec;
import org.easymock.EasyMock;
import org.easymock.EasyMockSupport;
import org.junit.Assert;
import org.junit.Before;
@ -44,9 +45,6 @@ import java.util.HashMap;
import java.util.Map;
import java.util.regex.Pattern;
import static org.easymock.EasyMock.expectLastCall;
import static org.junit.Assert.assertEquals;
public class AzureDataSegmentPusherTest extends EasyMockSupport
{
@Rule
@ -134,7 +132,7 @@ public class AzureDataSegmentPusherTest extends EasyMockSupport
final String storageDir = pusher.getStorageDir(dataSegment, false);
final String azurePath = pusher.getAzurePath(dataSegment, false);
assertEquals(
Assert.assertEquals(
StringUtils.format("%s/%s", storageDir, AzureStorageDruidModule.INDEX_ZIP_FILE_NAME),
azurePath
);
@ -149,7 +147,7 @@ public class AzureDataSegmentPusherTest extends EasyMockSupport
final String azurePath = pusher.getAzurePath(dataSegment, false);
azureStorage.uploadBlob(compressedSegmentData, containerName, azurePath);
expectLastCall();
EasyMock.expectLastCall();
replayAll();
@ -161,11 +159,11 @@ public class AzureDataSegmentPusherTest extends EasyMockSupport
azurePath
);
assertEquals(compressedSegmentData.length(), pushedDataSegment.getSize());
assertEquals(binaryVersion, (int) pushedDataSegment.getBinaryVersion());
Assert.assertEquals(compressedSegmentData.length(), pushedDataSegment.getSize());
Assert.assertEquals(binaryVersion, (int) pushedDataSegment.getBinaryVersion());
Map<String, Object> loadSpec = pushedDataSegment.getLoadSpec();
assertEquals(AzureStorageDruidModule.SCHEME, MapUtils.getString(loadSpec, "type"));
assertEquals(azurePath, MapUtils.getString(loadSpec, "blobPath"));
Assert.assertEquals(AzureStorageDruidModule.SCHEME, MapUtils.getString(loadSpec, "type"));
Assert.assertEquals(azurePath, MapUtils.getString(loadSpec, "blobPath"));
verifyAll();
}

View File

@ -25,6 +25,7 @@ import com.google.common.io.Files;
import org.apache.commons.io.FileUtils;
import org.apache.commons.io.IOUtils;
import org.apache.druid.java.util.common.StringUtils;
import org.easymock.EasyMock;
import org.easymock.EasyMockSupport;
import org.junit.Assert;
import org.junit.Before;
@ -35,9 +36,6 @@ import java.io.File;
import java.io.StringWriter;
import java.nio.charset.StandardCharsets;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.expectLastCall;
public class AzureTaskLogsTest extends EasyMockSupport
{
@ -66,7 +64,7 @@ public class AzureTaskLogsTest extends EasyMockSupport
final File logFile = new File(tmpDir, "log");
azureStorage.uploadBlob(logFile, container, prefix + "/" + taskid + "/log");
expectLastCall();
EasyMock.expectLastCall();
replayAll();
@ -85,9 +83,9 @@ public class AzureTaskLogsTest extends EasyMockSupport
final String testLog = "hello this is a log";
final String blobPath = prefix + "/" + taskid + "/log";
expect(azureStorage.getBlobExists(container, blobPath)).andReturn(true);
expect(azureStorage.getBlobLength(container, blobPath)).andReturn((long) testLog.length());
expect(azureStorage.getBlobInputStream(container, blobPath)).andReturn(
EasyMock.expect(azureStorage.getBlobExists(container, blobPath)).andReturn(true);
EasyMock.expect(azureStorage.getBlobLength(container, blobPath)).andReturn((long) testLog.length());
EasyMock.expect(azureStorage.getBlobInputStream(container, blobPath)).andReturn(
new ByteArrayInputStream(testLog.getBytes(StandardCharsets.UTF_8)));
@ -108,9 +106,9 @@ public class AzureTaskLogsTest extends EasyMockSupport
final String testLog = "hello this is a log";
final String blobPath = prefix + "/" + taskid + "/log";
expect(azureStorage.getBlobExists(container, blobPath)).andReturn(true);
expect(azureStorage.getBlobLength(container, blobPath)).andReturn((long) testLog.length());
expect(azureStorage.getBlobInputStream(container, blobPath)).andReturn(
EasyMock.expect(azureStorage.getBlobExists(container, blobPath)).andReturn(true);
EasyMock.expect(azureStorage.getBlobLength(container, blobPath)).andReturn((long) testLog.length());
EasyMock.expect(azureStorage.getBlobInputStream(container, blobPath)).andReturn(
new ByteArrayInputStream(testLog.getBytes(StandardCharsets.UTF_8)));
@ -131,9 +129,9 @@ public class AzureTaskLogsTest extends EasyMockSupport
final String testLog = "hello this is a log";
final String blobPath = prefix + "/" + taskid + "/log";
expect(azureStorage.getBlobExists(container, blobPath)).andReturn(true);
expect(azureStorage.getBlobLength(container, blobPath)).andReturn((long) testLog.length());
expect(azureStorage.getBlobInputStream(container, blobPath)).andReturn(
EasyMock.expect(azureStorage.getBlobExists(container, blobPath)).andReturn(true);
EasyMock.expect(azureStorage.getBlobLength(container, blobPath)).andReturn((long) testLog.length());
EasyMock.expect(azureStorage.getBlobInputStream(container, blobPath)).andReturn(
new ByteArrayInputStream(StringUtils.toUtf8(testLog)));

View File

@ -19,19 +19,17 @@
package org.apache.druid.storage.cloudfiles;
import org.easymock.EasyMock;
import org.easymock.EasyMockSupport;
import org.jclouds.io.Payload;
import org.junit.Assert;
import org.junit.Test;
import java.io.IOException;
import java.io.InputStream;
import static org.easymock.EasyMock.expect;
import static org.junit.Assert.assertEquals;
public class CloudFilesByteSourceTest extends EasyMockSupport
{
@Test
public void openStreamTest() throws IOException
{
@ -42,15 +40,15 @@ public class CloudFilesByteSourceTest extends EasyMockSupport
Payload payload = createMock(Payload.class);
InputStream stream = createMock(InputStream.class);
expect(objectApi.get(path, 0)).andReturn(cloudFilesObject);
expect(cloudFilesObject.getPayload()).andReturn(payload);
expect(payload.openStream()).andReturn(stream);
EasyMock.expect(objectApi.get(path, 0)).andReturn(cloudFilesObject);
EasyMock.expect(cloudFilesObject.getPayload()).andReturn(payload);
EasyMock.expect(payload.openStream()).andReturn(stream);
payload.close();
replayAll();
CloudFilesByteSource byteSource = new CloudFilesByteSource(objectApi, path);
assertEquals(stream, byteSource.openStream());
Assert.assertEquals(stream, byteSource.openStream());
byteSource.closeStream();
verifyAll();
@ -66,9 +64,9 @@ public class CloudFilesByteSourceTest extends EasyMockSupport
Payload payload = createMock(Payload.class);
InputStream stream = createMock(InputStream.class);
expect(objectApi.get(path, 0)).andReturn(cloudFilesObject);
expect(cloudFilesObject.getPayload()).andReturn(payload);
expect(payload.openStream()).andThrow(new IOException()).andReturn(stream);
EasyMock.expect(objectApi.get(path, 0)).andReturn(cloudFilesObject);
EasyMock.expect(cloudFilesObject.getPayload()).andReturn(payload);
EasyMock.expect(payload.openStream()).andThrow(new IOException()).andReturn(stream);
payload.close();
replayAll();
@ -78,13 +76,12 @@ public class CloudFilesByteSourceTest extends EasyMockSupport
byteSource.openStream();
}
catch (Exception e) {
assertEquals("Recoverable exception", e.getMessage());
Assert.assertEquals("Recoverable exception", e.getMessage());
}
assertEquals(stream, byteSource.openStream());
Assert.assertEquals(stream, byteSource.openStream());
byteSource.closeStream();
verifyAll();
}
}

View File

@ -19,19 +19,17 @@
package org.apache.druid.storage.cloudfiles;
import org.easymock.EasyMock;
import org.easymock.EasyMockSupport;
import org.jclouds.io.Payload;
import org.jclouds.openstack.swift.v1.domain.SwiftObject;
import org.jclouds.openstack.swift.v1.features.ObjectApi;
import org.jclouds.rackspace.cloudfiles.v1.CloudFilesApi;
import org.junit.Assert;
import org.junit.Test;
import static org.easymock.EasyMock.expect;
import static org.junit.Assert.assertEquals;
public class CloudFilesObjectApiProxyTest extends EasyMockSupport
{
@Test
public void getTest()
{
@ -44,21 +42,20 @@ public class CloudFilesObjectApiProxyTest extends EasyMockSupport
SwiftObject swiftObject = createMock(SwiftObject.class);
Payload payload = createMock(Payload.class);
expect(cloudFilesApi.getObjectApi(region, container)).andReturn(objectApi);
expect(objectApi.get(path)).andReturn(swiftObject);
expect(swiftObject.getPayload()).andReturn(payload);
EasyMock.expect(cloudFilesApi.getObjectApi(region, container)).andReturn(objectApi);
EasyMock.expect(objectApi.get(path)).andReturn(swiftObject);
EasyMock.expect(swiftObject.getPayload()).andReturn(payload);
replayAll();
CloudFilesObjectApiProxy cfoApiProxy = new CloudFilesObjectApiProxy(cloudFilesApi, region, container);
CloudFilesObject cloudFilesObject = cfoApiProxy.get(path, 0);
assertEquals(cloudFilesObject.getPayload(), payload);
assertEquals(cloudFilesObject.getRegion(), region);
assertEquals(cloudFilesObject.getContainer(), container);
assertEquals(cloudFilesObject.getPath(), path);
Assert.assertEquals(cloudFilesObject.getPayload(), payload);
Assert.assertEquals(cloudFilesObject.getRegion(), region);
Assert.assertEquals(cloudFilesObject.getContainer(), container);
Assert.assertEquals(cloudFilesObject.getPath(), path);
verifyAll();
}
}

View File

@ -26,6 +26,8 @@ import junitparams.Parameters;
import org.apache.druid.java.util.common.Pair;
import org.apache.druid.java.util.common.parsers.ParseException;
import org.apache.druid.java.util.common.parsers.Parser;
import org.hamcrest.MatcherAssert;
import org.hamcrest.Matchers;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
@ -33,15 +35,14 @@ import org.junit.runner.RunWith;
import java.util.HashMap;
import java.util.Map;
import static org.hamcrest.MatcherAssert.assertThat;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.isA;
@RunWith(JUnitParamsRunner.class)
public class InfluxParserTest
{
@SuppressWarnings("unused")
private String name;
@SuppressWarnings("unused")
private String input;
@SuppressWarnings("unused")
private Map<String, Object> expected;
private static Object[] testCase(String name, String input, Parsed expected)
@ -49,7 +50,6 @@ public class InfluxParserTest
return Lists.newArrayList(name, input, expected).toArray();
}
public Object[] testData()
{
return Lists.newArrayList(
@ -142,14 +142,20 @@ public class InfluxParserTest
{
Parser<String, Object> parser = new InfluxParser(null);
Map<String, Object> parsed = parser.parseToMap(input);
assertThat("correct measurement name", parsed.get("measurement"), equalTo(expected.measurement));
assertThat("correct timestamp", parsed.get(InfluxParser.TIMESTAMP_KEY), equalTo(expected.timestamp));
expected.kv.forEach((k, v) -> {
assertThat("correct field " + k, parsed.get(k), equalTo(v));
});
MatcherAssert.assertThat(
"correct measurement name",
parsed.get("measurement"),
Matchers.equalTo(expected.measurement)
);
MatcherAssert.assertThat(
"correct timestamp",
parsed.get(InfluxParser.TIMESTAMP_KEY),
Matchers.equalTo(expected.timestamp)
);
expected.kv.forEach((k, v) -> MatcherAssert.assertThat("correct field " + k, parsed.get(k), Matchers.equalTo(v)));
parsed.remove("measurement");
parsed.remove(InfluxParser.TIMESTAMP_KEY);
assertThat("No extra keys in parsed data", parsed.keySet(), equalTo(expected.kv.keySet()));
MatcherAssert.assertThat("No extra keys in parsed data", parsed.keySet(), Matchers.equalTo(expected.kv.keySet()));
}
@Test
@ -158,7 +164,7 @@ public class InfluxParserTest
Parser<String, Object> parser = new InfluxParser(Sets.newHashSet("cpu"));
String input = "cpu,host=foo.bar.baz,region=us-east,application=echo pct_idle=99.3,pct_user=88.8,m1_load=2 1465839830100400200";
Map<String, Object> parsed = parser.parseToMap(input);
assertThat(parsed.get("measurement"), equalTo("cpu"));
MatcherAssert.assertThat(parsed.get("measurement"), Matchers.equalTo("cpu"));
}
@Test
@ -170,7 +176,7 @@ public class InfluxParserTest
parser.parseToMap(input);
}
catch (ParseException t) {
assertThat(t, isA(ParseException.class));
MatcherAssert.assertThat(t, Matchers.isA(ParseException.class));
return;
}
@ -192,10 +198,10 @@ public class InfluxParserTest
{
Parser<String, Object> parser = new InfluxParser(null);
try {
Map res = parser.parseToMap(testCase.rhs);
parser.parseToMap(testCase.rhs);
}
catch (ParseException t) {
assertThat(t, isA(ParseException.class));
MatcherAssert.assertThat(t, Matchers.isA(ParseException.class));
return;
}
@ -206,9 +212,9 @@ public class InfluxParserTest
{
private String measurement;
private Long timestamp;
private Map<String, Object> kv = new HashMap<>();
private final Map<String, Object> kv = new HashMap<>();
public static Parsed row(String measurement, Long timestamp)
static Parsed row(String measurement, Long timestamp)
{
Parsed e = new Parsed();
e.measurement = measurement;
@ -216,7 +222,7 @@ public class InfluxParserTest
return e;
}
public Parsed with(String k, Object v)
Parsed with(String k, Object v)
{
kv.put(k, v);
return this;

View File

@ -41,6 +41,7 @@ import org.apache.druid.segment.TestHelper;
import org.apache.druid.segment.realtime.firehose.ChatHandlerProvider;
import org.apache.druid.segment.realtime.firehose.NoopChatHandlerProvider;
import org.apache.druid.server.security.AuthorizerMapper;
import org.easymock.EasyMock;
import org.hamcrest.CoreMatchers;
import org.junit.Assert;
import org.junit.Before;
@ -50,15 +51,13 @@ import org.junit.rules.ExpectedException;
import java.io.IOException;
import static org.easymock.EasyMock.createMock;
public class MaterializedViewSupervisorSpecTest
public class MaterializedViewSupervisorSpecTest
{
@Rule
public ExpectedException expectedException = ExpectedException.none();
private ObjectMapper objectMapper = TestHelper.makeJsonMapper();
private final ObjectMapper objectMapper = TestHelper.makeJsonMapper();
@Before
public void setup()
{
@ -73,53 +72,53 @@ public class MaterializedViewSupervisorSpecTest
.addValue(SQLMetadataSegmentManager.class, null)
.addValue(IndexerMetadataStorageCoordinator.class, null)
.addValue(MaterializedViewTaskConfig.class, new MaterializedViewTaskConfig())
.addValue(AuthorizerMapper.class, createMock(AuthorizerMapper.class))
.addValue(AuthorizerMapper.class, EasyMock.createMock(AuthorizerMapper.class))
.addValue(ChatHandlerProvider.class, new NoopChatHandlerProvider())
.addValue(SupervisorStateManagerConfig.class, new SupervisorStateManagerConfig())
);
}
@Test
public void testSupervisorSerialization() throws IOException
public void testSupervisorSerialization() throws IOException
{
String supervisorStr = "{\n" +
" \"type\" : \"derivativeDataSource\",\n" +
" \"baseDataSource\": \"wikiticker\",\n" +
" \"dimensionsSpec\":{\n" +
" \"dimensions\" : [\n" +
" \"isUnpatrolled\",\n" +
" \"metroCode\",\n" +
" \"namespace\",\n" +
" \"page\",\n" +
" \"regionIsoCode\",\n" +
" \"regionName\",\n" +
" \"user\"\n" +
" ]\n" +
" },\n" +
" \"metricsSpec\" : [\n" +
" {\n" +
" \"name\" : \"count\",\n" +
" \"type\" : \"count\"\n" +
" },\n" +
" {\n" +
" \"name\" : \"added\",\n" +
" \"type\" : \"longSum\",\n" +
" \"fieldName\" : \"added\"\n" +
" }\n" +
" ],\n" +
" \"tuningConfig\": {\n" +
" \"type\" : \"hadoop\"\n" +
" }\n" +
"}";
" \"type\" : \"derivativeDataSource\",\n" +
" \"baseDataSource\": \"wikiticker\",\n" +
" \"dimensionsSpec\":{\n" +
" \"dimensions\" : [\n" +
" \"isUnpatrolled\",\n" +
" \"metroCode\",\n" +
" \"namespace\",\n" +
" \"page\",\n" +
" \"regionIsoCode\",\n" +
" \"regionName\",\n" +
" \"user\"\n" +
" ]\n" +
" },\n" +
" \"metricsSpec\" : [\n" +
" {\n" +
" \"name\" : \"count\",\n" +
" \"type\" : \"count\"\n" +
" },\n" +
" {\n" +
" \"name\" : \"added\",\n" +
" \"type\" : \"longSum\",\n" +
" \"fieldName\" : \"added\"\n" +
" }\n" +
" ],\n" +
" \"tuningConfig\": {\n" +
" \"type\" : \"hadoop\"\n" +
" }\n" +
"}";
MaterializedViewSupervisorSpec expected = new MaterializedViewSupervisorSpec(
"wikiticker",
new DimensionsSpec(
Lists.newArrayList(
new StringDimensionSchema("isUnpatrolled"),
new StringDimensionSchema("metroCode"),
new StringDimensionSchema("namespace"),
new StringDimensionSchema("page"),
new StringDimensionSchema("regionIsoCode"),
new StringDimensionSchema("metroCode"),
new StringDimensionSchema("namespace"),
new StringDimensionSchema("page"),
new StringDimensionSchema("regionIsoCode"),
new StringDimensionSchema("regionName"),
new StringDimensionSchema("user")
),
@ -144,7 +143,7 @@ public class MaterializedViewSupervisorSpecTest
null,
null,
new MaterializedViewTaskConfig(),
createMock(AuthorizerMapper.class),
EasyMock.createMock(AuthorizerMapper.class),
new NoopChatHandlerProvider(),
new SupervisorStateManagerConfig()
);
@ -193,11 +192,17 @@ public class MaterializedViewSupervisorSpecTest
Assert.assertFalse(spec.isSuspended());
String suspendedSerialized = objectMapper.writeValueAsString(spec.createSuspendedSpec());
MaterializedViewSupervisorSpec suspendedSpec = objectMapper.readValue(suspendedSerialized, MaterializedViewSupervisorSpec.class);
MaterializedViewSupervisorSpec suspendedSpec = objectMapper.readValue(
suspendedSerialized,
MaterializedViewSupervisorSpec.class
);
Assert.assertTrue(suspendedSpec.isSuspended());
String runningSerialized = objectMapper.writeValueAsString(spec.createRunningSpec());
MaterializedViewSupervisorSpec runningSpec = objectMapper.readValue(runningSerialized, MaterializedViewSupervisorSpec.class);
MaterializedViewSupervisorSpec runningSpec = objectMapper.readValue(
runningSerialized,
MaterializedViewSupervisorSpec.class
);
Assert.assertFalse(runningSpec.isSuspended());
}
@ -208,7 +213,8 @@ public class MaterializedViewSupervisorSpecTest
expectedException.expectMessage(
"baseDataSource cannot be null or empty. Please provide a baseDataSource."
);
MaterializedViewSupervisorSpec materializedViewSupervisorSpec = new MaterializedViewSupervisorSpec(
//noinspection ResultOfObjectAllocationIgnored (this method call will trigger the expected exception)
new MaterializedViewSupervisorSpec(
"",
new DimensionsSpec(
Lists.newArrayList(
@ -241,7 +247,7 @@ public class MaterializedViewSupervisorSpecTest
null,
null,
new MaterializedViewTaskConfig(),
createMock(AuthorizerMapper.class),
EasyMock.createMock(AuthorizerMapper.class),
new NoopChatHandlerProvider(),
new SupervisorStateManagerConfig()
);
@ -254,7 +260,8 @@ public class MaterializedViewSupervisorSpecTest
expectedException.expectMessage(
"baseDataSource cannot be null or empty. Please provide a baseDataSource."
);
MaterializedViewSupervisorSpec materializedViewSupervisorSpec = new MaterializedViewSupervisorSpec(
//noinspection ResultOfObjectAllocationIgnored (this method call will trigger the expected exception)
new MaterializedViewSupervisorSpec(
null,
new DimensionsSpec(
Lists.newArrayList(
@ -287,7 +294,7 @@ public class MaterializedViewSupervisorSpecTest
null,
null,
new MaterializedViewTaskConfig(),
createMock(AuthorizerMapper.class),
EasyMock.createMock(AuthorizerMapper.class),
new NoopChatHandlerProvider(),
new SupervisorStateManagerConfig()
);

View File

@ -68,9 +68,6 @@ import java.util.Map;
import java.util.Set;
import java.util.SortedMap;
import static org.easymock.EasyMock.createMock;
import static org.easymock.EasyMock.expect;
public class MaterializedViewSupervisorTest
{
@Rule
@ -78,7 +75,6 @@ public class MaterializedViewSupervisorTest
@Rule
public final ExpectedException expectedException = ExpectedException.none();
private TestDerbyConnector derbyConnector;
private TaskStorage taskStorage;
private TaskMaster taskMaster;
private IndexerMetadataStorageCoordinator indexerMetadataStorageCoordinator;
@ -86,28 +82,27 @@ public class MaterializedViewSupervisorTest
private SQLMetadataSegmentManager sqlMetadataSegmentManager;
private TaskQueue taskQueue;
private MaterializedViewSupervisor supervisor;
private MaterializedViewSupervisorSpec spec;
private ObjectMapper objectMapper = TestHelper.makeJsonMapper();
private final ObjectMapper objectMapper = TestHelper.makeJsonMapper();
@Before
public void setUp()
{
derbyConnector = derbyConnectorRule.getConnector();
TestDerbyConnector derbyConnector = derbyConnectorRule.getConnector();
derbyConnector.createDataSourceTable();
derbyConnector.createSegmentTable();
taskStorage = createMock(TaskStorage.class);
taskMaster = createMock(TaskMaster.class);
taskStorage = EasyMock.createMock(TaskStorage.class);
taskMaster = EasyMock.createMock(TaskMaster.class);
indexerMetadataStorageCoordinator = new IndexerSQLMetadataStorageCoordinator(
objectMapper,
derbyConnectorRule.metadataTablesConfigSupplier().get(),
derbyConnector
);
metadataSupervisorManager = createMock(MetadataSupervisorManager.class);
sqlMetadataSegmentManager = createMock(SQLMetadataSegmentManager.class);
taskQueue = createMock(TaskQueue.class);
metadataSupervisorManager = EasyMock.createMock(MetadataSupervisorManager.class);
sqlMetadataSegmentManager = EasyMock.createMock(SQLMetadataSegmentManager.class);
taskQueue = EasyMock.createMock(TaskQueue.class);
taskQueue.start();
objectMapper.registerSubtypes(new NamedType(HashBasedNumberedShardSpec.class, "hashed"));
spec = new MaterializedViewSupervisorSpec(
MaterializedViewSupervisorSpec spec = new MaterializedViewSupervisorSpec(
"base",
new DimensionsSpec(Collections.singletonList(new StringDimensionSchema("dim")), null, null),
new AggregatorFactory[]{new LongSumAggregatorFactory("m1", "m1")},
@ -125,8 +120,8 @@ public class MaterializedViewSupervisorTest
sqlMetadataSegmentManager,
indexerMetadataStorageCoordinator,
new MaterializedViewTaskConfig(),
createMock(AuthorizerMapper.class),
createMock(ChatHandlerProvider.class),
EasyMock.createMock(AuthorizerMapper.class),
EasyMock.createMock(ChatHandlerProvider.class),
new SupervisorStateManagerConfig()
);
supervisor = (MaterializedViewSupervisor) spec.createSupervisor();
@ -160,9 +155,9 @@ public class MaterializedViewSupervisorTest
)
);
indexerMetadataStorageCoordinator.announceHistoricalSegments(baseSegments);
expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes();
expect(taskMaster.getTaskRunner()).andReturn(Optional.absent()).anyTimes();
expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes();
EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes();
EasyMock.expect(taskMaster.getTaskRunner()).andReturn(Optional.absent()).anyTimes();
EasyMock.expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes();
Pair<SortedMap<Interval, String>, Map<Interval, List<DataSegment>>> toBuildInterval = supervisor.checkSegments();
Map<Interval, List<DataSegment>> expectedSegments = new HashMap<>();
expectedSegments.put(
@ -201,11 +196,15 @@ public class MaterializedViewSupervisorTest
)
);
indexerMetadataStorageCoordinator.announceHistoricalSegments(baseSegments);
expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes();
expect(taskMaster.getTaskRunner()).andReturn(Optional.absent()).anyTimes();
expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes();
expect(taskStorage.getStatus("test_task1")).andReturn(Optional.of(TaskStatus.failure("test_task1"))).anyTimes();
expect(taskStorage.getStatus("test_task2")).andReturn(Optional.of(TaskStatus.running("test_task2"))).anyTimes();
EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes();
EasyMock.expect(taskMaster.getTaskRunner()).andReturn(Optional.absent()).anyTimes();
EasyMock.expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes();
EasyMock.expect(taskStorage.getStatus("test_task1"))
.andReturn(Optional.of(TaskStatus.failure("test_task1")))
.anyTimes();
EasyMock.expect(taskStorage.getStatus("test_task2"))
.andReturn(Optional.of(TaskStatus.running("test_task2")))
.anyTimes();
EasyMock.replay(taskStorage);
Pair<Map<Interval, HadoopIndexTask>, Map<Interval, String>> runningTasksPair = supervisor.getRunningTasks();
@ -283,21 +282,21 @@ public class MaterializedViewSupervisorTest
sqlMetadataSegmentManager,
indexerMetadataStorageCoordinator,
new MaterializedViewTaskConfig(),
createMock(AuthorizerMapper.class),
createMock(ChatHandlerProvider.class),
EasyMock.createMock(AuthorizerMapper.class),
EasyMock.createMock(ChatHandlerProvider.class),
new SupervisorStateManagerConfig()
);
MaterializedViewSupervisor supervisor = (MaterializedViewSupervisor) suspended.createSupervisor();
// mock IndexerSQLMetadataStorageCoordinator to ensure that getDataSourceMetadata is not called
// which will be true if truly suspended, since this is the first operation of the 'run' method otherwise
IndexerSQLMetadataStorageCoordinator mock = createMock(IndexerSQLMetadataStorageCoordinator.class);
expect(mock.getDataSourceMetadata(suspended.getDataSourceName()))
.andAnswer(() -> {
Assert.fail();
return null;
})
.anyTimes();
IndexerSQLMetadataStorageCoordinator mock = EasyMock.createMock(IndexerSQLMetadataStorageCoordinator.class);
EasyMock.expect(mock.getDataSourceMetadata(suspended.getDataSourceName()))
.andAnswer(() -> {
Assert.fail();
return null;
})
.anyTimes();
EasyMock.replay(mock);
supervisor.run();

View File

@ -42,6 +42,7 @@ import org.apache.druid.java.util.http.client.HttpClient;
import org.apache.druid.metadata.IndexerSQLMetadataStorageCoordinator;
import org.apache.druid.metadata.TestDerbyConnector;
import org.apache.druid.query.Query;
import org.apache.druid.query.QueryRunnerTestHelper;
import org.apache.druid.query.QueryToolChestWarehouse;
import org.apache.druid.query.QueryWatcher;
import org.apache.druid.query.aggregation.LongSumAggregatorFactory;
@ -69,37 +70,33 @@ import java.util.Set;
import java.util.concurrent.Executor;
import java.util.concurrent.TimeUnit;
import static org.apache.druid.query.QueryRunnerTestHelper.allGran;
public class DatasourceOptimizerTest extends CuratorTestBase
public class DatasourceOptimizerTest extends CuratorTestBase
{
@Rule
public final TestDerbyConnector.DerbyConnectorRule derbyConnectorRule = new TestDerbyConnector.DerbyConnectorRule();
private TestDerbyConnector derbyConnector;
private DerivativeDataSourceManager derivativesManager;
private DruidServer druidServer;
private ObjectMapper jsonMapper;
private ZkPathsConfig zkPathsConfig;
private DataSourceOptimizer optimizer;
private MaterializedViewConfig viewConfig;
private IndexerSQLMetadataStorageCoordinator metadataStorageCoordinator;
private BatchServerInventoryView baseView;
private BrokerServerView brokerServerView;
@Before
public void setUp() throws Exception
{
derbyConnector = derbyConnectorRule.getConnector();
TestDerbyConnector derbyConnector = derbyConnectorRule.getConnector();
derbyConnector.createDataSourceTable();
derbyConnector.createSegmentTable();
viewConfig = new MaterializedViewConfig();
MaterializedViewConfig viewConfig = new MaterializedViewConfig();
jsonMapper = TestHelper.makeJsonMapper();
jsonMapper.registerSubtypes(new NamedType(DerivativeDataSourceMetadata.class, "view"));
metadataStorageCoordinator = EasyMock.createMock(IndexerSQLMetadataStorageCoordinator.class);
derivativesManager = new DerivativeDataSourceManager(
viewConfig,
derbyConnectorRule.metadataTablesConfigSupplier(),
jsonMapper,
viewConfig,
derbyConnectorRule.metadataTablesConfigSupplier(),
jsonMapper,
derbyConnector
);
metadataStorageCoordinator = new IndexerSQLMetadataStorageCoordinator(
@ -107,14 +104,14 @@ public class DatasourceOptimizerTest extends CuratorTestBase
derbyConnectorRule.metadataTablesConfigSupplier().get(),
derbyConnector
);
setupServerAndCurator();
curator.start();
curator.blockUntilConnected();
zkPathsConfig = new ZkPathsConfig();
setupViews();
druidServer = new DruidServer(
"localhost:1234",
"localhost:1234",
@ -127,14 +124,14 @@ public class DatasourceOptimizerTest extends CuratorTestBase
setupZNodeForServer(druidServer, new ZkPathsConfig(), jsonMapper);
optimizer = new DataSourceOptimizer(brokerServerView);
}
@After
public void tearDown() throws IOException
public void tearDown() throws IOException
{
baseView.stop();
tearDownServerAndCurator();
}
@Test(timeout = 60_000L)
public void testOptimize() throws InterruptedException
{
@ -156,10 +153,10 @@ public class DatasourceOptimizerTest extends CuratorTestBase
),
interval -> {
final DataSegment segment = createDataSegment(
"base",
interval,
"base",
interval,
"v1",
Lists.newArrayList("dim1", "dim2", "dim3", "dim4"),
Lists.newArrayList("dim1", "dim2", "dim3", "dim4"),
1024 * 1024
);
try {
@ -180,7 +177,13 @@ public class DatasourceOptimizerTest extends CuratorTestBase
"2011-04-03/2011-04-04"
),
interval -> {
final DataSegment segment = createDataSegment("derivative", interval, "v1", Lists.newArrayList("dim1", "dim2", "dim3"), 1024);
final DataSegment segment = createDataSegment(
"derivative",
interval,
"v1",
Lists.newArrayList("dim1", "dim2", "dim3"),
1024
);
try {
metadataStorageCoordinator.announceHistoricalSegments(Sets.newHashSet(segment));
announceSegmentForServer(druidServer, segment, zkPathsConfig, jsonMapper);
@ -200,7 +203,7 @@ public class DatasourceOptimizerTest extends CuratorTestBase
// build user query
TopNQuery userQuery = new TopNQueryBuilder()
.dataSource("base")
.granularity(allGran)
.granularity(QueryRunnerTestHelper.allGran)
.dimension("dim1")
.metric("cost")
.threshold(4)
@ -209,11 +212,11 @@ public class DatasourceOptimizerTest extends CuratorTestBase
Collections.singletonList(new LongSumAggregatorFactory("cost", "cost"))
)
.build();
List<Query> expectedQueryAfterOptimizing = Lists.newArrayList(
new TopNQueryBuilder()
.dataSource("derivative")
.granularity(allGran)
.granularity(QueryRunnerTestHelper.allGran)
.dimension("dim1")
.metric("cost")
.threshold(4)
@ -224,7 +227,7 @@ public class DatasourceOptimizerTest extends CuratorTestBase
.build(),
new TopNQueryBuilder()
.dataSource("base")
.granularity(allGran)
.granularity(QueryRunnerTestHelper.allGran)
.dimension("dim1")
.metric("cost")
.threshold(4)
@ -237,27 +240,27 @@ public class DatasourceOptimizerTest extends CuratorTestBase
Assert.assertEquals(expectedQueryAfterOptimizing, optimizer.optimize(userQuery));
derivativesManager.stop();
}
private DataSegment createDataSegment(String name, String intervalStr, String version, List<String> dims, long size)
{
return DataSegment.builder()
.dataSource(name)
.interval(Intervals.of(intervalStr))
.loadSpec(
ImmutableMap.of(
"type",
"local",
"path",
"somewhere"
)
)
.version(version)
.dimensions(dims)
.metrics(ImmutableList.of("cost"))
.shardSpec(NoneShardSpec.instance())
.binaryVersion(9)
.size(size)
.build();
}
private void setupViews() throws Exception
@ -274,22 +277,19 @@ public class DatasourceOptimizerTest extends CuratorTestBase
@Override
public CallbackAction segmentAdded(DruidServerMetadata server, DataSegment segment)
{
CallbackAction res = callback.segmentAdded(server, segment);
return res;
return callback.segmentAdded(server, segment);
}
@Override
public CallbackAction segmentRemoved(DruidServerMetadata server, DataSegment segment)
{
CallbackAction res = callback.segmentRemoved(server, segment);
return res;
return callback.segmentRemoved(server, segment);
}
@Override
public CallbackAction segmentViewInitialized()
{
CallbackAction res = callback.segmentViewInitialized();
return res;
return callback.segmentViewInitialized();
}
}
);
@ -318,5 +318,4 @@ public class DatasourceOptimizerTest extends CuratorTestBase
retVal.getFactory().setCodec(retVal);
return retVal;
}
}
View File
@ -26,6 +26,7 @@ import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import org.apache.druid.math.expr.ExprMacroTable;
import org.apache.druid.query.Query;
import org.apache.druid.query.QueryRunnerTestHelper;
import org.apache.druid.query.TableDataSource;
import org.apache.druid.query.aggregation.DoubleMaxAggregatorFactory;
import org.apache.druid.query.aggregation.DoubleMinAggregatorFactory;
@ -41,21 +42,13 @@ import org.junit.Test;
import java.io.IOException;
import java.util.Collections;
import static org.apache.druid.query.QueryRunnerTestHelper.addRowsIndexConstant;
import static org.apache.druid.query.QueryRunnerTestHelper.allGran;
import static org.apache.druid.query.QueryRunnerTestHelper.commonDoubleAggregators;
import static org.apache.druid.query.QueryRunnerTestHelper.dataSource;
import static org.apache.druid.query.QueryRunnerTestHelper.fullOnIntervalSpec;
import static org.apache.druid.query.QueryRunnerTestHelper.indexMetric;
import static org.apache.druid.query.QueryRunnerTestHelper.marketDimension;
public class MaterializedViewQueryTest
{
private static final ObjectMapper jsonMapper = TestHelper.makeJsonMapper();
private DataSourceOptimizer optimizer;
@Before
public void setUp()
{
jsonMapper.registerSubtypes(new NamedType(MaterializedViewQuery.class, MaterializedViewQuery.TYPE));
optimizer = EasyMock.createMock(DataSourceOptimizer.class);
@ -65,21 +58,21 @@ public class MaterializedViewQueryTest
.addValue(DataSourceOptimizer.class, optimizer)
);
}
@Test
public void testQuerySerialization() throws IOException
{
TopNQuery topNQuery = new TopNQueryBuilder()
.dataSource(dataSource)
.granularity(allGran)
.dimension(marketDimension)
.metric(indexMetric)
.dataSource(QueryRunnerTestHelper.dataSource)
.granularity(QueryRunnerTestHelper.allGran)
.dimension(QueryRunnerTestHelper.marketDimension)
.metric(QueryRunnerTestHelper.indexMetric)
.threshold(4)
.intervals(fullOnIntervalSpec)
.intervals(QueryRunnerTestHelper.fullOnIntervalSpec)
.aggregators(
Lists.newArrayList(
Iterables.concat(
commonDoubleAggregators,
QueryRunnerTestHelper.commonDoubleAggregators,
Lists.newArrayList(
new DoubleMaxAggregatorFactory("maxIndex", "index"),
new DoubleMinAggregatorFactory("minIndex", "index")
@ -87,14 +80,14 @@ public class MaterializedViewQueryTest
)
)
)
.postAggregators(Collections.singletonList(addRowsIndexConstant))
.postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant))
.build();
MaterializedViewQuery query = new MaterializedViewQuery(topNQuery, optimizer);
String json = jsonMapper.writeValueAsString(query);
Query serdeQuery = jsonMapper.readValue(json, Query.class);
Assert.assertEquals(query, serdeQuery);
Assert.assertEquals(new TableDataSource(dataSource), query.getDataSource());
Assert.assertEquals(allGran, query.getGranularity());
Assert.assertEquals(fullOnIntervalSpec.getIntervals(), query.getIntervals());
Assert.assertEquals(new TableDataSource(QueryRunnerTestHelper.dataSource), query.getDataSource());
Assert.assertEquals(QueryRunnerTestHelper.allGran, query.getGranularity());
Assert.assertEquals(QueryRunnerTestHelper.fullOnIntervalSpec.getIntervals(), query.getIntervals());
}
}
View File
@ -19,10 +19,9 @@
package org.apache.druid.query.aggregation.momentsketch;
import org.junit.Assert;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
public class MomentSketchWrapperTest
{
@Test
@ -34,7 +33,7 @@ public class MomentSketchWrapperTest
byte[] bs = mw.toByteArray();
MomentSketchWrapper mw2 = MomentSketchWrapper.fromByteArray(bs);
assertEquals(10, mw2.getPowerSums()[1], 1e-10);
Assert.assertEquals(10, mw2.getPowerSums()[1], 1e-10);
}
@Test
@ -47,7 +46,7 @@ public class MomentSketchWrapperTest
}
double[] ps = {0.0, 0.5, 1.0};
double[] qs = mw.getQuantiles(ps);
assertEquals(0, qs[0], 1.0);
assertEquals(50, qs[1], 1.0);
Assert.assertEquals(0, qs[0], 1.0);
Assert.assertEquals(50, qs[1], 1.0);
}
}
View File
@ -31,6 +31,7 @@ import org.apache.druid.query.aggregation.momentsketch.MomentSketchModule;
import org.apache.druid.query.aggregation.momentsketch.MomentSketchWrapper;
import org.apache.druid.query.groupby.GroupByQueryConfig;
import org.apache.druid.query.groupby.GroupByQueryRunnerTest;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
@ -42,8 +43,6 @@ import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import static org.junit.Assert.assertEquals;
@RunWith(Parameterized.class)
public class MomentsSketchAggregatorTest
{
@ -85,7 +84,7 @@ public class MomentsSketchAggregatorTest
MomentSketchAggregatorFactory.class
);
assertEquals(factory, other);
Assert.assertEquals(factory, other);
}
@Test
@ -135,21 +134,21 @@ public class MomentsSketchAggregatorTest
)
);
List<Row> results = seq.toList();
assertEquals(1, results.size());
Assert.assertEquals(1, results.size());
Row row = results.get(0);
double[] quantilesArray = (double[]) row.getRaw("quantiles");
assertEquals(0, quantilesArray[0], 0.05);
assertEquals(.5, quantilesArray[1], 0.05);
assertEquals(1.0, quantilesArray[2], 0.05);
Assert.assertEquals(0, quantilesArray[0], 0.05);
Assert.assertEquals(.5, quantilesArray[1], 0.05);
Assert.assertEquals(1.0, quantilesArray[2], 0.05);
Double minValue = (Double) row.getRaw("min");
assertEquals(0.0011, minValue, 0.0001);
Assert.assertEquals(0.0011, minValue, 0.0001);
Double maxValue = (Double) row.getRaw("max");
assertEquals(0.9969, maxValue, 0.0001);
Assert.assertEquals(0.9969, maxValue, 0.0001);
MomentSketchWrapper sketchObject = (MomentSketchWrapper) row.getRaw("sketch");
assertEquals(400.0, sketchObject.getPowerSums()[0], 1e-10);
Assert.assertEquals(400.0, sketchObject.getPowerSums()[0], 1e-10);
}
@Test
@ -193,12 +192,12 @@ public class MomentsSketchAggregatorTest
);
List<Row> results = seq.toList();
assertEquals(1, results.size());
Assert.assertEquals(1, results.size());
Row row = results.get(0);
MomentSketchWrapper sketchObject = (MomentSketchWrapper) row.getRaw("sketch");
// 9 total products since we pre-sum the values.
assertEquals(9.0, sketchObject.getPowerSums()[0], 1e-10);
Assert.assertEquals(9.0, sketchObject.getPowerSums()[0], 1e-10);
}
}
View File
@ -33,8 +33,10 @@ import org.apache.druid.query.filter.SelectorDimFilter;
import org.apache.druid.query.movingaverage.averagers.AveragerFactory;
import org.apache.druid.query.movingaverage.averagers.ConstantAveragerFactory;
import org.apache.druid.query.movingaverage.averagers.LongMeanAveragerFactory;
import org.hamcrest.CoreMatchers;
import org.joda.time.DateTime;
import org.joda.time.chrono.ISOChronology;
import org.junit.Assert;
import org.junit.Test;
import java.util.ArrayList;
@ -45,16 +47,6 @@ import java.util.Iterator;
import java.util.List;
import java.util.Map;
import static org.hamcrest.CoreMatchers.equalTo;
import static org.hamcrest.CoreMatchers.not;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertThat;
import static org.junit.Assert.assertTrue;
/**
*
*/
public class MovingAverageIterableTest
{
private static final DateTime JAN_1 = new DateTime(2017, 1, 1, 0, 0, 0, 0, ISOChronology.getInstanceUTC());
@ -121,54 +113,54 @@ public class MovingAverageIterableTest
Iterator<Row> iter = iterable.iterator();
assertTrue(iter.hasNext());
Assert.assertTrue(iter.hasNext());
Row r = iter.next();
assertEquals(JAN_1, r.getTimestamp());
assertEquals("m", r.getRaw(GENDER));
Assert.assertEquals(JAN_1, r.getTimestamp());
Assert.assertEquals("m", r.getRaw(GENDER));
assertTrue(iter.hasNext());
Assert.assertTrue(iter.hasNext());
r = iter.next();
assertEquals(JAN_1, r.getTimestamp());
assertEquals("f", r.getRaw(GENDER));
Assert.assertEquals(JAN_1, r.getTimestamp());
Assert.assertEquals("f", r.getRaw(GENDER));
assertTrue(iter.hasNext());
Assert.assertTrue(iter.hasNext());
r = iter.next();
assertEquals(JAN_2, r.getTimestamp());
assertEquals("m", r.getRaw(GENDER));
Assert.assertEquals(JAN_2, r.getTimestamp());
Assert.assertEquals("m", r.getRaw(GENDER));
assertTrue(iter.hasNext());
Assert.assertTrue(iter.hasNext());
r = iter.next();
assertEquals(JAN_2, r.getTimestamp());
assertEquals("f", r.getRaw(GENDER));
Assert.assertEquals(JAN_2, r.getTimestamp());
Assert.assertEquals("f", r.getRaw(GENDER));
assertTrue(iter.hasNext());
Assert.assertTrue(iter.hasNext());
r = iter.next();
Row r2 = r;
assertEquals(JAN_3, r.getTimestamp());
assertEquals("US", r.getRaw(COUNTRY));
Assert.assertEquals(JAN_3, r.getTimestamp());
Assert.assertEquals("US", r.getRaw(COUNTRY));
assertTrue(iter.hasNext());
Assert.assertTrue(iter.hasNext());
r = iter.next();
assertEquals(JAN_3, r.getTimestamp());
assertEquals("US", r.getRaw(COUNTRY));
assertThat(r.getRaw(AGE), not(equalTo(r2.getRaw(AGE))));
Assert.assertEquals(JAN_3, r.getTimestamp());
Assert.assertEquals("US", r.getRaw(COUNTRY));
Assert.assertThat(r.getRaw(AGE), CoreMatchers.not(CoreMatchers.equalTo(r2.getRaw(AGE))));
assertTrue(iter.hasNext());
Assert.assertTrue(iter.hasNext());
r = iter.next();
assertEquals(JAN_4, r.getTimestamp());
assertEquals("f", r.getRaw(GENDER));
Assert.assertEquals(JAN_4, r.getTimestamp());
Assert.assertEquals("f", r.getRaw(GENDER));
assertTrue(iter.hasNext());
Assert.assertTrue(iter.hasNext());
r = iter.next();
assertEquals(JAN_4, r.getTimestamp());
assertEquals("u", r.getRaw(GENDER));
Assert.assertEquals(JAN_4, r.getTimestamp());
Assert.assertEquals("u", r.getRaw(GENDER));
assertTrue(iter.hasNext());
Assert.assertTrue(iter.hasNext());
r = iter.next();
assertEquals(JAN_4, r.getTimestamp());
assertEquals("m", r.getRaw(GENDER));
Assert.assertEquals(JAN_4, r.getTimestamp());
Assert.assertEquals("m", r.getRaw(GENDER));
assertFalse(iter.hasNext());
Assert.assertFalse(iter.hasNext());
}
@Test
@ -207,40 +199,41 @@ public class MovingAverageIterableTest
new RowBucket(JAN_3, Arrays.asList(row3, row4))
));
Iterator<Row> iter = new MovingAverageIterable(seq, ds, Arrays.asList(
new ConstantAveragerFactory("costPageViews", 7, retval),
new LongMeanAveragerFactory("movingAvgPageViews", 7, 1, "pageViews")
),
Collections.emptyList(),
Collections.singletonList(new LongSumAggregatorFactory("pageViews",
"pageViews"
))
Iterator<Row> iter = new MovingAverageIterable(
seq,
ds,
Arrays.asList(
new ConstantAveragerFactory("costPageViews", 7, retval),
new LongMeanAveragerFactory("movingAvgPageViews", 7, 1, "pageViews")
),
Collections.emptyList(),
Collections.singletonList(new LongSumAggregatorFactory("pageViews", "pageViews"))
).iterator();
assertTrue(iter.hasNext());
Assert.assertTrue(iter.hasNext());
Row caResult = iter.next();
assertEquals(JAN_1, caResult.getTimestamp());
assertEquals("m", (caResult.getDimension("gender")).get(0));
assertEquals(retval, caResult.getMetric("costPageViews").floatValue(), 0.0f);
assertEquals(1.4285715f, caResult.getMetric("movingAvgPageViews").floatValue(), 0.0f);
Assert.assertEquals(JAN_1, caResult.getTimestamp());
Assert.assertEquals("m", (caResult.getDimension("gender")).get(0));
Assert.assertEquals(retval, caResult.getMetric("costPageViews").floatValue(), 0.0f);
Assert.assertEquals(1.4285715f, caResult.getMetric("movingAvgPageViews").floatValue(), 0.0f);
assertTrue(iter.hasNext());
Assert.assertTrue(iter.hasNext());
caResult = iter.next();
assertEquals("m", (caResult.getDimension("gender")).get(0));
assertEquals(4.285714f, caResult.getMetric("movingAvgPageViews").floatValue(), 0.0f);
Assert.assertEquals("m", (caResult.getDimension("gender")).get(0));
Assert.assertEquals(4.285714f, caResult.getMetric("movingAvgPageViews").floatValue(), 0.0f);
assertTrue(iter.hasNext());
Assert.assertTrue(iter.hasNext());
caResult = iter.next();
assertEquals("m", (caResult.getDimension("gender")).get(0));
assertEquals(8.571428f, caResult.getMetric("movingAvgPageViews").floatValue(), 0.0f);
Assert.assertEquals("m", (caResult.getDimension("gender")).get(0));
Assert.assertEquals(8.571428f, caResult.getMetric("movingAvgPageViews").floatValue(), 0.0f);
assertTrue(iter.hasNext());
Assert.assertTrue(iter.hasNext());
caResult = iter.next();
assertEquals("f", (caResult.getDimension("gender")).get(0));
assertEquals(5.714285850f, caResult.getMetric("movingAvgPageViews").floatValue(), 0.0f);
Assert.assertEquals("f", (caResult.getDimension("gender")).get(0));
Assert.assertEquals(5.714285850f, caResult.getMetric("movingAvgPageViews").floatValue(), 0.0f);
assertFalse(iter.hasNext());
Assert.assertFalse(iter.hasNext());
}
@ -276,45 +269,47 @@ public class MovingAverageIterableTest
new RowBucket(JAN_2, Arrays.asList(jan2Row1, jan2Row2, jan2Row3))
));
Iterator<Row> iter = new MovingAverageIterable(seq, ds, Collections.singletonList(
new LongMeanAveragerFactory("movingAvgPageViews", 2, 1, "pageViews")),
Collections.emptyList(),
Collections.singletonList(new LongSumAggregatorFactory("pageViews",
"pageViews"
))
Iterator<Row> iter = new MovingAverageIterable(
seq,
ds,
Collections.singletonList(
new LongMeanAveragerFactory("movingAvgPageViews", 2, 1, "pageViews")
),
Collections.emptyList(),
Collections.singletonList(new LongSumAggregatorFactory("pageViews", "pageViews"))
).iterator();
assertTrue(iter.hasNext());
Assert.assertTrue(iter.hasNext());
Row result = iter.next();
assertEquals("m", (result.getDimension("gender")).get(0));
assertEquals(JAN_1, (result.getTimestamp()));
Assert.assertEquals("m", (result.getDimension("gender")).get(0));
Assert.assertEquals(JAN_1, (result.getTimestamp()));
assertTrue(iter.hasNext());
Assert.assertTrue(iter.hasNext());
result = iter.next();
assertEquals("f", (result.getDimension("gender")).get(0));
assertEquals(JAN_1, (result.getTimestamp()));
Assert.assertEquals("f", (result.getDimension("gender")).get(0));
Assert.assertEquals(JAN_1, (result.getTimestamp()));
assertTrue(iter.hasNext());
Assert.assertTrue(iter.hasNext());
result = iter.next();
assertEquals("u", (result.getDimension("gender")).get(0));
assertEquals(JAN_1, (result.getTimestamp()));
Assert.assertEquals("u", (result.getDimension("gender")).get(0));
Assert.assertEquals(JAN_1, (result.getTimestamp()));
assertTrue(iter.hasNext());
Assert.assertTrue(iter.hasNext());
result = iter.next();
assertEquals("m", (result.getDimension("gender")).get(0));
assertEquals(JAN_2, (result.getTimestamp()));
Assert.assertEquals("m", (result.getDimension("gender")).get(0));
Assert.assertEquals(JAN_2, (result.getTimestamp()));
assertTrue(iter.hasNext());
Assert.assertTrue(iter.hasNext());
result = iter.next();
assertEquals("f", (result.getDimension("gender")).get(0));
assertEquals(JAN_2, (result.getTimestamp()));
Assert.assertEquals("f", (result.getDimension("gender")).get(0));
Assert.assertEquals(JAN_2, (result.getTimestamp()));
assertTrue(iter.hasNext());
Assert.assertTrue(iter.hasNext());
result = iter.next();
assertEquals("u", (result.getDimension("gender")).get(0));
assertEquals(JAN_2, (result.getTimestamp()));
Assert.assertEquals("u", (result.getDimension("gender")).get(0));
Assert.assertEquals(JAN_2, (result.getTimestamp()));
assertFalse(iter.hasNext());
Assert.assertFalse(iter.hasNext());
}
@ -348,35 +343,37 @@ public class MovingAverageIterableTest
new RowBucket(JAN_2, Arrays.asList(jan2Row1, jan2Row2, jan2Row3))
));
Iterator<Row> iter = new MovingAverageIterable(seq, ds, Collections.singletonList(
new LongMeanAveragerFactory("movingAvgPageViews", 2, 1, "pageViews")),
Collections.emptyList(),
Collections.singletonList(new LongSumAggregatorFactory("pageViews",
"pageViews"
))
Iterator<Row> iter = new MovingAverageIterable(
seq,
ds,
Collections.singletonList(
new LongMeanAveragerFactory("movingAvgPageViews", 2, 1, "pageViews")
),
Collections.emptyList(),
Collections.singletonList(new LongSumAggregatorFactory("pageViews", "pageViews"))
).iterator();
assertTrue(iter.hasNext());
Assert.assertTrue(iter.hasNext());
Row result = iter.next();
assertEquals("m", (result.getDimension("gender")).get(0));
assertEquals(JAN_1, (result.getTimestamp()));
Assert.assertEquals("m", (result.getDimension("gender")).get(0));
Assert.assertEquals(JAN_1, (result.getTimestamp()));
assertTrue(iter.hasNext());
Assert.assertTrue(iter.hasNext());
result = iter.next();
assertEquals("m", (result.getDimension("gender")).get(0));
assertEquals(JAN_2, (result.getTimestamp()));
Assert.assertEquals("m", (result.getDimension("gender")).get(0));
Assert.assertEquals(JAN_2, (result.getTimestamp()));
assertTrue(iter.hasNext());
Assert.assertTrue(iter.hasNext());
result = iter.next();
assertEquals("f", (result.getDimension("gender")).get(0));
assertEquals(JAN_2, (result.getTimestamp()));
Assert.assertEquals("f", (result.getDimension("gender")).get(0));
Assert.assertEquals(JAN_2, (result.getTimestamp()));
assertTrue(iter.hasNext());
Assert.assertTrue(iter.hasNext());
result = iter.next();
assertEquals("u", (result.getDimension("gender")).get(0));
assertEquals(JAN_2, (result.getTimestamp()));
Assert.assertEquals("u", (result.getDimension("gender")).get(0));
Assert.assertEquals(JAN_2, (result.getTimestamp()));
assertFalse(iter.hasNext());
Assert.assertFalse(iter.hasNext());
}
// test injection when the data is missing at the end
@ -408,45 +405,47 @@ public class MovingAverageIterableTest
new RowBucket(JAN_2, Collections.singletonList(jan2Row1))
));
Iterator<Row> iter = new MovingAverageIterable(seq, ds, Collections.singletonList(
new LongMeanAveragerFactory("movingAvgPageViews", 2, 1, "pageViews")),
Collections.emptyList(),
Collections.singletonList(new LongSumAggregatorFactory("pageViews",
"pageViews"
))
Iterator<Row> iter = new MovingAverageIterable(
seq,
ds,
Collections.singletonList(
new LongMeanAveragerFactory("movingAvgPageViews", 2, 1, "pageViews")
),
Collections.emptyList(),
Collections.singletonList(new LongSumAggregatorFactory("pageViews", "pageViews"))
).iterator();
assertTrue(iter.hasNext());
Assert.assertTrue(iter.hasNext());
Row result = iter.next();
assertEquals("m", (result.getDimension("gender")).get(0));
assertEquals(JAN_1, (result.getTimestamp()));
Assert.assertEquals("m", (result.getDimension("gender")).get(0));
Assert.assertEquals(JAN_1, (result.getTimestamp()));
assertTrue(iter.hasNext());
Assert.assertTrue(iter.hasNext());
result = iter.next();
assertEquals("f", (result.getDimension("gender")).get(0));
assertEquals(JAN_1, (result.getTimestamp()));
Assert.assertEquals("f", (result.getDimension("gender")).get(0));
Assert.assertEquals(JAN_1, (result.getTimestamp()));
assertTrue(iter.hasNext());
Assert.assertTrue(iter.hasNext());
result = iter.next();
assertEquals("u", (result.getDimension("gender")).get(0));
assertEquals(JAN_1, (result.getTimestamp()));
Assert.assertEquals("u", (result.getDimension("gender")).get(0));
Assert.assertEquals(JAN_1, (result.getTimestamp()));
assertTrue(iter.hasNext());
Assert.assertTrue(iter.hasNext());
result = iter.next();
assertEquals("m", (result.getDimension("gender")).get(0));
assertEquals(JAN_2, (result.getTimestamp()));
Assert.assertEquals("m", (result.getDimension("gender")).get(0));
Assert.assertEquals(JAN_2, (result.getTimestamp()));
assertTrue(iter.hasNext());
Assert.assertTrue(iter.hasNext());
result = iter.next();
assertEquals("u", (result.getDimension("gender")).get(0));
assertEquals(JAN_2, (result.getTimestamp()));
Assert.assertEquals("u", (result.getDimension("gender")).get(0));
Assert.assertEquals(JAN_2, (result.getTimestamp()));
assertTrue(iter.hasNext());
Assert.assertTrue(iter.hasNext());
result = iter.next();
assertEquals("f", (result.getDimension("gender")).get(0));
assertEquals(JAN_2, (result.getTimestamp()));
Assert.assertEquals("f", (result.getDimension("gender")).get(0));
Assert.assertEquals(JAN_2, (result.getTimestamp()));
assertFalse(iter.hasNext());
Assert.assertFalse(iter.hasNext());
}
// test injection when the data is missing in the middle
@ -457,7 +456,6 @@ public class MovingAverageIterableTest
Map<String, Object> eventM = new HashMap<>();
Map<String, Object> eventF = new HashMap<>();
Map<String, Object> eventU = new HashMap<>();
Map<String, Object> event4 = new HashMap<>();
eventM.put("gender", "m");
eventM.put("pageViews", 10L);
@ -485,79 +483,81 @@ public class MovingAverageIterableTest
new RowBucket(JAN_4, Collections.singletonList(jan4Row1M))
));
Iterator<Row> iter = new MovingAverageIterable(seq, ds, Collections.singletonList(
new LongMeanAveragerFactory("movingAvgPageViews", 3, 1, "pageViews")),
Collections.emptyList(),
Collections.singletonList(new LongSumAggregatorFactory("pageViews",
"pageViews"
))
Iterator<Row> iter = new MovingAverageIterable(
seq,
ds,
Collections.singletonList(
new LongMeanAveragerFactory("movingAvgPageViews", 3, 1, "pageViews")
),
Collections.emptyList(),
Collections.singletonList(new LongSumAggregatorFactory("pageViews", "pageViews"))
).iterator();
// Jan 1
assertTrue(iter.hasNext());
Assert.assertTrue(iter.hasNext());
Row result = iter.next();
assertEquals("m", (result.getDimension("gender")).get(0));
assertEquals(JAN_1, (result.getTimestamp()));
Assert.assertEquals("m", (result.getDimension("gender")).get(0));
Assert.assertEquals(JAN_1, (result.getTimestamp()));
assertTrue(iter.hasNext());
Assert.assertTrue(iter.hasNext());
result = iter.next();
assertEquals("f", (result.getDimension("gender")).get(0));
assertEquals(JAN_1, (result.getTimestamp()));
Assert.assertEquals("f", (result.getDimension("gender")).get(0));
Assert.assertEquals(JAN_1, (result.getTimestamp()));
assertTrue(iter.hasNext());
Assert.assertTrue(iter.hasNext());
result = iter.next();
assertEquals("u", (result.getDimension("gender")).get(0));
assertEquals(JAN_1, (result.getTimestamp()));
Assert.assertEquals("u", (result.getDimension("gender")).get(0));
Assert.assertEquals(JAN_1, (result.getTimestamp()));
// Jan 2
assertTrue(iter.hasNext());
Assert.assertTrue(iter.hasNext());
result = iter.next();
assertEquals("m", (result.getDimension("gender")).get(0));
assertEquals(JAN_2, (result.getTimestamp()));
Assert.assertEquals("m", (result.getDimension("gender")).get(0));
Assert.assertEquals(JAN_2, (result.getTimestamp()));
assertTrue(iter.hasNext());
Assert.assertTrue(iter.hasNext());
result = iter.next();
assertEquals("u", (result.getDimension("gender")).get(0));
assertEquals(JAN_2, (result.getTimestamp()));
Assert.assertEquals("u", (result.getDimension("gender")).get(0));
Assert.assertEquals(JAN_2, (result.getTimestamp()));
assertTrue(iter.hasNext());
Assert.assertTrue(iter.hasNext());
result = iter.next();
assertEquals("f", (result.getDimension("gender")).get(0));
assertEquals(JAN_2, (result.getTimestamp()));
Assert.assertEquals("f", (result.getDimension("gender")).get(0));
Assert.assertEquals(JAN_2, (result.getTimestamp()));
// Jan 3
assertTrue(iter.hasNext());
Assert.assertTrue(iter.hasNext());
result = iter.next();
assertEquals("m", (result.getDimension("gender")).get(0));
assertEquals(JAN_3, (result.getTimestamp()));
Assert.assertEquals("m", (result.getDimension("gender")).get(0));
Assert.assertEquals(JAN_3, (result.getTimestamp()));
assertTrue(iter.hasNext());
Assert.assertTrue(iter.hasNext());
result = iter.next();
assertEquals("f", (result.getDimension("gender")).get(0));
assertEquals(JAN_3, (result.getTimestamp()));
Assert.assertEquals("f", (result.getDimension("gender")).get(0));
Assert.assertEquals(JAN_3, (result.getTimestamp()));
assertTrue(iter.hasNext());
Assert.assertTrue(iter.hasNext());
result = iter.next();
assertEquals("u", (result.getDimension("gender")).get(0));
assertEquals(JAN_3, (result.getTimestamp()));
Assert.assertEquals("u", (result.getDimension("gender")).get(0));
Assert.assertEquals(JAN_3, (result.getTimestamp()));
// Jan 4
assertTrue(iter.hasNext());
Assert.assertTrue(iter.hasNext());
result = iter.next();
assertEquals("m", (result.getDimension("gender")).get(0));
assertEquals(JAN_4, (result.getTimestamp()));
Assert.assertEquals("m", (result.getDimension("gender")).get(0));
Assert.assertEquals(JAN_4, (result.getTimestamp()));
assertTrue(iter.hasNext());
Assert.assertTrue(iter.hasNext());
result = iter.next();
assertEquals("u", (result.getDimension("gender")).get(0));
assertEquals(JAN_4, (result.getTimestamp()));
Assert.assertEquals("u", (result.getDimension("gender")).get(0));
Assert.assertEquals(JAN_4, (result.getTimestamp()));
assertTrue(iter.hasNext());
Assert.assertTrue(iter.hasNext());
result = iter.next();
assertEquals("f", (result.getDimension("gender")).get(0));
assertEquals(JAN_4, (result.getTimestamp()));
Assert.assertEquals("f", (result.getDimension("gender")).get(0));
Assert.assertEquals(JAN_4, (result.getTimestamp()));
assertFalse(iter.hasNext());
Assert.assertFalse(iter.hasNext());
}
@Test
@ -585,25 +585,27 @@ public class MovingAverageIterableTest
new RowBucket(JAN_4, Collections.singletonList(row2))
));
Iterator<Row> iter = new MovingAverageIterable(seq, ds, Collections.singletonList(
new LongMeanAveragerFactory("movingAvgPageViews", 4, 1, "pageViews")),
Collections.emptyList(),
Collections.singletonList(new LongSumAggregatorFactory("pageViews",
"pageViews"
))
Iterator<Row> iter = new MovingAverageIterable(
seq,
ds,
Collections.singletonList(
new LongMeanAveragerFactory("movingAvgPageViews", 4, 1, "pageViews")
),
Collections.emptyList(),
Collections.singletonList(new LongSumAggregatorFactory("pageViews", "pageViews"))
).iterator();
assertTrue(iter.hasNext());
Assert.assertTrue(iter.hasNext());
Row result = iter.next();
assertEquals("m", (result.getDimension("gender")).get(0));
assertEquals(2.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f);
Assert.assertEquals("m", (result.getDimension("gender")).get(0));
Assert.assertEquals(2.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f);
assertTrue(iter.hasNext());
Assert.assertTrue(iter.hasNext());
result = iter.next();
assertEquals("m", (result.getDimension("gender")).get(0));
assertEquals(7.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f);
Assert.assertEquals("m", (result.getDimension("gender")).get(0));
Assert.assertEquals(7.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f);
assertFalse(iter.hasNext());
Assert.assertFalse(iter.hasNext());
}
@Test
@ -632,35 +634,37 @@ public class MovingAverageIterableTest
new RowBucket(JAN_4, Collections.singletonList(row2))
));
Iterator<Row> iter = new MovingAverageIterable(seq, ds, Collections.singletonList(
new LongMeanAveragerFactory("movingAvgPageViews", 4, 1, "pageViews")),
Collections.emptyList(),
Collections.singletonList(new LongSumAggregatorFactory("pageViews",
"pageViews"
))
Iterator<Row> iter = new MovingAverageIterable(
seq,
ds,
Collections.singletonList(
new LongMeanAveragerFactory("movingAvgPageViews", 4, 1, "pageViews")
),
Collections.emptyList(),
Collections.singletonList(new LongSumAggregatorFactory("pageViews", "pageViews"))
).iterator();
assertTrue(iter.hasNext());
Assert.assertTrue(iter.hasNext());
Row result = iter.next();
assertEquals("m", (result.getDimension("gender")).get(0));
assertEquals(2.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f);
Assert.assertEquals("m", (result.getDimension("gender")).get(0));
Assert.assertEquals(2.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f);
assertTrue(iter.hasNext());
Assert.assertTrue(iter.hasNext());
result = iter.next();
assertEquals("m", (result.getDimension("gender")).get(0));
assertEquals(2.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f);
Assert.assertEquals("m", (result.getDimension("gender")).get(0));
Assert.assertEquals(2.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f);
assertTrue(iter.hasNext());
Assert.assertTrue(iter.hasNext());
result = iter.next();
assertEquals("m", (result.getDimension("gender")).get(0));
assertEquals(2.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f);
Assert.assertEquals("m", (result.getDimension("gender")).get(0));
Assert.assertEquals(2.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f);
assertTrue(iter.hasNext());
Assert.assertTrue(iter.hasNext());
result = iter.next();
assertEquals("m", (result.getDimension("gender")).get(0));
assertEquals(7.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f);
Assert.assertEquals("m", (result.getDimension("gender")).get(0));
Assert.assertEquals(7.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f);
assertFalse(iter.hasNext());
Assert.assertFalse(iter.hasNext());
}
@Test
@ -693,34 +697,35 @@ public class MovingAverageIterableTest
DimFilter filter = new SelectorDimFilter("gender", "m", null);
FilteredAggregatorFactory filteredAggregatorFactory = new FilteredAggregatorFactory(aggregatorFactory, filter);
Iterator<Row> iter = new MovingAverageIterable(seq, ds, Collections.singletonList(
averagerfactory),
Collections.emptyList(),
Collections.singletonList(
filteredAggregatorFactory)
Iterator<Row> iter = new MovingAverageIterable(
seq,
ds,
Collections.singletonList(averagerfactory),
Collections.emptyList(),
Collections.singletonList(filteredAggregatorFactory)
).iterator();
assertTrue(iter.hasNext());
Assert.assertTrue(iter.hasNext());
Row result = iter.next();
assertEquals("m", (result.getDimension("gender")).get(0));
assertEquals(2.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f);
Assert.assertEquals("m", (result.getDimension("gender")).get(0));
Assert.assertEquals(2.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f);
assertTrue(iter.hasNext());
Assert.assertTrue(iter.hasNext());
result = iter.next();
assertEquals("m", (result.getDimension("gender")).get(0));
assertEquals(2.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f);
Assert.assertEquals("m", (result.getDimension("gender")).get(0));
Assert.assertEquals(2.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f);
assertTrue(iter.hasNext());
Assert.assertTrue(iter.hasNext());
result = iter.next();
assertEquals("m", (result.getDimension("gender")).get(0));
assertEquals(2.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f);
Assert.assertEquals("m", (result.getDimension("gender")).get(0));
Assert.assertEquals(2.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f);
assertTrue(iter.hasNext());
Assert.assertTrue(iter.hasNext());
result = iter.next();
assertEquals("m", (result.getDimension("gender")).get(0));
assertEquals(7.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f);
Assert.assertEquals("m", (result.getDimension("gender")).get(0));
Assert.assertEquals(7.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f);
assertFalse(iter.hasNext());
Assert.assertFalse(iter.hasNext());
}
@Test
@ -751,53 +756,53 @@ public class MovingAverageIterableTest
new RowBucket(JAN_6, Collections.emptyList())
));
Iterator<Row> iter = new MovingAverageIterable(seq, ds, Collections.singletonList(
new LongMeanAveragerFactory("movingAvgPageViews", 4, 1, "pageViews")),
Collections.emptyList(),
Collections.singletonList(new LongSumAggregatorFactory("pageViews",
"pageViews"
))
Iterator<Row> iter = new MovingAverageIterable(
seq,
ds,
Collections.singletonList(
new LongMeanAveragerFactory("movingAvgPageViews", 4, 1, "pageViews")
),
Collections.emptyList(),
Collections.singletonList(new LongSumAggregatorFactory("pageViews", "pageViews"))
).iterator();
assertTrue(iter.hasNext());
Assert.assertTrue(iter.hasNext());
Row result = iter.next();
assertEquals(JAN_1, result.getTimestamp());
assertEquals("m", (result.getDimension("gender")).get(0));
assertEquals(2.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f);
Assert.assertEquals(JAN_1, result.getTimestamp());
Assert.assertEquals("m", (result.getDimension("gender")).get(0));
Assert.assertEquals(2.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f);
assertTrue(iter.hasNext());
Assert.assertTrue(iter.hasNext());
result = iter.next();
assertEquals(JAN_2, result.getTimestamp());
assertEquals("m", (result.getDimension("gender")).get(0));
assertEquals(7.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f);
Assert.assertEquals(JAN_2, result.getTimestamp());
Assert.assertEquals("m", (result.getDimension("gender")).get(0));
Assert.assertEquals(7.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f);
assertTrue(iter.hasNext());
Assert.assertTrue(iter.hasNext());
result = iter.next();
assertEquals(JAN_3, result.getTimestamp());
assertEquals("m", (result.getDimension("gender")).get(0));
assertEquals(7.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f);
Assert.assertEquals(JAN_3, result.getTimestamp());
Assert.assertEquals("m", (result.getDimension("gender")).get(0));
Assert.assertEquals(7.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f);
assertTrue(iter.hasNext());
Assert.assertTrue(iter.hasNext());
result = iter.next();
assertEquals(JAN_4, result.getTimestamp());
assertEquals("m", (result.getDimension("gender")).get(0));
assertEquals(7.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f);
Assert.assertEquals(JAN_4, result.getTimestamp());
Assert.assertEquals("m", (result.getDimension("gender")).get(0));
Assert.assertEquals(7.5f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f);
assertTrue(iter.hasNext());
Assert.assertTrue(iter.hasNext());
result = iter.next();
assertEquals(JAN_5, result.getTimestamp());
assertEquals("m", (result.getDimension("gender")).get(0));
assertEquals(5.0f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f);
Assert.assertEquals(JAN_5, result.getTimestamp());
Assert.assertEquals("m", (result.getDimension("gender")).get(0));
Assert.assertEquals(5.0f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f);
assertTrue(iter.hasNext());
Assert.assertTrue(iter.hasNext());
result = iter.next();
assertEquals(JAN_6, result.getTimestamp());
assertEquals("m", (result.getDimension("gender")).get(0));
assertEquals(0.0f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f);
assertFalse(iter.hasNext());
Assert.assertEquals(JAN_6, result.getTimestamp());
Assert.assertEquals("m", (result.getDimension("gender")).get(0));
Assert.assertEquals(0.0f, result.getMetric("movingAvgPageViews").floatValue(), 0.0f);
Assert.assertFalse(iter.hasNext());
}
}
View File
@ -19,9 +19,7 @@
package org.apache.druid.query.movingaverage;
import com.fasterxml.jackson.core.JsonParseException;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.JsonMappingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.dataformat.yaml.YAMLFactory;
@ -70,6 +68,7 @@ import org.apache.druid.server.initialization.ServerConfig;
import org.apache.druid.timeline.TimelineLookup;
import org.hamcrest.core.IsInstanceOf;
import org.joda.time.Interval;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
@ -86,10 +85,6 @@ import java.util.List;
import java.util.Map;
import java.util.concurrent.Executor;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertThat;
/**
* Base class for implementing MovingAverageQuery tests
*/
@ -97,7 +92,6 @@ import static org.junit.Assert.assertThat;
public class MovingAverageQueryTest
{
private final ObjectMapper jsonMapper;
private final Injector injector;
private final QueryToolChestWarehouse warehouse;
private final RetryQueryRunnerConfig retryConfig;
private final ServerConfig serverConfig;
@ -106,7 +100,6 @@ public class MovingAverageQueryTest
private final List<Result<TimeseriesResultValue>> timeseriesResults = new ArrayList<>();
private final TestConfig config;
private final String yamlFile;
@Parameters(name = "{0}")
public static Iterable<String[]> data() throws IOException
@ -116,7 +109,7 @@ public class MovingAverageQueryTest
List<String[]> tests = new ArrayList<>();
for (String line = testReader.readLine(); line != null; line = testReader.readLine()) {
tests.add(new String[] {line});
tests.add(new String[]{line});
}
return tests;
@ -124,7 +117,6 @@ public class MovingAverageQueryTest
public MovingAverageQueryTest(String yamlFile) throws IOException
{
this.yamlFile = yamlFile;
List<Module> modules = getRequiredModules();
modules.add(
@ -137,19 +129,13 @@ public class MovingAverageQueryTest
@Override
public <T> QueryRunner<T> getQueryRunnerForIntervals(Query<T> query, Iterable<Interval> intervals)
{
return new QueryRunner<T>()
{
@Override
@SuppressWarnings("unchecked")
public Sequence<T> run(QueryPlus queryPlus, Map responseContext)
{
if (query instanceof GroupByQuery) {
return (Sequence<T>) Sequences.simple(groupByResults);
} else if (query instanceof TimeseriesQuery) {
return (Sequence<T>) Sequences.simple(timeseriesResults);
}
throw new UnsupportedOperationException("unexpected query type " + query.getType());
return (queryPlus, responseContext) -> {
if (query instanceof GroupByQuery) {
return (Sequence<T>) Sequences.simple(groupByResults);
} else if (query instanceof TimeseriesQuery) {
return (Sequence<T>) Sequences.simple(timeseriesResults);
}
throw new UnsupportedOperationException("unexpected query type " + query.getType());
};
}
@ -165,7 +151,7 @@ public class MovingAverageQueryTest
System.setProperty("druid.generic.useDefaultValueForNull", "true");
System.setProperty("druid.processing.buffer.sizeBytes", "655360");
Injector baseInjector = GuiceInjectors.makeStartupInjector();
injector = Initialization.makeInjectorWithModules(baseInjector, modules);
Injector injector = Initialization.makeInjectorWithModules(baseInjector, modules);
jsonMapper = injector.getInstance(ObjectMapper.class);
warehouse = injector.getInstance(QueryToolChestWarehouse.class);
@ -182,7 +168,7 @@ public class MovingAverageQueryTest
*
* @return The JSON query
*/
protected String getQueryString()
private String getQueryString()
{
return config.query.toString();
}
@ -192,7 +178,7 @@ public class MovingAverageQueryTest
*
* @return The JSON result
*/
protected String getExpectedResultString()
private String getExpectedResultString()
{
return config.expectedOutput.toString();
}
@ -204,7 +190,7 @@ public class MovingAverageQueryTest
*
* @return The JSON result from the groupby query
*/
protected String getGroupByResultJson()
private String getGroupByResultJson()
{
ArrayNode node = config.intermediateResults.get("groupBy");
return node == null ? null : node.toString();
@ -217,7 +203,7 @@ public class MovingAverageQueryTest
*
* @return The JSON result from the timeseries query
*/
protected String getTimeseriesResultJson()
private String getTimeseriesResultJson()
{
ArrayNode node = config.intermediateResults.get("timeseries");
return node == null ? null : node.toString();
@ -228,12 +214,12 @@ public class MovingAverageQueryTest
*
* @return The Query type
*/
protected Class<?> getExpectedQueryType()
private Class<?> getExpectedQueryType()
{
return MovingAverageQuery.class;
}
protected TypeReference<?> getExpectedResultType()
private TypeReference<?> getExpectedResultType()
{
return new TypeReference<List<Row>>()
{
@ -242,10 +228,8 @@ public class MovingAverageQueryTest
/**
* Returns a list of any additional Druid Modules necessary to run the test.
*
* @return List of Druid Modules
*/
protected List<Module> getRequiredModules()
private List<Module> getRequiredModules()
{
List<Module> list = new ArrayList<>();
@ -258,12 +242,8 @@ public class MovingAverageQueryTest
/**
* Set up any needed mocks to stub out backend query behavior.
*
* @throws IOException
* @throws JsonMappingException
* @throws JsonParseException
*/
protected void defineMocks() throws IOException
private void defineMocks() throws IOException
{
groupByResults.clear();
timeseriesResults.clear();
@ -286,45 +266,39 @@ public class MovingAverageQueryTest
/**
* converts Int to Long, Float to Double in the actual and expected result
*
* @param result
*/
protected List<MapBasedRow> consistentTypeCasting(List<MapBasedRow> result)
private List<MapBasedRow> consistentTypeCasting(List<MapBasedRow> result)
{
List<MapBasedRow> newResult = new ArrayList<>();
for (MapBasedRow row : result) {
final Map<String, Object> event = Maps.newLinkedHashMap((row).getEvent());
event.forEach((key, value) -> {
if (Integer.class.isInstance(value)) {
if (value instanceof Integer) {
event.put(key, ((Integer) value).longValue());
}
if (Float.class.isInstance(value)) {
if (value instanceof Float) {
event.put(key, ((Float) value).doubleValue());
}
});
newResult.add(new MapBasedRow(row.getTimestamp(), event));
}
return newResult;
}
/**
* Validate that the specified query behaves correctly.
*
* @throws IOException
* @throws JsonMappingException
* @throws JsonParseException
*/
@SuppressWarnings({"unchecked", "rawtypes"})
@Test
public void testQuery() throws IOException
{
Query<?> query = jsonMapper.readValue(getQueryString(), Query.class);
assertThat(query, IsInstanceOf.instanceOf(getExpectedQueryType()));
Assert.assertThat(query, IsInstanceOf.instanceOf(getExpectedQueryType()));
List<MapBasedRow> expectedResults = jsonMapper.readValue(getExpectedResultString(), getExpectedResultType());
assertNotNull(expectedResults);
assertThat(expectedResults, IsInstanceOf.instanceOf(List.class));
Assert.assertNotNull(expectedResults);
Assert.assertThat(expectedResults, IsInstanceOf.instanceOf(List.class));
CachingClusteredClient baseClient = new CachingClusteredClient(
warehouse,
@ -403,6 +377,6 @@ public class MovingAverageQueryTest
expectedResults = consistentTypeCasting(expectedResults);
actualResults = consistentTypeCasting(actualResults);
assertEquals(expectedResults, actualResults);
Assert.assertEquals(expectedResults, actualResults);
}
}
View File
@ -31,6 +31,7 @@ import org.apache.druid.query.spec.MultipleIntervalSegmentSpec;
import org.joda.time.DateTime;
import org.joda.time.Interval;
import org.joda.time.chrono.ISOChronology;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
@ -39,17 +40,8 @@ import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNull;
/**
 * Unit tests for PostAveragerAggregatorCalculator
*/
public class PostAveragerAggregatorCalculatorTest
{
private MovingAverageQuery query;
private PostAveragerAggregatorCalculator pac;
private Map<String, Object> event;
private MapBasedRow row;
@ -58,9 +50,12 @@ public class PostAveragerAggregatorCalculatorTest
public void setup()
{
System.setProperty("druid.generic.useDefaultValueForNull", "true");
query = new MovingAverageQuery(
MovingAverageQuery query = new MovingAverageQuery(
new TableDataSource("d"),
new MultipleIntervalSegmentSpec(Collections.singletonList(new Interval("2017-01-01/2017-01-01", ISOChronology.getInstanceUTC()))),
new MultipleIntervalSegmentSpec(Collections.singletonList(new Interval(
"2017-01-01/2017-01-01",
ISOChronology.getInstanceUTC()
))),
null,
Granularities.DAY,
null,
@ -88,22 +83,22 @@ public class PostAveragerAggregatorCalculatorTest
@Test
public void testApply()
{
event.put("count", new Double(10.0));
event.put("avgCount", new Double(12.0));
event.put("count", 10.0);
event.put("avgCount", 12.0);
Row result = pac.apply(row);
assertEquals(result.getMetric("avgCountRatio").floatValue(), 10.0f / 12.0f, 0.0);
Assert.assertEquals(10.0f / 12.0f, result.getMetric("avgCountRatio").floatValue(), 0.0);
}
@Test
public void testApplyMissingColumn()
{
event.put("count", new Double(10.0));
event.put("count", 10.0);
Row result = pac.apply(row);
assertEquals(result.getMetric("avgCountRatio").floatValue(), 0.0, 0.0);
assertNull(result.getRaw("avgCountRatio"));
Assert.assertEquals(0.0, result.getMetric("avgCountRatio").floatValue(), 0.0);
Assert.assertNull(result.getRaw("avgCountRatio"));
}
}
View File
@ -27,6 +27,7 @@ import org.joda.time.DateTime;
import org.joda.time.Interval;
import org.joda.time.Period;
import org.joda.time.chrono.ISOChronology;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
@ -38,12 +39,8 @@ import java.util.Iterator;
import java.util.List;
import java.util.Map;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
public class RowBucketIterableTest
{
private static final DateTime JAN_1 = new DateTime(2017, 1, 1, 0, 0, 0, 0, ISOChronology.getInstanceUTC());
private static final DateTime JAN_2 = new DateTime(2017, 1, 2, 0, 0, 0, 0, ISOChronology.getInstanceUTC());
private static final DateTime JAN_3 = new DateTime(2017, 1, 3, 0, 0, 0, 0, ISOChronology.getInstanceUTC());
@ -91,11 +88,9 @@ public class RowBucketIterableTest
EVENT_U_30.put("pageViews", 30L);
}
// normal case. data for all the days present
@Test
public void testCompleteData()
{
intervals = new ArrayList<>();
intervals.add(INTERVAL_JAN_1_4);
@ -115,27 +110,25 @@ public class RowBucketIterableTest
Iterator<RowBucket> iter = rbi.iterator();
RowBucket actual = iter.next();
assertEquals(JAN_1, actual.getDateTime());
assertEquals(expectedDay1, actual.getRows());
Assert.assertEquals(JAN_1, actual.getDateTime());
Assert.assertEquals(expectedDay1, actual.getRows());
actual = iter.next();
assertEquals(JAN_2, actual.getDateTime());
assertEquals(expectedDay2, actual.getRows());
Assert.assertEquals(JAN_2, actual.getDateTime());
Assert.assertEquals(expectedDay2, actual.getRows());
actual = iter.next();
assertEquals(JAN_3, actual.getDateTime());
assertEquals(expectedDay3, actual.getRows());
Assert.assertEquals(JAN_3, actual.getDateTime());
Assert.assertEquals(expectedDay3, actual.getRows());
actual = iter.next();
assertEquals(JAN_4, actual.getDateTime());
assertEquals(expectedDay4, actual.getRows());
Assert.assertEquals(JAN_4, actual.getDateTime());
Assert.assertEquals(expectedDay4, actual.getRows());
}
// all days present and last day only has one row
@Test
public void testApplyLastDaySingleRow()
{
intervals = new ArrayList<>();
intervals.add(INTERVAL_JAN_1_4);
@ -156,23 +149,21 @@ public class RowBucketIterableTest
Iterator<RowBucket> iter = rbi.iterator();
RowBucket actual = iter.next();
assertEquals(expectedDay1, actual.getRows());
Assert.assertEquals(expectedDay1, actual.getRows());
actual = iter.next();
assertEquals(expectedDay2, actual.getRows());
Assert.assertEquals(expectedDay2, actual.getRows());
actual = iter.next();
assertEquals(expectedDay3, actual.getRows());
Assert.assertEquals(expectedDay3, actual.getRows());
actual = iter.next();
assertEquals(expectedDay4, actual.getRows());
Assert.assertEquals(expectedDay4, actual.getRows());
}
// all days present and last day has multiple rows
@Test
public void testApplyLastDayMultipleRows()
{
intervals = new ArrayList<>();
intervals.add(INTERVAL_JAN_1_4);
@ -195,23 +186,21 @@ public class RowBucketIterableTest
Iterator<RowBucket> iter = rbi.iterator();
RowBucket actual = iter.next();
assertEquals(expectedDay1, actual.getRows());
Assert.assertEquals(expectedDay1, actual.getRows());
actual = iter.next();
assertEquals(expectedDay2, actual.getRows());
Assert.assertEquals(expectedDay2, actual.getRows());
actual = iter.next();
assertEquals(expectedDay3, actual.getRows());
Assert.assertEquals(expectedDay3, actual.getRows());
actual = iter.next();
assertEquals(expectedDay4, actual.getRows());
Assert.assertEquals(expectedDay4, actual.getRows());
}
// test single day with single row
@Test
public void testSingleDaySingleRow()
{
intervals = new ArrayList<>();
intervals.add(INTERVAL_JAN_1_1);
@ -225,16 +214,13 @@ public class RowBucketIterableTest
Iterator<RowBucket> iter = rbi.iterator();
RowBucket actual = iter.next();
assertEquals(expectedDay1, actual.getRows());
assertEquals(JAN_1, actual.getDateTime());
Assert.assertEquals(expectedDay1, actual.getRows());
Assert.assertEquals(JAN_1, actual.getDateTime());
}
// test single day with multiple rows
@Test
public void testSingleDayMultipleRow()
{
intervals = new ArrayList<>();
intervals.add(INTERVAL_JAN_1_1);
@ -250,16 +236,13 @@ public class RowBucketIterableTest
Iterator<RowBucket> iter = rbi.iterator();
RowBucket actual = iter.next();
assertEquals(JAN_1, actual.getDateTime());
assertEquals(expectedDay1, actual.getRows());
Assert.assertEquals(JAN_1, actual.getDateTime());
Assert.assertEquals(expectedDay1, actual.getRows());
}
// missing day at the beginning followed by single row
@Test
public void testMissingDaysAtBegining()
{
List<Row> expectedDay1 = Collections.emptyList();
List<Row> expectedDay2 = Collections.singletonList(JAN_2_M_10);
@ -274,20 +257,17 @@ public class RowBucketIterableTest
Iterator<RowBucket> iter = rbi.iterator();
RowBucket actual = iter.next();
assertEquals(JAN_1, actual.getDateTime());
assertEquals(expectedDay1, actual.getRows());
Assert.assertEquals(JAN_1, actual.getDateTime());
Assert.assertEquals(expectedDay1, actual.getRows());
actual = iter.next();
assertEquals(JAN_2, actual.getDateTime());
assertEquals(expectedDay2, actual.getRows());
Assert.assertEquals(JAN_2, actual.getDateTime());
Assert.assertEquals(expectedDay2, actual.getRows());
}
// missing day at the beginning followed by multiple row
@Test
public void testMissingDaysAtBeginingFollowedByMultipleRow()
{
List<Row> expectedDay1 = Collections.emptyList();
List<Row> expectedDay2 = Collections.singletonList(JAN_2_M_10);
List<Row> expectedDay3 = Collections.singletonList(JAN_3_M_10);
@ -306,27 +286,25 @@ public class RowBucketIterableTest
Iterator<RowBucket> iter = rbi.iterator();
RowBucket actual = iter.next();
assertEquals(JAN_1, actual.getDateTime());
assertEquals(expectedDay1, actual.getRows());
Assert.assertEquals(JAN_1, actual.getDateTime());
Assert.assertEquals(expectedDay1, actual.getRows());
actual = iter.next();
assertEquals(JAN_2, actual.getDateTime());
assertEquals(expectedDay2, actual.getRows());
Assert.assertEquals(JAN_2, actual.getDateTime());
Assert.assertEquals(expectedDay2, actual.getRows());
actual = iter.next();
assertEquals(JAN_3, actual.getDateTime());
assertEquals(expectedDay3, actual.getRows());
Assert.assertEquals(JAN_3, actual.getDateTime());
Assert.assertEquals(expectedDay3, actual.getRows());
actual = iter.next();
assertEquals(JAN_4, actual.getDateTime());
assertEquals(expectedDay4, actual.getRows());
Assert.assertEquals(JAN_4, actual.getDateTime());
Assert.assertEquals(expectedDay4, actual.getRows());
}
// missing day at the beginning and at the end
@Test
public void testMissingDaysAtBeginingAndAtTheEnd()
{
List<Row> expectedDay1 = Collections.emptyList();
List<Row> expectedDay2 = Collections.singletonList(JAN_2_M_10);
List<Row> expectedDay3 = Collections.singletonList(JAN_3_M_10);
@ -344,27 +322,25 @@ public class RowBucketIterableTest
Iterator<RowBucket> iter = rbi.iterator();
RowBucket actual = iter.next();
assertEquals(JAN_1, actual.getDateTime());
assertEquals(expectedDay1, actual.getRows());
Assert.assertEquals(JAN_1, actual.getDateTime());
Assert.assertEquals(expectedDay1, actual.getRows());
actual = iter.next();
assertEquals(JAN_2, actual.getDateTime());
assertEquals(expectedDay2, actual.getRows());
Assert.assertEquals(JAN_2, actual.getDateTime());
Assert.assertEquals(expectedDay2, actual.getRows());
actual = iter.next();
assertEquals(JAN_3, actual.getDateTime());
assertEquals(expectedDay3, actual.getRows());
Assert.assertEquals(JAN_3, actual.getDateTime());
Assert.assertEquals(expectedDay3, actual.getRows());
actual = iter.next();
assertEquals(JAN_4, actual.getDateTime());
assertEquals(expectedDay4, actual.getRows());
Assert.assertEquals(JAN_4, actual.getDateTime());
Assert.assertEquals(expectedDay4, actual.getRows());
}
// multiple missing days in an interval
@Test
public void testMultipleMissingDays()
{
List<Row> expectedDay1 = Collections.emptyList();
List<Row> expectedDay2 = Collections.singletonList(JAN_2_M_10);
List<Row> expectedDay3 = Collections.emptyList();
@ -382,27 +358,25 @@ public class RowBucketIterableTest
Iterator<RowBucket> iter = rbi.iterator();
RowBucket actual = iter.next();
assertEquals(JAN_1, actual.getDateTime());
assertEquals(expectedDay1, actual.getRows());
Assert.assertEquals(JAN_1, actual.getDateTime());
Assert.assertEquals(expectedDay1, actual.getRows());
actual = iter.next();
assertEquals(JAN_2, actual.getDateTime());
assertEquals(expectedDay2, actual.getRows());
Assert.assertEquals(JAN_2, actual.getDateTime());
Assert.assertEquals(expectedDay2, actual.getRows());
actual = iter.next();
assertEquals(JAN_3, actual.getDateTime());
assertEquals(expectedDay3, actual.getRows());
Assert.assertEquals(JAN_3, actual.getDateTime());
Assert.assertEquals(expectedDay3, actual.getRows());
actual = iter.next();
assertEquals(JAN_4, actual.getDateTime());
assertEquals(expectedDay4, actual.getRows());
Assert.assertEquals(JAN_4, actual.getDateTime());
Assert.assertEquals(expectedDay4, actual.getRows());
}
// multiple missing days in an interval followed by multiple row at the end
@Test
public void testMultipleMissingDaysMultipleRowAtTheEnd()
{
List<Row> expectedDay1 = Collections.emptyList();
List<Row> expectedDay2 = Collections.singletonList(JAN_2_M_10);
List<Row> expectedDay3 = Collections.emptyList();
@ -422,32 +396,29 @@ public class RowBucketIterableTest
Iterator<RowBucket> iter = rbi.iterator();
RowBucket actual = iter.next();
assertEquals(JAN_1, actual.getDateTime());
assertEquals(expectedDay1, actual.getRows());
Assert.assertEquals(JAN_1, actual.getDateTime());
Assert.assertEquals(expectedDay1, actual.getRows());
actual = iter.next();
assertEquals(JAN_2, actual.getDateTime());
assertEquals(expectedDay2, actual.getRows());
Assert.assertEquals(JAN_2, actual.getDateTime());
Assert.assertEquals(expectedDay2, actual.getRows());
actual = iter.next();
assertEquals(JAN_3, actual.getDateTime());
assertEquals(expectedDay3, actual.getRows());
Assert.assertEquals(JAN_3, actual.getDateTime());
Assert.assertEquals(expectedDay3, actual.getRows());
actual = iter.next();
assertEquals(JAN_4, actual.getDateTime());
assertEquals(expectedDay4, actual.getRows());
Assert.assertEquals(JAN_4, actual.getDateTime());
Assert.assertEquals(expectedDay4, actual.getRows());
actual = iter.next();
assertEquals(JAN_5, actual.getDateTime());
assertEquals(expectedDay5, actual.getRows());
Assert.assertEquals(JAN_5, actual.getDateTime());
Assert.assertEquals(expectedDay5, actual.getRows());
}
// missing day in the middle followed by single row
@Test
public void testMissingDaysInMiddleOneRow()
{
List<Row> expectedDay1 = Collections.singletonList(JAN_1_M_10);
List<Row> expectedDay2 = Collections.singletonList(JAN_2_M_10);
List<Row> expectedDay3 = Collections.emptyList();
@ -466,25 +437,22 @@ public class RowBucketIterableTest
Iterator<RowBucket> iter = rbi.iterator();
RowBucket actual = iter.next();
assertEquals(expectedDay1, actual.getRows());
Assert.assertEquals(expectedDay1, actual.getRows());
actual = iter.next();
assertEquals(expectedDay2, actual.getRows());
Assert.assertEquals(expectedDay2, actual.getRows());
actual = iter.next();
assertEquals(JAN_3, actual.getDateTime());
assertEquals(expectedDay3, actual.getRows());
Assert.assertEquals(JAN_3, actual.getDateTime());
Assert.assertEquals(expectedDay3, actual.getRows());
actual = iter.next();
assertEquals(expectedDay4, actual.getRows());
Assert.assertEquals(expectedDay4, actual.getRows());
}
// missing day in the middle followed by multiple rows
@Test
public void testMissingDaysInMiddleMultipleRow()
{
List<Row> expectedDay1 = Collections.singletonList(JAN_1_M_10);
List<Row> expectedDay2 = Collections.emptyList();
List<Row> expectedDay3 = Collections.singletonList(JAN_3_M_10);
@@ -503,28 +471,25 @@ public class RowBucketIterableTest
Iterator<RowBucket> iter = rbi.iterator();
RowBucket actual = iter.next();
assertEquals(JAN_1, actual.getDateTime());
assertEquals(expectedDay1, actual.getRows());
Assert.assertEquals(JAN_1, actual.getDateTime());
Assert.assertEquals(expectedDay1, actual.getRows());
actual = iter.next();
assertEquals(JAN_2, actual.getDateTime());
assertEquals(expectedDay2, actual.getRows());
Assert.assertEquals(JAN_2, actual.getDateTime());
Assert.assertEquals(expectedDay2, actual.getRows());
actual = iter.next();
assertEquals(JAN_3, actual.getDateTime());
assertEquals(expectedDay3, actual.getRows());
Assert.assertEquals(JAN_3, actual.getDateTime());
Assert.assertEquals(expectedDay3, actual.getRows());
actual = iter.next();
assertEquals(JAN_4, actual.getDateTime());
assertEquals(expectedDay4, actual.getRows());
Assert.assertEquals(JAN_4, actual.getDateTime());
Assert.assertEquals(expectedDay4, actual.getRows());
}
// data missing for last day.
@Test
public void testApplyLastDayNoRows()
{
intervals = new ArrayList<>();
intervals.add(INTERVAL_JAN_1_4);
@@ -544,24 +509,22 @@ public class RowBucketIterableTest
Iterator<RowBucket> iter = rbi.iterator();
RowBucket actual = iter.next();
assertEquals(expectedDay1, actual.getRows());
Assert.assertEquals(expectedDay1, actual.getRows());
actual = iter.next();
assertEquals(expectedDay2, actual.getRows());
Assert.assertEquals(expectedDay2, actual.getRows());
actual = iter.next();
assertEquals(expectedDay3, actual.getRows());
Assert.assertEquals(expectedDay3, actual.getRows());
actual = iter.next();
assertEquals(JAN_4, actual.getDateTime());
assertEquals(expectedDay4, actual.getRows());
Assert.assertEquals(JAN_4, actual.getDateTime());
Assert.assertEquals(expectedDay4, actual.getRows());
}
// data missing for last two days
@Test
public void testApplyLastTwoDayNoRows()
{
List<Row> expectedDay1 = Arrays.asList(JAN_1_M_10, JAN_1_F_20);
List<Row> expectedDay2 = Collections.singletonList(JAN_2_M_10);
List<Row> expectedDay3 = Collections.emptyList();
@@ -580,25 +543,23 @@ public class RowBucketIterableTest
Iterator<RowBucket> iter = rbi.iterator();
RowBucket actual = iter.next();
assertEquals(expectedDay1, actual.getRows());
Assert.assertEquals(expectedDay1, actual.getRows());
actual = iter.next();
assertEquals(expectedDay2, actual.getRows());
Assert.assertEquals(expectedDay2, actual.getRows());
actual = iter.next();
assertEquals(JAN_3, actual.getDateTime());
assertEquals(expectedDay3, actual.getRows());
Assert.assertEquals(JAN_3, actual.getDateTime());
Assert.assertEquals(expectedDay3, actual.getRows());
actual = iter.next();
assertEquals(JAN_4, actual.getDateTime());
assertEquals(expectedDay4, actual.getRows());
Assert.assertEquals(JAN_4, actual.getDateTime());
Assert.assertEquals(expectedDay4, actual.getRows());
}
@Test
public void testApplyMultipleInterval()
{
intervals = new ArrayList<>();
intervals.add(INTERVAL_JAN_1_4);
intervals.add(INTERVAL_JAN_6_8);
@@ -628,31 +589,30 @@ public class RowBucketIterableTest
Iterator<RowBucket> iter = rbi.iterator();
RowBucket actual = iter.next();
assertEquals(expectedDay1, actual.getRows());
Assert.assertEquals(expectedDay1, actual.getRows());
actual = iter.next();
assertEquals(expectedDay2, actual.getRows());
Assert.assertEquals(expectedDay2, actual.getRows());
actual = iter.next();
assertEquals(expectedDay3, actual.getRows());
Assert.assertEquals(expectedDay3, actual.getRows());
actual = iter.next();
assertEquals(expectedDay4, actual.getRows());
Assert.assertEquals(expectedDay4, actual.getRows());
actual = iter.next();
assertEquals(expectedDay6, actual.getRows());
Assert.assertEquals(expectedDay6, actual.getRows());
actual = iter.next();
assertEquals(expectedDay7, actual.getRows());
Assert.assertEquals(expectedDay7, actual.getRows());
actual = iter.next();
assertEquals(expectedDay8, actual.getRows());
Assert.assertEquals(expectedDay8, actual.getRows());
}
@Test
public void testNodata()
{
intervals = new ArrayList<>();
intervals.add(INTERVAL_JAN_1_4);
intervals.add(INTERVAL_JAN_6_8);
@@ -663,8 +623,8 @@ public class RowBucketIterableTest
RowBucketIterable rbi = new RowBucketIterable(seq, intervals, ONE_DAY);
Iterator<RowBucket> iter = rbi.iterator();
assertTrue(iter.hasNext());
Assert.assertTrue(iter.hasNext());
RowBucket actual = iter.next();
assertEquals(Collections.emptyList(), actual.getRows());
Assert.assertEquals(Collections.emptyList(), actual.getRows());
}
}
View File
@@ -19,17 +19,15 @@
package org.apache.druid.query.movingaverage.averagers;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import java.util.Comparator;
import java.util.List;
import static org.junit.Assert.assertEquals;
public class BaseAveragerFactoryTest
{
private AveragerFactory<Long, Long> fac;
@Before
@@ -55,14 +53,14 @@ public class BaseAveragerFactoryTest
public void testGetDependentFields()
{
List<String> dependentFields = fac.getDependentFields();
assertEquals(1, dependentFields.size());
assertEquals("field", dependentFields.get(0));
Assert.assertEquals(1, dependentFields.size());
Assert.assertEquals("field", dependentFields.get(0));
}
@Test
public void testFinalization()
{
Long input = Long.valueOf(5L);
assertEquals(input, fac.finalizeComputation(input));
Long input = 5L;
Assert.assertEquals(input, fac.finalizeComputation(input));
}
}
View File
@@ -19,25 +19,17 @@
package org.apache.druid.query.movingaverage.averagers;
import org.junit.Assert;
import org.junit.Test;
import java.util.Collections;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
/**
* Test class for BaseAverager
*/
public class BaseAveragerTest
{
public static class TestAverager extends BaseAverager<Integer, Integer>
{
public TestAverager(Class<Integer> clazz, int b, String name, String field, int cycleSize)
TestAverager(Class<Integer> clazz, int b, String name, String field, int cycleSize)
{
super(clazz, b, name, field, cycleSize);
}
@@ -54,10 +46,10 @@ public class BaseAveragerTest
{
BaseAverager<Integer, Integer> avg = new TestAverager(Integer.class, 5, "test", "field", 1);
assertEquals("test", avg.getName());
assertEquals(5, avg.getNumBuckets());
assertEquals(5, avg.getBuckets().length);
assertTrue(avg.getBuckets().getClass().isArray());
Assert.assertEquals("test", avg.getName());
Assert.assertEquals(5, avg.getNumBuckets());
Assert.assertEquals(5, avg.getBuckets().length);
Assert.assertTrue(avg.getBuckets().getClass().isArray());
}
@Test
@@ -67,24 +59,24 @@ public class BaseAveragerTest
Object[] buckets = avg.getBuckets();
avg.addElement(Collections.singletonMap("field", 1), Collections.emptyMap());
assertEquals(Integer.valueOf(1), buckets[0]);
assertNull(buckets[1]);
assertNull(buckets[2]);
Assert.assertEquals(1, buckets[0]);
Assert.assertNull(buckets[1]);
Assert.assertNull(buckets[2]);
avg.addElement(Collections.singletonMap("field", 2), Collections.emptyMap());
assertEquals(Integer.valueOf(1), buckets[0]);
assertEquals(Integer.valueOf(2), buckets[1]);
assertNull(buckets[2]);
Assert.assertEquals(1, buckets[0]);
Assert.assertEquals(2, buckets[1]);
Assert.assertNull(buckets[2]);
avg.addElement(Collections.singletonMap("field", 3), Collections.emptyMap());
assertEquals(Integer.valueOf(1), buckets[0]);
assertEquals(Integer.valueOf(2), buckets[1]);
assertEquals(Integer.valueOf(3), buckets[2]);
Assert.assertEquals(1, buckets[0]);
Assert.assertEquals(2, buckets[1]);
Assert.assertEquals(3, buckets[2]);
avg.addElement(Collections.singletonMap("field", 4), Collections.emptyMap());
assertEquals(Integer.valueOf(4), buckets[0]);
assertEquals(Integer.valueOf(2), buckets[1]);
assertEquals(Integer.valueOf(3), buckets[2]);
Assert.assertEquals(4, buckets[0]);
Assert.assertEquals(2, buckets[1]);
Assert.assertEquals(3, buckets[2]);
}
@Test
@@ -97,32 +89,32 @@ public class BaseAveragerTest
avg.addElement(Collections.singletonMap("field", 1), Collections.emptyMap());
avg.addElement(Collections.singletonMap("field", 1), Collections.emptyMap());
assertEquals(Integer.valueOf(1), buckets[0]);
assertEquals(Integer.valueOf(1), buckets[1]);
assertEquals(Integer.valueOf(1), buckets[2]);
Assert.assertEquals(1, buckets[0]);
Assert.assertEquals(1, buckets[1]);
Assert.assertEquals(1, buckets[2]);
avg.skip();
assertNull(buckets[0]);
assertNotNull(buckets[1]);
assertNotNull(buckets[2]);
Assert.assertNull(buckets[0]);
Assert.assertNotNull(buckets[1]);
Assert.assertNotNull(buckets[2]);
avg.skip();
assertNull(buckets[0]);
assertNull(buckets[1]);
assertNotNull(buckets[2]);
Assert.assertNull(buckets[0]);
Assert.assertNull(buckets[1]);
Assert.assertNotNull(buckets[2]);
avg.skip();
assertNull(buckets[0]);
assertNull(buckets[1]);
assertNull(buckets[2]);
Assert.assertNull(buckets[0]);
Assert.assertNull(buckets[1]);
Assert.assertNull(buckets[2]);
// poke some test data into the array
buckets[0] = Integer.valueOf(1);
buckets[0] = 1;
avg.skip();
assertNull(buckets[0]);
assertNull(buckets[1]);
assertNull(buckets[2]);
Assert.assertNull(buckets[0]);
Assert.assertNull(buckets[1]);
Assert.assertNull(buckets[2]);
}
@Test
@@ -130,16 +122,16 @@ public class BaseAveragerTest
{
BaseAverager<Integer, Integer> avg = new TestAverager(Integer.class, 3, "test", "field", 1);
assertFalse(avg.hasData());
Assert.assertFalse(avg.hasData());
avg.addElement(Collections.singletonMap("field", 1), Collections.emptyMap());
assertTrue(avg.hasData());
Assert.assertTrue(avg.hasData());
avg.skip();
avg.skip();
avg.skip();
assertFalse(avg.hasData());
Assert.assertFalse(avg.hasData());
}
@Test
@@ -147,10 +139,9 @@ public class BaseAveragerTest
{
BaseAverager<Integer, Integer> avg = new TestAverager(Integer.class, 3, "test", "field", 1);
assertNull(avg.getResult());
Assert.assertNull(avg.getResult());
avg.addElement(Collections.singletonMap("field", 1), Collections.emptyMap());
assertEquals(Integer.valueOf(1), avg.getResult());
Assert.assertEquals(Integer.valueOf(1), avg.getResult());
}
}
View File
@@ -19,20 +19,16 @@
package org.apache.druid.query.movingaverage.averagers;
import org.hamcrest.CoreMatchers;
import org.junit.Assert;
import org.junit.Test;
import static org.hamcrest.CoreMatchers.instanceOf;
import static org.junit.Assert.assertThat;
public class DoubleMaxAveragerFactoryTest
{
@Test
public void testCreateAverager()
{
AveragerFactory<?, ?> fac = new DoubleMaxAveragerFactory("test", 5, 1, "field");
assertThat(fac.createAverager(), instanceOf(DoubleMaxAverager.class));
Assert.assertThat(fac.createAverager(), CoreMatchers.instanceOf(DoubleMaxAverager.class));
}
}
View File
@@ -19,39 +19,36 @@
package org.apache.druid.query.movingaverage.averagers;
import org.junit.Assert;
import org.junit.Test;
import java.util.Collections;
import java.util.HashMap;
import static org.junit.Assert.assertEquals;
public class DoubleMaxAveragerTest
{
@Test
public void testComputeResult()
{
BaseAverager<Number, Double> avg = new DoubleMaxAverager(3, "test", "field", 1);
assertEquals(Double.NEGATIVE_INFINITY, avg.computeResult(), 0.0);
Assert.assertEquals(Double.NEGATIVE_INFINITY, avg.computeResult(), 0.0);
avg.addElement(Collections.singletonMap("field", -1.1e100), new HashMap<>());
assertEquals(-1.1e100, avg.computeResult(), 0.0);
Assert.assertEquals(-1.1e100, avg.computeResult(), 0.0);
avg.addElement(Collections.singletonMap("field", 1.0), new HashMap<>());
assertEquals(1.0, avg.computeResult(), 0.0);
Assert.assertEquals(1.0, avg.computeResult(), 0.0);
avg.addElement(Collections.singletonMap("field", new Integer(1)), new HashMap<>());
assertEquals(1.0, avg.computeResult(), 0.0);
avg.addElement(Collections.singletonMap("field", 1), new HashMap<>());
Assert.assertEquals(1.0, avg.computeResult(), 0.0);
avg.addElement(Collections.singletonMap("field", 5.0), new HashMap<>());
avg.addElement(Collections.singletonMap("field", 3.0), new HashMap<>());
avg.addElement(Collections.singletonMap("field", 2.0), new HashMap<>());
assertEquals(5.0, avg.computeResult(), 0.0);
Assert.assertEquals(5.0, avg.computeResult(), 0.0);
avg.skip();
assertEquals(3.0, avg.computeResult(), 0.0);
Assert.assertEquals(3.0, avg.computeResult(), 0.0);
}
}
View File
@@ -19,19 +19,16 @@
package org.apache.druid.query.movingaverage.averagers;
import org.hamcrest.core.IsInstanceOf;
import org.junit.Assert;
import org.junit.Test;
import static org.hamcrest.core.IsInstanceOf.instanceOf;
import static org.junit.Assert.assertThat;
public class DoubleMeanAveragerFactoryTest
{
@Test
public void testCreateAverager()
{
AveragerFactory<?, ?> fac = new DoubleMeanAveragerFactory("test", 5, 1, "field");
assertThat(fac.createAverager(), instanceOf(DoubleMeanAverager.class));
Assert.assertThat(fac.createAverager(), IsInstanceOf.instanceOf(DoubleMeanAverager.class));
}
}
View File
@@ -19,40 +19,36 @@
package org.apache.druid.query.movingaverage.averagers;
import org.junit.Assert;
import org.junit.Test;
import java.util.Collections;
import java.util.HashMap;
import static org.junit.Assert.assertEquals;
public class DoubleMeanAveragerTest
{
@Test
public void testComputeResult()
{
BaseAverager<Number, Double> avg = new DoubleMeanAverager(3, "test", "field", 1);
assertEquals(0.0, avg.computeResult(), 0.0);
Assert.assertEquals(0.0, avg.computeResult(), 0.0);
avg.addElement(Collections.singletonMap("field", 3.0), new HashMap<>());
assertEquals(1.0, avg.computeResult(), 0.0);
Assert.assertEquals(1.0, avg.computeResult(), 0.0);
avg.addElement(Collections.singletonMap("field", 3.0), new HashMap<>());
assertEquals(2.0, avg.computeResult(), 0.0);
Assert.assertEquals(2.0, avg.computeResult(), 0.0);
avg.addElement(Collections.singletonMap("field", new Integer(0)), new HashMap<>());
assertEquals(2.0, avg.computeResult(), 0.0);
avg.addElement(Collections.singletonMap("field", 0), new HashMap<>());
Assert.assertEquals(2.0, avg.computeResult(), 0.0);
avg.addElement(Collections.singletonMap("field", 2.0), new HashMap<>());
avg.addElement(Collections.singletonMap("field", 2.0), new HashMap<>());
avg.addElement(Collections.singletonMap("field", 2.0), new HashMap<>());
assertEquals(2.0, avg.computeResult(), 0.0);
Assert.assertEquals(2.0, avg.computeResult(), 0.0);
avg.skip();
assertEquals(4.0 / 3, avg.computeResult(), 0.0);
Assert.assertEquals(4.0 / 3, avg.computeResult(), 0.0);
}
}
View File
@@ -19,16 +19,14 @@
package org.apache.druid.query.movingaverage.averagers;
import org.junit.Assert;
import org.junit.Test;
import java.util.Collections;
import java.util.HashMap;
import static org.junit.Assert.assertEquals;
public class DoubleMeanAveragerWithPeriodTest
{
@Test
public void testComputeResult()
{
@@ -49,10 +47,10 @@ public class DoubleMeanAveragerWithPeriodTest
averager.addElement(Collections.singletonMap("field", 5.0), new HashMap<>());
averager.addElement(Collections.singletonMap("field", 6.0), new HashMap<>());
assertEquals(7, averager.computeResult(), 0.0); // (7+7)/2
Assert.assertEquals(7, averager.computeResult(), 0.0); // (7+7)/2
averager.addElement(Collections.singletonMap("field", 3.0), new HashMap<>());
assertEquals(1, averager.computeResult(), 0.0); // (1+1)/2
Assert.assertEquals(1, averager.computeResult(), 0.0); // (1+1)/2
BaseAverager<Number, Double> averager1 = new DoubleMeanAverager(14, "test", "field", 3);
@@ -71,11 +69,10 @@ public class DoubleMeanAveragerWithPeriodTest
averager1.addElement(Collections.singletonMap("field", 1.0), new HashMap<>());
averager1.addElement(Collections.singletonMap("field", 2.0), new HashMap<>());
assertEquals(1, averager1.computeResult(), 0.0); // (1+1+1+1+1)/5
Assert.assertEquals(1, averager1.computeResult(), 0.0); // (1+1+1+1+1)/5
assertEquals(2, averager1.computeResult(), 0.0); // (2+2+2+2+2)/5
assertEquals(13.0 / 5, averager1.computeResult(), 0.0); // (3+3+3+3+1)/5
Assert.assertEquals(2, averager1.computeResult(), 0.0); // (2+2+2+2+2)/5
Assert.assertEquals(13.0 / 5, averager1.computeResult(), 0.0); // (3+3+3+3+1)/5
}
}
View File
@@ -19,19 +19,16 @@
package org.apache.druid.query.movingaverage.averagers;
import org.hamcrest.core.IsInstanceOf;
import org.junit.Assert;
import org.junit.Test;
import static org.hamcrest.core.IsInstanceOf.instanceOf;
import static org.junit.Assert.assertThat;
public class DoubleMeanNoNullAveragerFactoryTest
{
@Test
public void testCreateAverager()
{
AveragerFactory<?, ?> fac = new DoubleMeanNoNullAveragerFactory("test", 5, 1, "field");
assertThat(fac.createAverager(), instanceOf(DoubleMeanNoNullAverager.class));
Assert.assertThat(fac.createAverager(), IsInstanceOf.instanceOf(DoubleMeanNoNullAverager.class));
}
}
View File
@@ -19,45 +19,43 @@
package org.apache.druid.query.movingaverage.averagers;
import org.junit.Assert;
import org.junit.Test;
import java.util.Collections;
import java.util.HashMap;
import static org.junit.Assert.assertEquals;
public class DoubleMeanNoNullAveragerTest
{
@Test
public void testComputeResult()
{
BaseAverager<Number, Double> avg = new DoubleMeanNoNullAverager(3, "test", "field", 1);
assertEquals(Double.NaN, avg.computeResult(), 0.0);
Assert.assertEquals(Double.NaN, avg.computeResult(), 0.0);
avg.addElement(Collections.singletonMap("field", 3.0), new HashMap<>());
assertEquals(3.0, avg.computeResult(), 0.0);
Assert.assertEquals(3.0, avg.computeResult(), 0.0);
avg.addElement(Collections.singletonMap("field", 3.0), new HashMap<>());
assertEquals(3.0, avg.computeResult(), 0.0);
Assert.assertEquals(3.0, avg.computeResult(), 0.0);
avg.addElement(Collections.singletonMap("field", new Integer(0)), new HashMap<>());
assertEquals(2.0, avg.computeResult(), 0.0);
avg.addElement(Collections.singletonMap("field", 0), new HashMap<>());
Assert.assertEquals(2.0, avg.computeResult(), 0.0);
avg.addElement(Collections.singletonMap("field", 2.0), new HashMap<>());
avg.addElement(Collections.singletonMap("field", 2.0), new HashMap<>());
avg.addElement(Collections.singletonMap("field", 2.0), new HashMap<>());
assertEquals(2.0, avg.computeResult(), 0.0);
Assert.assertEquals(2.0, avg.computeResult(), 0.0);
avg.skip();
assertEquals(2.0, avg.computeResult(), 0.0);
Assert.assertEquals(2.0, avg.computeResult(), 0.0);
// testing cycleSize functionality
BaseAverager<Number, Double> averager = new DoubleMeanNoNullAverager(14, "test", "field", 7);
averager.addElement(Collections.singletonMap("field", 2.0), new HashMap<>());
assertEquals(2.0, averager.computeResult(), 0.0);
Assert.assertEquals(2.0, averager.computeResult(), 0.0);
averager.addElement(Collections.singletonMap("field", 4.0), new HashMap<>());
averager.addElement(Collections.singletonMap("field", 5.0), new HashMap<>());
@@ -73,10 +71,9 @@ public class DoubleMeanNoNullAveragerTest
averager.addElement(Collections.singletonMap("field", 15.0), new HashMap<>());
averager.addElement(Collections.singletonMap("field", 16.0), new HashMap<>());
assertEquals(7.5, averager.computeResult(), 0.0);
Assert.assertEquals(7.5, averager.computeResult(), 0.0);
averager.addElement(Collections.singletonMap("field", 3.0), new HashMap<>());
assertEquals(8.5, averager.computeResult(), 0.0);
Assert.assertEquals(8.5, averager.computeResult(), 0.0);
}
}
View File
@@ -19,19 +19,16 @@
package org.apache.druid.query.movingaverage.averagers;
import org.hamcrest.core.IsInstanceOf;
import org.junit.Assert;
import org.junit.Test;
import static org.hamcrest.core.IsInstanceOf.instanceOf;
import static org.junit.Assert.assertThat;
public class DoubleMinAveragerFactoryTest
{
@Test
public void testCreateAverager()
{
AveragerFactory<?, ?> fac = new DoubleMinAveragerFactory("test", 5, 1, "field");
assertThat(fac.createAverager(), instanceOf(DoubleMinAverager.class));
Assert.assertThat(fac.createAverager(), IsInstanceOf.instanceOf(DoubleMinAverager.class));
}
}
View File
@@ -19,40 +19,37 @@
package org.apache.druid.query.movingaverage.averagers;
import org.junit.Assert;
import org.junit.Test;
import java.util.Collections;
import java.util.HashMap;
import static org.junit.Assert.assertEquals;
public class DoubleMinAveragerTest
{
@Test
public void testComputeResult()
{
BaseAverager<Number, Double> avg = new DoubleMinAverager(3, "test", "field", 1);
assertEquals(Double.POSITIVE_INFINITY, avg.computeResult(), 0.0);
Assert.assertEquals(Double.POSITIVE_INFINITY, avg.computeResult(), 0.0);
avg.addElement(Collections.singletonMap("field", -1.1e100), new HashMap<>());
assertEquals(-1.1e100, avg.computeResult(), 0.0);
Assert.assertEquals(-1.1e100, avg.computeResult(), 0.0);
avg.addElement(Collections.singletonMap("field", 1.0), new HashMap<>());
assertEquals(-1.1e100, avg.computeResult(), 0.0);
Assert.assertEquals(-1.1e100, avg.computeResult(), 0.0);
avg.addElement(Collections.singletonMap("field", new Integer(1)), new HashMap<>());
assertEquals(-1.1e100, avg.computeResult(), 0.0);
Assert.assertEquals(-1.1e100, avg.computeResult(), 0.0);
avg.addElement(Collections.singletonMap("field", 5.0), new HashMap<>());
avg.addElement(Collections.singletonMap("field", 2.0), new HashMap<>());
avg.addElement(Collections.singletonMap("field", 3.0), new HashMap<>());
assertEquals(2.0, avg.computeResult(), 0.0);
Assert.assertEquals(2.0, avg.computeResult(), 0.0);
avg.skip();
avg.skip();
assertEquals(3.0, avg.computeResult(), 0.0);
Assert.assertEquals(3.0, avg.computeResult(), 0.0);
}
}
View File
@@ -19,19 +19,16 @@
package org.apache.druid.query.movingaverage.averagers;
import org.hamcrest.core.IsInstanceOf;
import org.junit.Assert;
import org.junit.Test;
import static org.hamcrest.core.IsInstanceOf.instanceOf;
import static org.junit.Assert.assertThat;
public class LongMaxAveragerFactoryTest
{
@Test
public void testCreateAverager()
{
AveragerFactory<?, ?> fac = new LongMaxAveragerFactory("test", 5, 1, "field");
assertThat(fac.createAverager(), instanceOf(LongMaxAverager.class));
Assert.assertThat(fac.createAverager(), IsInstanceOf.instanceOf(LongMaxAverager.class));
}
}
View File
@@ -19,39 +19,36 @@
package org.apache.druid.query.movingaverage.averagers;
import org.junit.Assert;
import org.junit.Test;
import java.util.Collections;
import java.util.HashMap;
import static org.junit.Assert.assertEquals;
public class LongMaxAveragerTest
{
@Test
public void testComputeResult()
{
BaseAverager<Number, Long> avg = new LongMaxAverager(3, "test", "field", 1);
assertEquals(Long.MIN_VALUE, (long) avg.computeResult());
Assert.assertEquals(Long.MIN_VALUE, (long) avg.computeResult());
avg.addElement(Collections.singletonMap("field", -1000000L), new HashMap<>());
assertEquals(-1000000, (long) avg.computeResult());
Assert.assertEquals(-1000000, (long) avg.computeResult());
avg.addElement(Collections.singletonMap("field", 1L), new HashMap<>());
assertEquals(1, (long) avg.computeResult());
Assert.assertEquals(1, (long) avg.computeResult());
avg.addElement(Collections.singletonMap("field", new Integer(1)), new HashMap<>());
assertEquals(1, (long) avg.computeResult());
avg.addElement(Collections.singletonMap("field", 1), new HashMap<>());
Assert.assertEquals(1, (long) avg.computeResult());
avg.addElement(Collections.singletonMap("field", 5L), new HashMap<>());
avg.addElement(Collections.singletonMap("field", 3L), new HashMap<>());
avg.addElement(Collections.singletonMap("field", 2L), new HashMap<>());
assertEquals(5, (long) avg.computeResult());
Assert.assertEquals(5, (long) avg.computeResult());
avg.skip();
assertEquals(3, (long) avg.computeResult());
Assert.assertEquals(3, (long) avg.computeResult());
}
}
View File
@@ -19,19 +19,16 @@
package org.apache.druid.query.movingaverage.averagers;
import org.hamcrest.core.IsInstanceOf;
import org.junit.Assert;
import org.junit.Test;
import static org.hamcrest.core.IsInstanceOf.instanceOf;
import static org.junit.Assert.assertThat;
public class LongMeanAveragerFactoryTest
{
@Test
public void testCreateAverager()
{
AveragerFactory<?, ?> fac = new LongMeanAveragerFactory("test", 5, 1, "field");
assertThat(fac.createAverager(), instanceOf(LongMeanAverager.class));
Assert.assertThat(fac.createAverager(), IsInstanceOf.instanceOf(LongMeanAverager.class));
}
}
View File
@@ -19,39 +19,36 @@
package org.apache.druid.query.movingaverage.averagers;
import org.junit.Assert;
import org.junit.Test;
import java.util.Collections;
import java.util.HashMap;
import static org.junit.Assert.assertEquals;
public class LongMeanAveragerTest
{
@Test
public void testComputeResult()
{
BaseAverager<Number, Double> avg = new LongMeanAverager(3, "test", "field", 1);
assertEquals(0.0, avg.computeResult(), 0.0);
Assert.assertEquals(0.0, avg.computeResult(), 0.0);
avg.addElement(Collections.singletonMap("field", 3L), new HashMap<>());
assertEquals(1.0, avg.computeResult(), 0.0);
Assert.assertEquals(1.0, avg.computeResult(), 0.0);
avg.addElement(Collections.singletonMap("field", 3L), new HashMap<>());
assertEquals(2.0, avg.computeResult(), 0.0);
Assert.assertEquals(2.0, avg.computeResult(), 0.0);
avg.addElement(Collections.singletonMap("field", 3), new HashMap<>());
assertEquals(3.0, avg.computeResult(), 0.0);
Assert.assertEquals(3.0, avg.computeResult(), 0.0);
avg.addElement(Collections.singletonMap("field", 2L), new HashMap<>());
avg.addElement(Collections.singletonMap("field", 2L), new HashMap<>());
avg.addElement(Collections.singletonMap("field", 2L), new HashMap<>());
assertEquals(2.0, avg.computeResult(), 0.0);
Assert.assertEquals(2.0, avg.computeResult(), 0.0);
avg.skip();
assertEquals(4.0 / 3, avg.computeResult(), 0.0);
Assert.assertEquals(4.0 / 3, avg.computeResult(), 0.0);
}
}
View File
@@ -19,19 +19,16 @@
package org.apache.druid.query.movingaverage.averagers;
import org.hamcrest.core.IsInstanceOf;
import org.junit.Assert;
import org.junit.Test;
import static org.hamcrest.core.IsInstanceOf.instanceOf;
import static org.junit.Assert.assertThat;
public class LongMeanNoNullAveragerFactoryTest
{
@Test
public void testCreateAverager()
{
AveragerFactory<?, ?> fac = new LongMeanNoNullAveragerFactory("test", 5, 1, "field");
assertThat(fac.createAverager(), instanceOf(LongMeanNoNullAverager.class));
Assert.assertThat(fac.createAverager(), IsInstanceOf.instanceOf(LongMeanNoNullAverager.class));
}
}
View File
@@ -19,39 +19,37 @@
package org.apache.druid.query.movingaverage.averagers;
import org.junit.Assert;
import org.junit.Test;
import java.util.Collections;
import java.util.HashMap;
import static org.junit.Assert.assertEquals;
public class LongMeanNoNullAveragerTest
{
@Test
public void testComputeResult()
{
BaseAverager<Number, Double> avg = new LongMeanNoNullAverager(3, "test", "field", 1);
assertEquals(Double.NaN, avg.computeResult(), 0.0);
Assert.assertEquals(Double.NaN, avg.computeResult(), 0.0);
avg.addElement(Collections.singletonMap("field", 3L), new HashMap<>());
assertEquals(3.0, avg.computeResult(), 0.0);
Assert.assertEquals(3.0, avg.computeResult(), 0.0);
avg.addElement(Collections.singletonMap("field", 3L), new HashMap<>());
assertEquals(3.0, avg.computeResult(), 0.0);
Assert.assertEquals(3.0, avg.computeResult(), 0.0);
avg.addElement(Collections.singletonMap("field", new Integer(0)), new HashMap<>());
assertEquals(2.0, avg.computeResult(), 0.0);
avg.addElement(Collections.singletonMap("field", 0), new HashMap<>());
Assert.assertEquals(2.0, avg.computeResult(), 0.0);
avg.addElement(Collections.singletonMap("field", 2L), new HashMap<>());
avg.addElement(Collections.singletonMap("field", 2L), new HashMap<>());
avg.addElement(Collections.singletonMap("field", 2L), new HashMap<>());
assertEquals(2.0, avg.computeResult(), 0.0);
Assert.assertEquals(2.0, avg.computeResult(), 0.0);
avg.skip();
assertEquals(2.0, avg.computeResult(), 0.0);
Assert.assertEquals(2.0, avg.computeResult(), 0.0);
}
}
View File
@@ -19,19 +19,16 @@
package org.apache.druid.query.movingaverage.averagers;
import org.hamcrest.core.IsInstanceOf;
import org.junit.Assert;
import org.junit.Test;
import static org.hamcrest.core.IsInstanceOf.instanceOf;
import static org.junit.Assert.assertThat;
public class LongMinAveragerFactoryTest
{
@Test
public void testCreateAverager()
{
AveragerFactory<?, ?> fac = new LongMinAveragerFactory("test", 5, 1, "field");
assertThat(fac.createAverager(), instanceOf(LongMinAverager.class));
Assert.assertThat(fac.createAverager(), IsInstanceOf.instanceOf(LongMinAverager.class));
}
}
View File
@@ -19,40 +19,37 @@
package org.apache.druid.query.movingaverage.averagers;
import org.junit.Assert;
import org.junit.Test;
import java.util.Collections;
import java.util.HashMap;
import static org.junit.Assert.assertEquals;
public class LongMinAveragerTest
{
@Test
public void testComputeResult()
{
BaseAverager<Number, Long> avg = new LongMinAverager(3, "test", "field", 1);
assertEquals(Long.MAX_VALUE, (long) avg.computeResult());
Assert.assertEquals(Long.MAX_VALUE, (long) avg.computeResult());
avg.addElement(Collections.singletonMap("field", -10000L), new HashMap<>());
assertEquals(-10000, (long) avg.computeResult());
Assert.assertEquals(-10000, (long) avg.computeResult());
avg.addElement(Collections.singletonMap("field", 1L), new HashMap<>());
assertEquals(-10000, (long) avg.computeResult());
Assert.assertEquals(-10000, (long) avg.computeResult());
avg.addElement(Collections.singletonMap("field", new Integer(1000)), new HashMap<>());
assertEquals(-10000, (long) avg.computeResult());
avg.addElement(Collections.singletonMap("field", 1000), new HashMap<>());
Assert.assertEquals(-10000, (long) avg.computeResult());
avg.addElement(Collections.singletonMap("field", 5L), new HashMap<>());
avg.addElement(Collections.singletonMap("field", 2L), new HashMap<>());
avg.addElement(Collections.singletonMap("field", 3L), new HashMap<>());
assertEquals(2, (long) avg.computeResult());
Assert.assertEquals(2, (long) avg.computeResult());
avg.skip();
avg.skip();
assertEquals(3, (long) avg.computeResult());
Assert.assertEquals(3, (long) avg.computeResult());
}
}
View File
@@ -30,8 +30,6 @@ import org.junit.Test;
import java.util.HashMap;
import java.util.Map;
import static org.junit.Assert.assertEquals;
public class EventConverterTest
{
private EventConverter converter;
@@ -66,16 +64,16 @@ public class EventConverterTest
expectedTags.put("type", "groupBy");
OpentsdbEvent opentsdbEvent = converter.convert(configuredEvent);
assertEquals("query.time", opentsdbEvent.getMetric());
assertEquals(dateTime.getMillis() / 1000L, opentsdbEvent.getTimestamp());
assertEquals(10, opentsdbEvent.getValue());
assertEquals(expectedTags, opentsdbEvent.getTags());
Assert.assertEquals("query.time", opentsdbEvent.getMetric());
Assert.assertEquals(dateTime.getMillis() / 1000L, opentsdbEvent.getTimestamp());
Assert.assertEquals(10, opentsdbEvent.getValue());
Assert.assertEquals(expectedTags, opentsdbEvent.getTags());
ServiceMetricEvent notConfiguredEvent = new ServiceMetricEvent.Builder()
.setDimension("dataSource", "data-source")
.setDimension("type", "groupBy")
.build(dateTime, "foo/bar", 10)
.build("broker", "brokerHost1");
assertEquals(null, converter.convert(notConfiguredEvent));
Assert.assertNull(converter.convert(notConfiguredEvent));
}
}
View File
@@ -23,12 +23,9 @@ import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.collect.ImmutableMap;
import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.java.util.emitter.service.ServiceMetricEvent;
import org.junit.Assert;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
/**
*/
public class DimensionConverterTest
{
@Test
@@ -56,10 +53,10 @@ public class DimensionConverterTest
event.getUserDims(),
actual
);
assertEquals("correct StatsDMetric.Type", StatsDMetric.Type.timer, statsDMetric.type);
Assert.assertEquals("correct StatsDMetric.Type", StatsDMetric.Type.timer, statsDMetric.type);
ImmutableMap.Builder<String, String> expected = new ImmutableMap.Builder<>();
expected.put("dataSource", "data-source");
expected.put("type", "groupBy");
assertEquals("correct Dimensions", expected.build(), actual.build());
Assert.assertEquals("correct Dimensions", expected.build(), actual.build());
}
}
View File
@@ -23,65 +23,60 @@ import com.fasterxml.jackson.databind.ObjectMapper;
import com.timgroup.statsd.StatsDClient;
import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.java.util.emitter.service.ServiceMetricEvent;
import org.easymock.EasyMock;
import org.junit.Test;
import static org.easymock.EasyMock.createMock;
import static org.easymock.EasyMock.replay;
import static org.easymock.EasyMock.verify;
/**
*/
public class StatsDEmitterTest
{
@Test
public void testConvertRange()
{
StatsDClient client = createMock(StatsDClient.class);
StatsDClient client = EasyMock.createMock(StatsDClient.class);
StatsDEmitter emitter = new StatsDEmitter(
new StatsDEmitterConfig("localhost", 8888, null, null, null, null, null, null, null),
new ObjectMapper(),
client
);
client.gauge("broker.query.cache.total.hitRate", 54, new String[0]);
replay(client);
client.gauge("broker.query.cache.total.hitRate", 54);
EasyMock.replay(client);
emitter.emit(new ServiceMetricEvent.Builder()
.setDimension("dataSource", "data-source")
.build(DateTimes.nowUtc(), "query/cache/total/hitRate", 0.54)
.build("broker", "brokerHost1")
);
verify(client);
EasyMock.verify(client);
}
@Test
public void testConvertRangeWithDogstatsd()
{
StatsDClient client = createMock(StatsDClient.class);
StatsDClient client = EasyMock.createMock(StatsDClient.class);
StatsDEmitter emitter = new StatsDEmitter(
new StatsDEmitterConfig("localhost", 8888, null, null, null, null, null, true, null),
new ObjectMapper(),
client
);
client.gauge("broker.query.cache.total.hitRate", 0.54, new String[0]);
replay(client);
client.gauge("broker.query.cache.total.hitRate", 0.54);
EasyMock.replay(client);
emitter.emit(new ServiceMetricEvent.Builder()
.setDimension("dataSource", "data-source")
.build(DateTimes.nowUtc(), "query/cache/total/hitRate", 0.54)
.build("broker", "brokerHost1")
);
verify(client);
EasyMock.verify(client);
}
@Test
public void testNoConvertRange()
{
StatsDClient client = createMock(StatsDClient.class);
StatsDClient client = EasyMock.createMock(StatsDClient.class);
StatsDEmitter emitter = new StatsDEmitter(
new StatsDEmitterConfig("localhost", 8888, null, null, null, null, null, null, null),
new ObjectMapper(),
client
);
client.time("broker.query.time.data-source.groupBy", 10, new String[0]);
replay(client);
client.time("broker.query.time.data-source.groupBy", 10);
EasyMock.replay(client);
emitter.emit(new ServiceMetricEvent.Builder()
.setDimension("dataSource", "data-source")
.setDimension("type", "groupBy")
@@ -96,20 +91,20 @@ public class StatsDEmitterTest
.build(DateTimes.nowUtc(), "query/time", 10)
.build("broker", "brokerHost1")
);
verify(client);
EasyMock.verify(client);
}
@Test
public void testConfigOptions()
{
StatsDClient client = createMock(StatsDClient.class);
StatsDClient client = EasyMock.createMock(StatsDClient.class);
StatsDEmitter emitter = new StatsDEmitter(
new StatsDEmitterConfig("localhost", 8888, null, "#", true, null, null, null, null),
new ObjectMapper(),
client
);
client.time("brokerHost1#broker#query#time#data-source#groupBy", 10, new String[0]);
replay(client);
client.time("brokerHost1#broker#query#time#data-source#groupBy", 10);
EasyMock.replay(client);
emitter.emit(new ServiceMetricEvent.Builder()
.setDimension("dataSource", "data-source")
.setDimension("type", "groupBy")
@@ -124,21 +119,22 @@ public class StatsDEmitterTest
.build(DateTimes.nowUtc(), "query/time", 10)
.build("broker", "brokerHost1")
);
verify(client);
EasyMock.verify(client);
}
@Test
public void testDogstatsdEnabled()
{
StatsDClient client = createMock(StatsDClient.class);
StatsDClient client = EasyMock.createMock(StatsDClient.class);
StatsDEmitter emitter = new StatsDEmitter(
new StatsDEmitterConfig("localhost", 8888, null, "#", true, null, null, true, null),
new ObjectMapper(),
client
);
client.time("broker#query#time", 10,
new String[] {"dataSource:data-source", "type:groupBy", "hostname:brokerHost1"});
replay(client);
"dataSource:data-source", "type:groupBy", "hostname:brokerHost1"
);
EasyMock.replay(client);
emitter.emit(new ServiceMetricEvent.Builder()
.setDimension("dataSource", "data-source")
.setDimension("type", "groupBy")
@@ -153,25 +149,25 @@ public class StatsDEmitterTest
.build(DateTimes.nowUtc(), "query/time", 10)
.build("broker", "brokerHost1")
);
verify(client);
EasyMock.verify(client);
}
@Test
public void testBlankHolderOptions()
{
StatsDClient client = createMock(StatsDClient.class);
StatsDClient client = EasyMock.createMock(StatsDClient.class);
StatsDEmitter emitter = new StatsDEmitter(
new StatsDEmitterConfig("localhost", 8888, null, null, true, null, null, null, null),
new ObjectMapper(),
client
);
client.count("brokerHost1.broker.jvm.gc.count.G1-GC", 1, new String[0]);
replay(client);
client.count("brokerHost1.broker.jvm.gc.count.G1-GC", 1);
EasyMock.replay(client);
emitter.emit(new ServiceMetricEvent.Builder()
.setDimension("gcName", "G1 GC")
.build(DateTimes.nowUtc(), "jvm/gc/count", 1)
.build("broker", "brokerHost1")
);
verify(client);
EasyMock.verify(client);
}
}
View File
@@ -39,6 +39,7 @@ import org.apache.thrift.protocol.TBinaryProtocol;
import org.apache.thrift.protocol.TCompactProtocol;
import org.apache.thrift.protocol.TJSONProtocol;
import org.hamcrest.CoreMatchers;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
@@ -46,9 +47,6 @@ import org.junit.rules.ExpectedException;
import java.nio.ByteBuffer;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
public class ThriftInputRowParserTest
{
@Rule
@@ -82,10 +80,10 @@ public class ThriftInputRowParserTest
"example/book.jar",
"org.apache.druid.data.input.thrift.Book"
);
assertEquals(parser1.getThriftClass().getName(), "org.apache.druid.data.input.thrift.Book");
Assert.assertEquals("org.apache.druid.data.input.thrift.Book", parser1.getThriftClass().getName());
ThriftInputRowParser parser2 = new ThriftInputRowParser(parseSpec, null, "org.apache.druid.data.input.thrift.Book");
assertEquals(parser2.getThriftClass().getName(), "org.apache.druid.data.input.thrift.Book");
Assert.assertEquals("org.apache.druid.data.input.thrift.Book", parser2.getThriftClass().getName());
}
@Test
@@ -144,17 +142,18 @@ public class ThriftInputRowParserTest
expectedException.expect(CoreMatchers.instanceOf(IllegalStateException.class));
expectedException.expectMessage("JavaScript is disabled");
//noinspection ResultOfMethodCallIgnored (this method call will trigger the expected exception)
parser.parseBatch(ByteBuffer.allocate(1)).get(0);
}
public void serializationAndTest(ThriftInputRowParser parser, byte[] bytes)
private void serializationAndTest(ThriftInputRowParser parser, byte[] bytes)
{
ByteBuffer buffer = ByteBuffer.wrap(bytes);
InputRow row1 = parser.parseBatch(buffer).get(0);
assertTrue(row1.getDimension("title").get(0).equals("title"));
Assert.assertEquals("title", row1.getDimension("title").get(0));
InputRow row2 = parser.parseBatch(new BytesWritable(bytes)).get(0);
assertTrue(row2.getDimension("lastName").get(0).equals("last"));
Assert.assertEquals("last", row2.getDimension("lastName").get(0));
}
}
View File
@@ -35,10 +35,6 @@ import org.junit.Test;
import java.io.File;
import java.io.IOException;
import static org.apache.druid.data.input.AvroStreamInputRowParserTest.DIMENSIONS;
import static org.apache.druid.data.input.AvroStreamInputRowParserTest.PARSE_SPEC;
import static org.apache.druid.data.input.AvroStreamInputRowParserTest.assertInputRowCorrect;
import static org.apache.druid.data.input.AvroStreamInputRowParserTest.buildSomeAvroDatum;
public class AvroHadoopInputRowParserTest
{
@@ -55,7 +51,7 @@ public class AvroHadoopInputRowParserTest
@Test
public void testParseNotFromSpark() throws IOException
{
testParse(buildSomeAvroDatum());
testParse(AvroStreamInputRowParserTest.buildSomeAvroDatum());
}
@Test
@@ -66,19 +62,19 @@ public class AvroHadoopInputRowParserTest
private void testParse(GenericRecord record) throws IOException
{
AvroHadoopInputRowParser parser = new AvroHadoopInputRowParser(PARSE_SPEC);
AvroHadoopInputRowParser parser = new AvroHadoopInputRowParser(AvroStreamInputRowParserTest.PARSE_SPEC);
AvroHadoopInputRowParser parser2 = jsonMapper.readValue(
jsonMapper.writeValueAsBytes(parser),
AvroHadoopInputRowParser.class
);
InputRow inputRow = parser2.parseBatch(record).get(0);
assertInputRowCorrect(inputRow, DIMENSIONS);
AvroStreamInputRowParserTest.assertInputRowCorrect(inputRow, AvroStreamInputRowParserTest.DIMENSIONS);
}
private static GenericRecord buildAvroFromFile() throws IOException
{
return buildAvroFromFile(
buildSomeAvroDatum()
AvroStreamInputRowParserTest.buildSomeAvroDatum()
);
}
@@ -107,5 +103,4 @@ public class AvroHadoopInputRowParserTest
return record;
}
}
View File
@@ -45,6 +45,7 @@ import org.apache.druid.java.util.common.parsers.JSONPathFieldType;
import org.apache.druid.java.util.common.parsers.JSONPathSpec;
import org.joda.time.DateTime;
import org.joda.time.chrono.ISOChronology;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.schemarepo.InMemoryRepository;
@@ -68,8 +69,6 @@ import java.util.List;
import java.util.Map;
import java.util.regex.Pattern;
import static org.junit.Assert.assertEquals;
public class AvroStreamInputRowParserTest
{
private static final String EVENT_TYPE = "eventType";
@@ -123,9 +122,9 @@ public class AvroStreamInputRowParserTest
private static final long SUB_LONG_VALUE = 1543698L;
private static final int SUB_INT_VALUE = 4892;
private static final MySubRecord SOME_RECORD_VALUE = MySubRecord.newBuilder()
.setSubInt(SUB_INT_VALUE)
.setSubLong(SUB_LONG_VALUE)
.build();
.setSubInt(SUB_INT_VALUE)
.setSubLong(SUB_LONG_VALUE)
.build();
private static final List<CharSequence> SOME_STRING_ARRAY_VALUE = Arrays.asList("8", "4", "2", "1");
private static final List<Integer> SOME_INT_ARRAY_VALUE = Arrays.asList(1, 2, 4, 8);
private static final Map<CharSequence, Integer> SOME_INT_VALUE_MAP_VALUE = Maps.asMap(
@@ -181,7 +180,7 @@ public class AvroStreamInputRowParserTest
ByteBufferInputRowParser.class
);
assertEquals(parser, parser2);
Assert.assertEquals(parser, parser2);
}
@Test
@@ -204,7 +203,7 @@ public class AvroStreamInputRowParserTest
// encode schema id
Avro1124SubjectAndIdConverter converter = new Avro1124SubjectAndIdConverter(TOPIC);
TypedSchemaRepository<Integer, Schema, String> repositoryClient = new TypedSchemaRepository<Integer, Schema, String>(
TypedSchemaRepository<Integer, Schema, String> repositoryClient = new TypedSchemaRepository<>(
repository,
new IntegerConverter(),
new AvroSchemaConverter(),
@@ -269,25 +268,28 @@ public class AvroStreamInputRowParserTest
static void assertInputRowCorrect(InputRow inputRow, List<String> expectedDimensions)
{
assertEquals(expectedDimensions, inputRow.getDimensions());
assertEquals(1543698L, inputRow.getTimestampFromEpoch());
Assert.assertEquals(expectedDimensions, inputRow.getDimensions());
Assert.assertEquals(1543698L, inputRow.getTimestampFromEpoch());
// test dimensions
assertEquals(Collections.singletonList(EVENT_TYPE_VALUE), inputRow.getDimension(EVENT_TYPE));
assertEquals(Collections.singletonList(String.valueOf(ID_VALUE)), inputRow.getDimension(ID));
assertEquals(Collections.singletonList(String.valueOf(SOME_OTHER_ID_VALUE)), inputRow.getDimension(SOME_OTHER_ID));
assertEquals(Collections.singletonList(String.valueOf(true)), inputRow.getDimension(IS_VALID));
assertEquals(
Assert.assertEquals(Collections.singletonList(EVENT_TYPE_VALUE), inputRow.getDimension(EVENT_TYPE));
Assert.assertEquals(Collections.singletonList(String.valueOf(ID_VALUE)), inputRow.getDimension(ID));
Assert.assertEquals(
Collections.singletonList(String.valueOf(SOME_OTHER_ID_VALUE)),
inputRow.getDimension(SOME_OTHER_ID)
);
Assert.assertEquals(Collections.singletonList(String.valueOf(true)), inputRow.getDimension(IS_VALID));
Assert.assertEquals(
Lists.transform(SOME_INT_ARRAY_VALUE, String::valueOf),
inputRow.getDimension("someIntArray")
);
assertEquals(
Assert.assertEquals(
Lists.transform(SOME_STRING_ARRAY_VALUE, String::valueOf),
inputRow.getDimension("someStringArray")
);
// towards Map avro field as druid dimension, need to convert its toString() back to HashMap to check equality
assertEquals(1, inputRow.getDimension("someIntValueMap").size());
assertEquals(
Assert.assertEquals(1, inputRow.getDimension("someIntValueMap").size());
Assert.assertEquals(
SOME_INT_VALUE_MAP_VALUE,
new HashMap<CharSequence, Integer>(
Maps.transformValues(
@@ -307,7 +309,7 @@ public class AvroStreamInputRowParserTest
)
)
);
assertEquals(
Assert.assertEquals(
SOME_STRING_VALUE_MAP_VALUE,
new HashMap<CharSequence, CharSequence>(
Splitter
@@ -316,43 +318,46 @@ public class AvroStreamInputRowParserTest
.split(BRACES_AND_SPACE.matcher(inputRow.getDimension("someIntValueMap").get(0)).replaceAll(""))
)
);
assertEquals(Collections.singletonList(SOME_UNION_VALUE), inputRow.getDimension("someUnion"));
assertEquals(Collections.emptyList(), inputRow.getDimension("someNull"));
assertEquals(SOME_FIXED_VALUE, inputRow.getRaw("someFixed"));
assertEquals(
Assert.assertEquals(Collections.singletonList(SOME_UNION_VALUE), inputRow.getDimension("someUnion"));
Assert.assertEquals(Collections.emptyList(), inputRow.getDimension("someNull"));
Assert.assertEquals(SOME_FIXED_VALUE, inputRow.getRaw("someFixed"));
Assert.assertEquals(
Arrays.toString(SOME_BYTES_VALUE.array()),
Arrays.toString((byte[]) (inputRow.getRaw("someBytes")))
);
assertEquals(Collections.singletonList(String.valueOf(MyEnum.ENUM1)), inputRow.getDimension("someEnum"));
assertEquals(Collections.singletonList(String.valueOf(SOME_RECORD_VALUE)), inputRow.getDimension("someRecord"));
Assert.assertEquals(Collections.singletonList(String.valueOf(MyEnum.ENUM1)), inputRow.getDimension("someEnum"));
Assert.assertEquals(
Collections.singletonList(String.valueOf(SOME_RECORD_VALUE)),
inputRow.getDimension("someRecord")
);
// test metrics
assertEquals(SOME_FLOAT_VALUE, inputRow.getMetric("someFloat").floatValue(), 0);
assertEquals(SOME_LONG_VALUE, inputRow.getMetric("someLong"));
assertEquals(SOME_INT_VALUE, inputRow.getMetric("someInt"));
Assert.assertEquals(SOME_FLOAT_VALUE, inputRow.getMetric("someFloat").floatValue(), 0);
Assert.assertEquals(SOME_LONG_VALUE, inputRow.getMetric("someLong"));
Assert.assertEquals(SOME_INT_VALUE, inputRow.getMetric("someInt"));
}
public static SomeAvroDatum buildSomeAvroDatum()
{
return SomeAvroDatum.newBuilder()
.setTimestamp(DATE_TIME.getMillis())
.setEventType(EVENT_TYPE_VALUE)
.setId(ID_VALUE)
.setSomeOtherId(SOME_OTHER_ID_VALUE)
.setIsValid(true)
.setSomeFloat(SOME_FLOAT_VALUE)
.setSomeInt(SOME_INT_VALUE)
.setSomeLong(SOME_LONG_VALUE)
.setSomeIntArray(SOME_INT_ARRAY_VALUE)
.setSomeStringArray(SOME_STRING_ARRAY_VALUE)
.setSomeIntValueMap(SOME_INT_VALUE_MAP_VALUE)
.setSomeStringValueMap(SOME_STRING_VALUE_MAP_VALUE)
.setSomeUnion(SOME_UNION_VALUE)
.setSomeFixed(SOME_FIXED_VALUE)
.setSomeBytes(SOME_BYTES_VALUE)
.setSomeNull(null)
.setSomeEnum(MyEnum.ENUM1)
.setSomeRecord(SOME_RECORD_VALUE)
.build();
.setTimestamp(DATE_TIME.getMillis())
.setEventType(EVENT_TYPE_VALUE)
.setId(ID_VALUE)
.setSomeOtherId(SOME_OTHER_ID_VALUE)
.setIsValid(true)
.setSomeFloat(SOME_FLOAT_VALUE)
.setSomeInt(SOME_INT_VALUE)
.setSomeLong(SOME_LONG_VALUE)
.setSomeIntArray(SOME_INT_ARRAY_VALUE)
.setSomeStringArray(SOME_STRING_ARRAY_VALUE)
.setSomeIntValueMap(SOME_INT_VALUE_MAP_VALUE)
.setSomeStringValueMap(SOME_STRING_VALUE_MAP_VALUE)
.setSomeUnion(SOME_UNION_VALUE)
.setSomeFixed(SOME_FIXED_VALUE)
.setSomeBytes(SOME_BYTES_VALUE)
.setSomeNull(null)
.setSomeEnum(MyEnum.ENUM1)
.setSomeRecord(SOME_RECORD_VALUE)
.build();
}
}
View File
@@ -31,18 +31,13 @@ import org.apache.druid.java.util.common.parsers.ParseException;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.mockito.ArgumentMatchers;
import org.mockito.Mockito;
import java.io.ByteArrayOutputStream;
import java.io.IOException;
import java.nio.ByteBuffer;
import static org.mockito.ArgumentMatchers.anyInt;
import static org.mockito.ArgumentMatchers.eq;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
/**
*/
public class SchemaRegistryBasedAvroBytesDecoderTest
{
private SchemaRegistryClient registry;
@@ -50,14 +45,14 @@ public class SchemaRegistryBasedAvroBytesDecoderTest
@Before
public void setUp()
{
registry = mock(SchemaRegistryClient.class);
registry = Mockito.mock(SchemaRegistryClient.class);
}
@Test
public void testParse() throws Exception
{
// Given
when(registry.getByID(eq(1234))).thenReturn(SomeAvroDatum.getClassSchema());
Mockito.when(registry.getByID(ArgumentMatchers.eq(1234))).thenReturn(SomeAvroDatum.getClassSchema());
GenericRecord someAvroDatum = AvroStreamInputRowParserTest.buildSomeAvroDatum();
Schema schema = SomeAvroDatum.getClassSchema();
byte[] bytes = getAvroDatum(schema, someAvroDatum);
@@ -73,7 +68,7 @@ public class SchemaRegistryBasedAvroBytesDecoderTest
public void testParseCorrupted() throws Exception
{
// Given
when(registry.getByID(eq(1234))).thenReturn(SomeAvroDatum.getClassSchema());
Mockito.when(registry.getByID(ArgumentMatchers.eq(1234))).thenReturn(SomeAvroDatum.getClassSchema());
GenericRecord someAvroDatum = AvroStreamInputRowParserTest.buildSomeAvroDatum();
Schema schema = SomeAvroDatum.getClassSchema();
byte[] bytes = getAvroDatum(schema, someAvroDatum);
@@ -86,7 +81,7 @@ public class SchemaRegistryBasedAvroBytesDecoderTest
public void testParseWrongId() throws Exception
{
// Given
when(registry.getByID(anyInt())).thenThrow(new IOException("no pasaran"));
Mockito.when(registry.getByID(ArgumentMatchers.anyInt())).thenThrow(new IOException("no pasaran"));
GenericRecord someAvroDatum = AvroStreamInputRowParserTest.buildSomeAvroDatum();
Schema schema = SomeAvroDatum.getClassSchema();
byte[] bytes = getAvroDatum(schema, someAvroDatum);
@@ -102,5 +97,4 @@ public class SchemaRegistryBasedAvroBytesDecoderTest
writer.write(someAvroDatum, EncoderFactory.get().directBinaryEncoder(out, null));
return out.toByteArray();
}
}
View File
@@ -30,12 +30,10 @@ import java.nio.ByteBuffer;
import java.util.Random;
import java.util.concurrent.ThreadLocalRandom;
import static org.junit.Assert.assertEquals;
public class BloomKFilterTest
{
private static final int COUNT = 100;
Random rand = ThreadLocalRandom.current();
private Random rand = ThreadLocalRandom.current();
@Test
public void testBloomKFilterBytes() throws IOException
@@ -56,31 +54,31 @@ public class BloomKFilterTest
BloomKFilter.add(buffer, val);
BloomKFilter rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer));
buffer.position(0);
assertEquals(true, rehydrated.test(val));
assertEquals(false, rehydrated.test(val1));
assertEquals(false, rehydrated.test(val2));
assertEquals(false, rehydrated.test(val3));
Assert.assertTrue(rehydrated.test(val));
Assert.assertFalse(rehydrated.test(val1));
Assert.assertFalse(rehydrated.test(val2));
Assert.assertFalse(rehydrated.test(val3));
BloomKFilter.add(buffer, val1);
rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer));
buffer.position(0);
assertEquals(true, rehydrated.test(val));
assertEquals(true, rehydrated.test(val1));
assertEquals(false, rehydrated.test(val2));
assertEquals(false, rehydrated.test(val3));
Assert.assertTrue(rehydrated.test(val));
Assert.assertTrue(rehydrated.test(val1));
Assert.assertFalse(rehydrated.test(val2));
Assert.assertFalse(rehydrated.test(val3));
BloomKFilter.add(buffer, val2);
rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer));
buffer.position(0);
assertEquals(true, rehydrated.test(val));
assertEquals(true, rehydrated.test(val1));
assertEquals(true, rehydrated.test(val2));
assertEquals(false, rehydrated.test(val3));
Assert.assertTrue(rehydrated.test(val));
Assert.assertTrue(rehydrated.test(val1));
Assert.assertTrue(rehydrated.test(val2));
Assert.assertFalse(rehydrated.test(val3));
BloomKFilter.add(buffer, val3);
rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer));
buffer.position(0);
assertEquals(true, rehydrated.test(val));
assertEquals(true, rehydrated.test(val1));
assertEquals(true, rehydrated.test(val2));
assertEquals(true, rehydrated.test(val3));
Assert.assertTrue(rehydrated.test(val));
Assert.assertTrue(rehydrated.test(val1));
Assert.assertTrue(rehydrated.test(val2));
Assert.assertTrue(rehydrated.test(val3));
byte[] randVal = new byte[COUNT];
for (int i = 0; i < COUNT; i++) {
@@ -89,16 +87,16 @@ public class BloomKFilterTest
}
// last value should be present
rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer));
assertEquals(true, rehydrated.test(randVal));
Assert.assertTrue(rehydrated.test(randVal));
// most likely this value should not exist
randVal[0] = 0;
randVal[1] = 0;
randVal[2] = 0;
randVal[3] = 0;
randVal[4] = 0;
assertEquals(false, rehydrated.test(randVal));
Assert.assertFalse(rehydrated.test(randVal));
assertEquals(7808, rehydrated.sizeInBytes());
Assert.assertEquals(7808, rehydrated.sizeInBytes());
}
@Test
@@ -118,31 +116,31 @@ public class BloomKFilterTest
BloomKFilter.addLong(buffer, val);
BloomKFilter rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer));
buffer.position(0);
assertEquals(true, rehydrated.testLong(val));
assertEquals(false, rehydrated.testLong(val1));
assertEquals(false, rehydrated.testLong(val2));
assertEquals(false, rehydrated.testLong(val3));
Assert.assertTrue(rehydrated.testLong(val));
Assert.assertFalse(rehydrated.testLong(val1));
Assert.assertFalse(rehydrated.testLong(val2));
Assert.assertFalse(rehydrated.testLong(val3));
BloomKFilter.addLong(buffer, val1);
rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer));
buffer.position(0);
assertEquals(true, rehydrated.testLong(val));
assertEquals(true, rehydrated.testLong(val1));
assertEquals(false, rehydrated.testLong(val2));
assertEquals(false, rehydrated.testLong(val3));
Assert.assertTrue(rehydrated.testLong(val));
Assert.assertTrue(rehydrated.testLong(val1));
Assert.assertFalse(rehydrated.testLong(val2));
Assert.assertFalse(rehydrated.testLong(val3));
BloomKFilter.addLong(buffer, val2);
rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer));
buffer.position(0);
assertEquals(true, rehydrated.testLong(val));
assertEquals(true, rehydrated.testLong(val1));
assertEquals(true, rehydrated.testLong(val2));
assertEquals(false, rehydrated.testLong(val3));
Assert.assertTrue(rehydrated.testLong(val));
Assert.assertTrue(rehydrated.testLong(val1));
Assert.assertTrue(rehydrated.testLong(val2));
Assert.assertFalse(rehydrated.testLong(val3));
BloomKFilter.addLong(buffer, val3);
rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer));
buffer.position(0);
assertEquals(true, rehydrated.testLong(val));
assertEquals(true, rehydrated.testLong(val1));
assertEquals(true, rehydrated.testLong(val2));
assertEquals(true, rehydrated.testLong(val3));
Assert.assertTrue(rehydrated.testLong(val));
Assert.assertTrue(rehydrated.testLong(val1));
Assert.assertTrue(rehydrated.testLong(val2));
Assert.assertTrue(rehydrated.testLong(val3));
byte randVal = 0;
for (int i = 0; i < COUNT; i++) {
@@ -153,11 +151,11 @@ public class BloomKFilterTest
rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer));
// last value should be present
assertEquals(true, rehydrated.testLong(randVal));
Assert.assertTrue(rehydrated.testLong(randVal));
// most likely this value should not exist
assertEquals(false, rehydrated.testLong((byte) -120));
Assert.assertFalse(rehydrated.testLong((byte) -120));
assertEquals(7808, rehydrated.sizeInBytes());
Assert.assertEquals(7808, rehydrated.sizeInBytes());
}
@Test
@@ -177,31 +175,31 @@ public class BloomKFilterTest
BloomKFilter.addLong(buffer, val);
BloomKFilter rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer));
buffer.position(0);
assertEquals(true, rehydrated.testLong(val));
assertEquals(false, rehydrated.testLong(val1));
assertEquals(false, rehydrated.testLong(val2));
assertEquals(false, rehydrated.testLong(val3));
Assert.assertTrue(rehydrated.testLong(val));
Assert.assertFalse(rehydrated.testLong(val1));
Assert.assertFalse(rehydrated.testLong(val2));
Assert.assertFalse(rehydrated.testLong(val3));
BloomKFilter.addLong(buffer, val1);
rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer));
buffer.position(0);
assertEquals(true, rehydrated.testLong(val));
assertEquals(true, rehydrated.testLong(val1));
assertEquals(false, rehydrated.testLong(val2));
assertEquals(false, rehydrated.testLong(val3));
Assert.assertTrue(rehydrated.testLong(val));
Assert.assertTrue(rehydrated.testLong(val1));
Assert.assertFalse(rehydrated.testLong(val2));
Assert.assertFalse(rehydrated.testLong(val3));
BloomKFilter.addLong(buffer, val2);
rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer));
buffer.position(0);
assertEquals(true, rehydrated.testLong(val));
assertEquals(true, rehydrated.testLong(val1));
assertEquals(true, rehydrated.testLong(val2));
assertEquals(false, rehydrated.testLong(val3));
Assert.assertTrue(rehydrated.testLong(val));
Assert.assertTrue(rehydrated.testLong(val1));
Assert.assertTrue(rehydrated.testLong(val2));
Assert.assertFalse(rehydrated.testLong(val3));
BloomKFilter.addLong(buffer, val3);
rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer));
buffer.position(0);
assertEquals(true, rehydrated.testLong(val));
assertEquals(true, rehydrated.testLong(val1));
assertEquals(true, rehydrated.testLong(val2));
assertEquals(true, rehydrated.testLong(val3));
Assert.assertTrue(rehydrated.testLong(val));
Assert.assertTrue(rehydrated.testLong(val1));
Assert.assertTrue(rehydrated.testLong(val2));
Assert.assertTrue(rehydrated.testLong(val3));
int randVal = 0;
for (int i = 0; i < COUNT; i++) {
@ -210,11 +208,11 @@ public class BloomKFilterTest
}
rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer));
// last value should be present
assertEquals(true, rehydrated.testLong(randVal));
Assert.assertTrue(rehydrated.testLong(randVal));
// most likely this value should not exist
assertEquals(false, rehydrated.testLong(-120));
Assert.assertFalse(rehydrated.testLong(-120));
assertEquals(7808, rehydrated.sizeInBytes());
Assert.assertEquals(7808, rehydrated.sizeInBytes());
}
@Test
@ -234,31 +232,31 @@ public class BloomKFilterTest
BloomKFilter.addLong(buffer, val);
BloomKFilter rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer));
buffer.position(0);
assertEquals(true, rehydrated.testLong(val));
assertEquals(false, rehydrated.testLong(val1));
assertEquals(false, rehydrated.testLong(val2));
assertEquals(false, rehydrated.testLong(val3));
Assert.assertTrue(rehydrated.testLong(val));
Assert.assertFalse(rehydrated.testLong(val1));
Assert.assertFalse(rehydrated.testLong(val2));
Assert.assertFalse(rehydrated.testLong(val3));
BloomKFilter.addLong(buffer, val1);
rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer));
buffer.position(0);
assertEquals(true, rehydrated.testLong(val));
assertEquals(true, rehydrated.testLong(val1));
assertEquals(false, rehydrated.testLong(val2));
assertEquals(false, rehydrated.testLong(val3));
Assert.assertTrue(rehydrated.testLong(val));
Assert.assertTrue(rehydrated.testLong(val1));
Assert.assertFalse(rehydrated.testLong(val2));
Assert.assertFalse(rehydrated.testLong(val3));
BloomKFilter.addLong(buffer, val2);
rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer));
buffer.position(0);
assertEquals(true, rehydrated.testLong(val));
assertEquals(true, rehydrated.testLong(val1));
assertEquals(true, rehydrated.testLong(val2));
assertEquals(false, rehydrated.testLong(val3));
Assert.assertTrue(rehydrated.testLong(val));
Assert.assertTrue(rehydrated.testLong(val1));
Assert.assertTrue(rehydrated.testLong(val2));
Assert.assertFalse(rehydrated.testLong(val3));
BloomKFilter.addLong(buffer, val3);
rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer));
buffer.position(0);
assertEquals(true, rehydrated.testLong(val));
assertEquals(true, rehydrated.testLong(val1));
assertEquals(true, rehydrated.testLong(val2));
assertEquals(true, rehydrated.testLong(val3));
Assert.assertTrue(rehydrated.testLong(val));
Assert.assertTrue(rehydrated.testLong(val1));
Assert.assertTrue(rehydrated.testLong(val2));
Assert.assertTrue(rehydrated.testLong(val3));
int randVal = 0;
for (int i = 0; i < COUNT; i++) {
@ -267,11 +265,11 @@ public class BloomKFilterTest
}
rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer));
// last value should be present
assertEquals(true, rehydrated.testLong(randVal));
Assert.assertTrue(rehydrated.testLong(randVal));
// most likely this value should not exist
assertEquals(false, rehydrated.testLong(-120));
Assert.assertFalse(rehydrated.testLong(-120));
assertEquals(7808, rehydrated.sizeInBytes());
Assert.assertEquals(7808, rehydrated.sizeInBytes());
}
@Test
@ -291,31 +289,31 @@ public class BloomKFilterTest
BloomKFilter.addFloat(buffer, val);
BloomKFilter rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer));
buffer.position(0);
assertEquals(true, rehydrated.testFloat(val));
assertEquals(false, rehydrated.testFloat(val1));
assertEquals(false, rehydrated.testFloat(val2));
assertEquals(false, rehydrated.testFloat(val3));
Assert.assertTrue(rehydrated.testFloat(val));
Assert.assertFalse(rehydrated.testFloat(val1));
Assert.assertFalse(rehydrated.testFloat(val2));
Assert.assertFalse(rehydrated.testFloat(val3));
BloomKFilter.addFloat(buffer, val1);
rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer));
buffer.position(0);
assertEquals(true, rehydrated.testFloat(val));
assertEquals(true, rehydrated.testFloat(val1));
assertEquals(false, rehydrated.testFloat(val2));
assertEquals(false, rehydrated.testFloat(val3));
Assert.assertTrue(rehydrated.testFloat(val));
Assert.assertTrue(rehydrated.testFloat(val1));
Assert.assertFalse(rehydrated.testFloat(val2));
Assert.assertFalse(rehydrated.testFloat(val3));
BloomKFilter.addFloat(buffer, val2);
rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer));
buffer.position(0);
assertEquals(true, rehydrated.testFloat(val));
assertEquals(true, rehydrated.testFloat(val1));
assertEquals(true, rehydrated.testFloat(val2));
assertEquals(false, rehydrated.testFloat(val3));
Assert.assertTrue(rehydrated.testFloat(val));
Assert.assertTrue(rehydrated.testFloat(val1));
Assert.assertTrue(rehydrated.testFloat(val2));
Assert.assertFalse(rehydrated.testFloat(val3));
BloomKFilter.addFloat(buffer, val3);
rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer));
buffer.position(0);
assertEquals(true, rehydrated.testFloat(val));
assertEquals(true, rehydrated.testFloat(val1));
assertEquals(true, rehydrated.testFloat(val2));
assertEquals(true, rehydrated.testFloat(val3));
Assert.assertTrue(rehydrated.testFloat(val));
Assert.assertTrue(rehydrated.testFloat(val1));
Assert.assertTrue(rehydrated.testFloat(val2));
Assert.assertTrue(rehydrated.testFloat(val3));
float randVal = 0;
for (int i = 0; i < COUNT; i++) {
@ -325,11 +323,11 @@ public class BloomKFilterTest
rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer));
// last value should be present
assertEquals(true, rehydrated.testFloat(randVal));
Assert.assertTrue(rehydrated.testFloat(randVal));
// most likely this value should not exist
assertEquals(false, rehydrated.testFloat(-120.2f));
Assert.assertFalse(rehydrated.testFloat(-120.2f));
assertEquals(7808, rehydrated.sizeInBytes());
Assert.assertEquals(7808, rehydrated.sizeInBytes());
}
@Test
@ -349,31 +347,31 @@ public class BloomKFilterTest
BloomKFilter.addDouble(buffer, val);
BloomKFilter rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer));
buffer.position(0);
assertEquals(true, rehydrated.testDouble(val));
assertEquals(false, rehydrated.testDouble(val1));
assertEquals(false, rehydrated.testDouble(val2));
assertEquals(false, rehydrated.testDouble(val3));
Assert.assertTrue(rehydrated.testDouble(val));
Assert.assertFalse(rehydrated.testDouble(val1));
Assert.assertFalse(rehydrated.testDouble(val2));
Assert.assertFalse(rehydrated.testDouble(val3));
BloomKFilter.addDouble(buffer, val1);
rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer));
buffer.position(0);
assertEquals(true, rehydrated.testDouble(val));
assertEquals(true, rehydrated.testDouble(val1));
assertEquals(false, rehydrated.testDouble(val2));
assertEquals(false, rehydrated.testDouble(val3));
Assert.assertTrue(rehydrated.testDouble(val));
Assert.assertTrue(rehydrated.testDouble(val1));
Assert.assertFalse(rehydrated.testDouble(val2));
Assert.assertFalse(rehydrated.testDouble(val3));
BloomKFilter.addDouble(buffer, val2);
rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer));
buffer.position(0);
assertEquals(true, rehydrated.testDouble(val));
assertEquals(true, rehydrated.testDouble(val1));
assertEquals(true, rehydrated.testDouble(val2));
assertEquals(false, rehydrated.testDouble(val3));
Assert.assertTrue(rehydrated.testDouble(val));
Assert.assertTrue(rehydrated.testDouble(val1));
Assert.assertTrue(rehydrated.testDouble(val2));
Assert.assertFalse(rehydrated.testDouble(val3));
BloomKFilter.addDouble(buffer, val3);
rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer));
buffer.position(0);
assertEquals(true, rehydrated.testDouble(val));
assertEquals(true, rehydrated.testDouble(val1));
assertEquals(true, rehydrated.testDouble(val2));
assertEquals(true, rehydrated.testDouble(val3));
Assert.assertTrue(rehydrated.testDouble(val));
Assert.assertTrue(rehydrated.testDouble(val1));
Assert.assertTrue(rehydrated.testDouble(val2));
Assert.assertTrue(rehydrated.testDouble(val3));
double randVal = 0;
for (int i = 0; i < COUNT; i++) {
@ -383,11 +381,11 @@ public class BloomKFilterTest
rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer));
// last value should be present
assertEquals(true, rehydrated.testDouble(randVal));
Assert.assertTrue(rehydrated.testDouble(randVal));
// most likely this value should not exist
assertEquals(false, rehydrated.testDouble(-120.2d));
Assert.assertFalse(rehydrated.testDouble(-120.2d));
assertEquals(7808, rehydrated.sizeInBytes());
Assert.assertEquals(7808, rehydrated.sizeInBytes());
}
@Test
@ -407,31 +405,31 @@ public class BloomKFilterTest
BloomKFilter.addString(buffer, val);
BloomKFilter rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer));
buffer.position(0);
assertEquals(true, rehydrated.testString(val));
assertEquals(false, rehydrated.testString(val1));
assertEquals(false, rehydrated.testString(val2));
assertEquals(false, rehydrated.testString(val3));
Assert.assertTrue(rehydrated.testString(val));
Assert.assertFalse(rehydrated.testString(val1));
Assert.assertFalse(rehydrated.testString(val2));
Assert.assertFalse(rehydrated.testString(val3));
BloomKFilter.addString(buffer, val1);
rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer));
buffer.position(0);
assertEquals(true, rehydrated.testString(val));
assertEquals(true, rehydrated.testString(val1));
assertEquals(false, rehydrated.testString(val2));
assertEquals(false, rehydrated.testString(val3));
Assert.assertTrue(rehydrated.testString(val));
Assert.assertTrue(rehydrated.testString(val1));
Assert.assertFalse(rehydrated.testString(val2));
Assert.assertFalse(rehydrated.testString(val3));
BloomKFilter.addString(buffer, val2);
rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer));
buffer.position(0);
assertEquals(true, rehydrated.testString(val));
assertEquals(true, rehydrated.testString(val1));
assertEquals(true, rehydrated.testString(val2));
assertEquals(false, rehydrated.testString(val3));
Assert.assertTrue(rehydrated.testString(val));
Assert.assertTrue(rehydrated.testString(val1));
Assert.assertTrue(rehydrated.testString(val2));
Assert.assertFalse(rehydrated.testString(val3));
BloomKFilter.addString(buffer, val3);
rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer));
buffer.position(0);
assertEquals(true, rehydrated.testString(val));
assertEquals(true, rehydrated.testString(val1));
assertEquals(true, rehydrated.testString(val2));
assertEquals(true, rehydrated.testString(val3));
Assert.assertTrue(rehydrated.testString(val));
Assert.assertTrue(rehydrated.testString(val1));
Assert.assertTrue(rehydrated.testString(val2));
Assert.assertTrue(rehydrated.testString(val3));
long randVal = 0;
for (int i = 0; i < COUNT; i++) {
@ -440,11 +438,11 @@ public class BloomKFilterTest
}
rehydrated = BloomKFilter.deserialize(new ByteBufferInputStream(buffer));
// last value should be present
assertEquals(true, rehydrated.testString(Long.toString(randVal)));
Assert.assertTrue(rehydrated.testString(Long.toString(randVal)));
// most likely this value should not exist
assertEquals(false, rehydrated.testString(Long.toString(-120)));
Assert.assertFalse(rehydrated.testString(Long.toString(-120)));
assertEquals(77952, rehydrated.sizeInBytes());
Assert.assertEquals(77952, rehydrated.sizeInBytes());
}
@Test
@ -500,10 +498,10 @@ public class BloomKFilterTest
BloomKFilter bfMerged = BloomKFilter.deserialize(bytesIn);
// All values should pass test
for (String val : inputs1) {
assert bfMerged.testString(val);
Assert.assertTrue(bfMerged.testString(val));
}
for (String val : inputs2) {
assert bfMerged.testString(val);
Assert.assertTrue(bfMerged.testString(val));
}
}


@ -19,14 +19,13 @@
package org.apache.druid.storage.google;
import org.easymock.EasyMock;
import org.easymock.EasyMockSupport;
import org.junit.Test;
import java.io.IOException;
import java.io.InputStream;
import static org.easymock.EasyMock.expect;
public class GoogleByteSourceTest extends EasyMockSupport
{
@Test
@ -37,7 +36,7 @@ public class GoogleByteSourceTest extends EasyMockSupport
GoogleStorage storage = createMock(GoogleStorage.class);
InputStream stream = createMock(InputStream.class);
expect(storage.get(bucket, path)).andReturn(stream);
EasyMock.expect(storage.get(bucket, path)).andReturn(stream);
replayAll();
@ -55,7 +54,7 @@ public class GoogleByteSourceTest extends EasyMockSupport
final String path = "/path/to/file";
GoogleStorage storage = createMock(GoogleStorage.class);
expect(storage.get(bucket, path)).andThrow(new IOException(""));
EasyMock.expect(storage.get(bucket, path)).andThrow(new IOException(""));
replayAll();


@ -35,19 +35,17 @@ import org.junit.Test;
import java.io.IOException;
import static org.easymock.EasyMock.expectLastCall;
public class GoogleDataSegmentKillerTest extends EasyMockSupport
{
private static final String bucket = "bucket";
private static final String indexPath = "test/2015-04-12T00:00:00.000Z_2015-04-13T00:00:00.000Z/1/0/index.zip";
private static final String descriptorPath = DataSegmentKiller.descriptorPath(indexPath);
private static final String BUCKET = "bucket";
private static final String INDEX_PATH = "test/2015-04-12T00:00:00.000Z_2015-04-13T00:00:00.000Z/1/0/index.zip";
private static final String DESCRIPTOR_PATH = DataSegmentKiller.descriptorPath(INDEX_PATH);
private static final DataSegment dataSegment = new DataSegment(
private static final DataSegment DATA_SEGMENT = new DataSegment(
"test",
Intervals.of("2015-04-12/2015-04-13"),
"1",
ImmutableMap.of("bucket", bucket, "path", indexPath),
ImmutableMap.of("bucket", BUCKET, "path", INDEX_PATH),
null,
null,
NoneShardSpec.instance(),
@ -66,16 +64,16 @@ public class GoogleDataSegmentKillerTest extends EasyMockSupport
@Test
public void killTest() throws SegmentLoadingException, IOException
{
storage.delete(EasyMock.eq(bucket), EasyMock.eq(indexPath));
expectLastCall();
storage.delete(EasyMock.eq(bucket), EasyMock.eq(descriptorPath));
expectLastCall();
storage.delete(EasyMock.eq(BUCKET), EasyMock.eq(INDEX_PATH));
EasyMock.expectLastCall();
storage.delete(EasyMock.eq(BUCKET), EasyMock.eq(DESCRIPTOR_PATH));
EasyMock.expectLastCall();
replayAll();
GoogleDataSegmentKiller killer = new GoogleDataSegmentKiller(storage);
killer.kill(dataSegment);
killer.kill(DATA_SEGMENT);
verifyAll();
}
@ -88,14 +86,14 @@ public class GoogleDataSegmentKillerTest extends EasyMockSupport
300,
"test"
);
storage.delete(EasyMock.eq(bucket), EasyMock.eq(indexPath));
expectLastCall().andThrow(exception);
storage.delete(EasyMock.eq(BUCKET), EasyMock.eq(INDEX_PATH));
EasyMock.expectLastCall().andThrow(exception);
replayAll();
GoogleDataSegmentKiller killer = new GoogleDataSegmentKiller(storage);
killer.kill(dataSegment);
killer.kill(DATA_SEGMENT);
verifyAll();
}
@ -108,16 +106,16 @@ public class GoogleDataSegmentKillerTest extends EasyMockSupport
500,
"test"
);
storage.delete(EasyMock.eq(bucket), EasyMock.eq(indexPath));
expectLastCall().andThrow(exception).once().andVoid().once();
storage.delete(EasyMock.eq(bucket), EasyMock.eq(descriptorPath));
expectLastCall().andThrow(exception).once().andVoid().once();
storage.delete(EasyMock.eq(BUCKET), EasyMock.eq(INDEX_PATH));
EasyMock.expectLastCall().andThrow(exception).once().andVoid().once();
storage.delete(EasyMock.eq(BUCKET), EasyMock.eq(DESCRIPTOR_PATH));
EasyMock.expectLastCall().andThrow(exception).once().andVoid().once();
replayAll();
GoogleDataSegmentKiller killer = new GoogleDataSegmentKiller(storage);
killer.kill(dataSegment);
killer.kill(DATA_SEGMENT);
verifyAll();
}


@ -26,19 +26,17 @@ import org.apache.commons.io.FileUtils;
import org.apache.druid.segment.loading.SegmentLoadingException;
import org.easymock.EasyMock;
import org.easymock.EasyMockSupport;
import org.junit.Assert;
import org.junit.Test;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import static org.easymock.EasyMock.expect;
import static org.junit.Assert.assertFalse;
public class GoogleDataSegmentPullerTest extends EasyMockSupport
{
private static final String bucket = "bucket";
private static final String path = "/path/to/storage/index.zip";
private static final String BUCKET = "bucket";
private static final String PATH = "/path/to/storage/index.zip";
@Test(expected = SegmentLoadingException.class)
public void testDeleteOutputDirectoryWhenErrorIsRaisedPullingSegmentFiles()
@ -52,14 +50,14 @@ public class GoogleDataSegmentPullerTest extends EasyMockSupport
300,
"test"
);
expect(storage.get(EasyMock.eq(bucket), EasyMock.eq(path))).andThrow(exception);
EasyMock.expect(storage.get(EasyMock.eq(BUCKET), EasyMock.eq(PATH))).andThrow(exception);
replayAll();
GoogleDataSegmentPuller puller = new GoogleDataSegmentPuller(storage);
puller.getSegmentFiles(bucket, path, outDir);
puller.getSegmentFiles(BUCKET, PATH, outDir);
assertFalse(outDir.exists());
Assert.assertFalse(outDir.exists());
verifyAll();
}


@ -36,15 +36,13 @@ import java.io.File;
import java.util.ArrayList;
import java.util.HashMap;
import static org.easymock.EasyMock.expectLastCall;
public class GoogleDataSegmentPusherTest extends EasyMockSupport
{
@Rule
public final TemporaryFolder tempFolder = new TemporaryFolder();
private static final String bucket = "bucket";
private static final String prefix = "prefix";
private static final String BUCKET = "bucket";
private static final String PREFIX = "prefix";
private GoogleStorage storage;
private GoogleAccountConfig googleAccountConfig;
@ -54,8 +52,8 @@ public class GoogleDataSegmentPusherTest extends EasyMockSupport
{
storage = createMock(GoogleStorage.class);
googleAccountConfig = new GoogleAccountConfig();
googleAccountConfig.setBucket(bucket);
googleAccountConfig.setPrefix(prefix);
googleAccountConfig.setBucket(BUCKET);
googleAccountConfig.setPrefix(PREFIX);
}
@Test
@ -86,14 +84,14 @@ public class GoogleDataSegmentPusherTest extends EasyMockSupport
.createMock();
final String storageDir = pusher.getStorageDir(segmentToPush, false);
final String indexPath = prefix + "/" + storageDir + "/" + "index.zip";
final String indexPath = PREFIX + "/" + storageDir + "/" + "index.zip";
pusher.insert(
EasyMock.anyObject(File.class),
EasyMock.eq("application/zip"),
EasyMock.eq(indexPath)
);
expectLastCall();
EasyMock.expectLastCall();
replayAll();
@ -102,12 +100,9 @@ public class GoogleDataSegmentPusherTest extends EasyMockSupport
Assert.assertEquals(segmentToPush.getSize(), segment.getSize());
Assert.assertEquals(segmentToPush, segment);
Assert.assertEquals(ImmutableMap.of(
"type",
GoogleStorageDruidModule.SCHEME,
"bucket",
bucket,
"path",
indexPath
"type", GoogleStorageDruidModule.SCHEME,
"bucket", BUCKET,
"path", indexPath
), segment.getLoadSpec());
verifyAll();


@ -38,14 +38,11 @@ import java.io.File;
import java.io.StringWriter;
import java.nio.charset.StandardCharsets;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.expectLastCall;
public class GoogleTaskLogsTest extends EasyMockSupport
{
private static final String bucket = "test";
private static final String prefix = "test/log";
private static final String taskid = "taskid";
private static final String BUCKET = "test";
private static final String PREFIX = "test/log";
private static final String TASKID = "taskid";
private GoogleStorage storage;
private GoogleTaskLogs googleTaskLogs;
@ -54,7 +51,7 @@ public class GoogleTaskLogsTest extends EasyMockSupport
public void before()
{
storage = createMock(GoogleStorage.class);
GoogleTaskLogsConfig config = new GoogleTaskLogsConfig(bucket, prefix);
GoogleTaskLogsConfig config = new GoogleTaskLogsConfig(BUCKET, PREFIX);
googleTaskLogs = new GoogleTaskLogs(config, storage);
}
@ -69,12 +66,16 @@ public class GoogleTaskLogsTest extends EasyMockSupport
output.write("test");
output.close();
storage.insert(EasyMock.eq(bucket), EasyMock.eq(prefix + "/" + taskid), EasyMock.anyObject(InputStreamContent.class));
expectLastCall();
storage.insert(
EasyMock.eq(BUCKET),
EasyMock.eq(PREFIX + "/" + TASKID),
EasyMock.anyObject(InputStreamContent.class)
);
EasyMock.expectLastCall();
replayAll();
googleTaskLogs.pushTaskLog(taskid, logFile);
googleTaskLogs.pushTaskLog(TASKID, logFile);
verifyAll();
}
@ -88,14 +89,14 @@ public class GoogleTaskLogsTest extends EasyMockSupport
{
final String testLog = "hello this is a log";
final String logPath = prefix + "/" + taskid;
expect(storage.exists(bucket, logPath)).andReturn(true);
expect(storage.size(bucket, logPath)).andReturn((long) testLog.length());
expect(storage.get(bucket, logPath)).andReturn(new ByteArrayInputStream(StringUtils.toUtf8(testLog)));
final String logPath = PREFIX + "/" + TASKID;
EasyMock.expect(storage.exists(BUCKET, logPath)).andReturn(true);
EasyMock.expect(storage.size(BUCKET, logPath)).andReturn((long) testLog.length());
EasyMock.expect(storage.get(BUCKET, logPath)).andReturn(new ByteArrayInputStream(StringUtils.toUtf8(testLog)));
replayAll();
final Optional<ByteSource> byteSource = googleTaskLogs.streamTaskLog(taskid, 0);
final Optional<ByteSource> byteSource = googleTaskLogs.streamTaskLog(TASKID, 0);
final StringWriter writer = new StringWriter();
IOUtils.copy(byteSource.get().openStream(), writer, "UTF-8");
@ -109,14 +110,14 @@ public class GoogleTaskLogsTest extends EasyMockSupport
{
final String testLog = "hello this is a log";
final String logPath = prefix + "/" + taskid;
expect(storage.exists(bucket, logPath)).andReturn(true);
expect(storage.size(bucket, logPath)).andReturn((long) testLog.length());
expect(storage.get(bucket, logPath)).andReturn(new ByteArrayInputStream(StringUtils.toUtf8(testLog)));
final String logPath = PREFIX + "/" + TASKID;
EasyMock.expect(storage.exists(BUCKET, logPath)).andReturn(true);
EasyMock.expect(storage.size(BUCKET, logPath)).andReturn((long) testLog.length());
EasyMock.expect(storage.get(BUCKET, logPath)).andReturn(new ByteArrayInputStream(StringUtils.toUtf8(testLog)));
replayAll();
final Optional<ByteSource> byteSource = googleTaskLogs.streamTaskLog(taskid, 5);
final Optional<ByteSource> byteSource = googleTaskLogs.streamTaskLog(TASKID, 5);
final StringWriter writer = new StringWriter();
IOUtils.copy(byteSource.get().openStream(), writer, "UTF-8");
@ -130,14 +131,14 @@ public class GoogleTaskLogsTest extends EasyMockSupport
{
final String testLog = "hello this is a log";
final String logPath = prefix + "/" + taskid;
expect(storage.exists(bucket, logPath)).andReturn(true);
expect(storage.size(bucket, logPath)).andReturn((long) testLog.length());
expect(storage.get(bucket, logPath)).andReturn(new ByteArrayInputStream(StringUtils.toUtf8(testLog)));
final String logPath = PREFIX + "/" + TASKID;
EasyMock.expect(storage.exists(BUCKET, logPath)).andReturn(true);
EasyMock.expect(storage.size(BUCKET, logPath)).andReturn((long) testLog.length());
EasyMock.expect(storage.get(BUCKET, logPath)).andReturn(new ByteArrayInputStream(StringUtils.toUtf8(testLog)));
replayAll();
final Optional<ByteSource> byteSource = googleTaskLogs.streamTaskLog(taskid, -3);
final Optional<ByteSource> byteSource = googleTaskLogs.streamTaskLog(TASKID, -3);
final StringWriter writer = new StringWriter();
IOUtils.copy(byteSource.get().openStream(), writer, "UTF-8");


@ -57,8 +57,6 @@ import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import static org.apache.druid.query.lookup.KafkaLookupExtractorFactory.DEFAULT_STRING_DECODER;
@RunWith(PowerMockRunner.class)
@PrepareForTest({
NamespaceExtractionCacheManager.class,
@ -80,7 +78,8 @@ public class KafkaLookupExtractorFactoryTest
"some.property", "some.value"
);
private final ObjectMapper mapper = new DefaultObjectMapper();
private final NamespaceExtractionCacheManager cacheManager = PowerMock.createStrictMock(NamespaceExtractionCacheManager.class);
private final NamespaceExtractionCacheManager cacheManager = PowerMock.createStrictMock(
NamespaceExtractionCacheManager.class);
private final CacheHandler cacheHandler = PowerMock.createStrictMock(CacheHandler.class);
@ -199,6 +198,7 @@ public class KafkaLookupExtractorFactoryTest
DEFAULT_PROPERTIES
);
factory1.getMapRef().set(ImmutableMap.of());
//noinspection StringConcatenationMissingWhitespace
final KafkaLookupExtractorFactory factory2 = new KafkaLookupExtractorFactory(
cacheManager,
TOPIC + "b",
@ -228,6 +228,7 @@ public class KafkaLookupExtractorFactoryTest
DEFAULT_PROPERTIES
)));
//noinspection StringConcatenationMissingWhitespace
Assert.assertTrue(factory.replaces(new KafkaLookupExtractorFactory(
cacheManager,
TOPIC + "b",
@ -283,29 +284,23 @@ public class KafkaLookupExtractorFactoryTest
EasyMock.expect(consumerConnector.createMessageStreamsByFilter(
EasyMock.anyObject(TopicFilter.class),
EasyMock.anyInt(),
EasyMock.eq(
DEFAULT_STRING_DECODER),
EasyMock.eq(DEFAULT_STRING_DECODER)
EasyMock.eq(KafkaLookupExtractorFactory.DEFAULT_STRING_DECODER),
EasyMock.eq(KafkaLookupExtractorFactory.DEFAULT_STRING_DECODER)
)).andReturn(ImmutableList.of(kafkaStream)).once();
EasyMock.expect(kafkaStream.iterator()).andReturn(consumerIterator).anyTimes();
EasyMock.expect(consumerIterator.hasNext()).andAnswer(getBlockingAnswer()).anyTimes();
EasyMock.expect(cacheManager.createCache())
.andReturn(cacheHandler)
.once();
EasyMock.expect(cacheHandler.getCache()).andReturn(new ConcurrentHashMap<String, String>()).once();
EasyMock.expect(cacheHandler.getCache()).andReturn(new ConcurrentHashMap<>()).once();
cacheHandler.close();
EasyMock.expectLastCall();
final AtomicBoolean threadWasInterrupted = new AtomicBoolean(false);
consumerConnector.shutdown();
EasyMock.expectLastCall().andAnswer(new IAnswer<Object>()
{
@Override
public Object answer()
{
threadWasInterrupted.set(Thread.currentThread().isInterrupted());
return null;
}
EasyMock.expectLastCall().andAnswer(() -> {
threadWasInterrupted.set(Thread.currentThread().isInterrupted());
return null;
}).times(2);
PowerMock.replay(cacheManager, cacheHandler, kafkaStream, consumerConnector, consumerIterator);
@ -379,16 +374,15 @@ public class KafkaLookupExtractorFactoryTest
EasyMock.expect(consumerConnector.createMessageStreamsByFilter(
EasyMock.anyObject(TopicFilter.class),
EasyMock.anyInt(),
EasyMock.eq(
DEFAULT_STRING_DECODER),
EasyMock.eq(DEFAULT_STRING_DECODER)
EasyMock.eq(KafkaLookupExtractorFactory.DEFAULT_STRING_DECODER),
EasyMock.eq(KafkaLookupExtractorFactory.DEFAULT_STRING_DECODER)
)).andReturn(ImmutableList.of(kafkaStream)).once();
EasyMock.expect(kafkaStream.iterator()).andReturn(consumerIterator).anyTimes();
EasyMock.expect(consumerIterator.hasNext()).andAnswer(getBlockingAnswer()).anyTimes();
EasyMock.expect(cacheManager.createCache())
.andReturn(cacheHandler)
.once();
EasyMock.expect(cacheHandler.getCache()).andReturn(new ConcurrentHashMap<String, String>()).once();
EasyMock.expect(cacheHandler.getCache()).andReturn(new ConcurrentHashMap<>()).once();
cacheHandler.close();
EasyMock.expectLastCall().once();
consumerConnector.shutdown();
@ -421,16 +415,15 @@ public class KafkaLookupExtractorFactoryTest
EasyMock.expect(consumerConnector.createMessageStreamsByFilter(
EasyMock.anyObject(TopicFilter.class),
EasyMock.anyInt(),
EasyMock.eq(
DEFAULT_STRING_DECODER),
EasyMock.eq(DEFAULT_STRING_DECODER)
EasyMock.eq(KafkaLookupExtractorFactory.DEFAULT_STRING_DECODER),
EasyMock.eq(KafkaLookupExtractorFactory.DEFAULT_STRING_DECODER)
)).andReturn(ImmutableList.of(kafkaStream)).once();
EasyMock.expect(kafkaStream.iterator()).andReturn(consumerIterator).anyTimes();
EasyMock.expect(consumerIterator.hasNext()).andAnswer(getBlockingAnswer()).anyTimes();
EasyMock.expect(cacheManager.createCache())
.andReturn(cacheHandler)
.once();
EasyMock.expect(cacheHandler.getCache()).andReturn(new ConcurrentHashMap<String, String>()).once();
EasyMock.expect(cacheHandler.getCache()).andReturn(new ConcurrentHashMap<>()).once();
cacheHandler.close();
EasyMock.expectLastCall().once();
consumerConnector.shutdown();
@ -544,21 +537,16 @@ public class KafkaLookupExtractorFactoryTest
public void testDefaultDecoder()
{
final String str = "some string";
Assert.assertEquals(str, DEFAULT_STRING_DECODER.fromBytes(StringUtils.toUtf8(str)));
Assert.assertEquals(str, KafkaLookupExtractorFactory.DEFAULT_STRING_DECODER.fromBytes(StringUtils.toUtf8(str)));
}
private IAnswer<Boolean> getBlockingAnswer()
{
return new IAnswer<Boolean>()
{
@Override
public Boolean answer() throws Throwable
{
Thread.sleep(60000);
Assert.fail("Test failed to complete within 60000ms");
return () -> {
Thread.sleep(60000);
Assert.fail("Test failed to complete within 60000ms");
return false;
}
return false;
};
}
}


@ -66,8 +66,6 @@ import java.util.Collections;
import java.util.List;
import java.util.Map;
import static org.easymock.EasyMock.expect;
@RunWith(Parameterized.class)
public class KafkaIndexTaskClientTest extends EasyMockSupport
{
@ -85,7 +83,7 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport
private static final long TEST_NUM_RETRIES = 0;
private static final String URL_FORMATTER = "http://%s:%d/druid/worker/v1/chat/%s/%s";
private int numThreads;
private final int numThreads;
private HttpClient httpClient;
private TaskInfoProvider taskInfoProvider;
private FullResponseHolder responseHolder;
@ -114,20 +112,20 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport
headers = createMock(HttpHeaders.class);
client = new TestableKafkaIndexTaskClient(httpClient, objectMapper, taskInfoProvider);
expect(taskInfoProvider.getTaskLocation(TEST_ID))
.andReturn(new TaskLocation(TEST_HOST, TEST_PORT, TEST_TLS_PORT))
.anyTimes();
expect(taskInfoProvider.getTaskStatus(TEST_ID))
.andReturn(Optional.of(TaskStatus.running(TEST_ID)))
.anyTimes();
EasyMock.expect(taskInfoProvider.getTaskLocation(TEST_ID))
.andReturn(new TaskLocation(TEST_HOST, TEST_PORT, TEST_TLS_PORT))
.anyTimes();
EasyMock.expect(taskInfoProvider.getTaskStatus(TEST_ID))
.andReturn(Optional.of(TaskStatus.running(TEST_ID)))
.anyTimes();
for (String testId : TEST_IDS) {
expect(taskInfoProvider.getTaskLocation(testId))
.andReturn(new TaskLocation(TEST_HOST, TEST_PORT, TEST_TLS_PORT))
.anyTimes();
expect(taskInfoProvider.getTaskStatus(testId))
.andReturn(Optional.of(TaskStatus.running(testId)))
.anyTimes();
EasyMock.expect(taskInfoProvider.getTaskLocation(testId))
.andReturn(new TaskLocation(TEST_HOST, TEST_PORT, TEST_TLS_PORT))
.anyTimes();
EasyMock.expect(taskInfoProvider.getTaskStatus(testId))
.andReturn(Optional.of(TaskStatus.running(testId)))
.anyTimes();
}
}
@ -141,22 +139,22 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport
public void testNoTaskLocation() throws IOException
{
EasyMock.reset(taskInfoProvider);
expect(taskInfoProvider.getTaskLocation(TEST_ID)).andReturn(TaskLocation.unknown()).anyTimes();
expect(taskInfoProvider.getTaskStatus(TEST_ID))
.andReturn(Optional.of(TaskStatus.running(TEST_ID)))
.anyTimes();
EasyMock.expect(taskInfoProvider.getTaskLocation(TEST_ID)).andReturn(TaskLocation.unknown()).anyTimes();
EasyMock.expect(taskInfoProvider.getTaskStatus(TEST_ID))
.andReturn(Optional.of(TaskStatus.running(TEST_ID)))
.anyTimes();
replayAll();
Assert.assertEquals(false, client.stop(TEST_ID, true));
Assert.assertEquals(false, client.resume(TEST_ID));
Assert.assertFalse(client.stop(TEST_ID, true));
Assert.assertFalse(client.resume(TEST_ID));
Assert.assertEquals(ImmutableMap.of(), client.pause(TEST_ID));
Assert.assertEquals(ImmutableMap.of(), client.pause(TEST_ID));
Assert.assertEquals(Status.NOT_STARTED, client.getStatus(TEST_ID));
Assert.assertEquals(null, client.getStartTime(TEST_ID));
Assert.assertNull(client.getStartTime(TEST_ID));
Assert.assertEquals(ImmutableMap.of(), client.getCurrentOffsets(TEST_ID, true));
Assert.assertEquals(ImmutableMap.of(), client.getEndOffsets(TEST_ID));
Assert.assertEquals(false, client.setEndOffsets(TEST_ID, Collections.emptyMap(), true));
Assert.assertEquals(false, client.setEndOffsets(TEST_ID, Collections.emptyMap(), true));
Assert.assertFalse(client.setEndOffsets(TEST_ID, Collections.emptyMap(), true));
Assert.assertFalse(client.setEndOffsets(TEST_ID, Collections.emptyMap(), true));
verifyAll();
}
@ -168,12 +166,12 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport
expectedException.expectMessage("Aborting request because task [test-id] is not runnable");
EasyMock.reset(taskInfoProvider);
expect(taskInfoProvider.getTaskLocation(TEST_ID))
.andReturn(new TaskLocation(TEST_HOST, TEST_PORT, TEST_TLS_PORT))
.anyTimes();
expect(taskInfoProvider.getTaskStatus(TEST_ID))
.andReturn(Optional.of(TaskStatus.failure(TEST_ID)))
.anyTimes();
EasyMock.expect(taskInfoProvider.getTaskLocation(TEST_ID))
.andReturn(new TaskLocation(TEST_HOST, TEST_PORT, TEST_TLS_PORT))
.anyTimes();
EasyMock.expect(taskInfoProvider.getTaskStatus(TEST_ID))
.andReturn(Optional.of(TaskStatus.failure(TEST_ID)))
.anyTimes();
replayAll();
client.getCurrentOffsets(TEST_ID, true);
@ -186,9 +184,9 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport
expectedException.expect(RuntimeException.class);
expectedException.expectMessage("org.apache.druid.java.util.common.IOE: Received status [500] and content []");
expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.INTERNAL_SERVER_ERROR).times(2);
expect(responseHolder.getContent()).andReturn("");
expect(
EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.INTERNAL_SERVER_ERROR).times(2);
EasyMock.expect(responseHolder.getContent()).andReturn("");
EasyMock.expect(
httpClient.go(
EasyMock.anyObject(Request.class),
EasyMock.anyObject(FullResponseHandler.class),
@ -209,9 +207,9 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport
expectedException.expect(IAE.class);
expectedException.expectMessage("Received 400 Bad Request with body:");
expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.BAD_REQUEST).times(2);
expect(responseHolder.getContent()).andReturn("");
expect(
EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.BAD_REQUEST).times(2);
EasyMock.expect(responseHolder.getContent()).andReturn("");
EasyMock.expect(
httpClient.go(
EasyMock.anyObject(Request.class),
EasyMock.anyObject(FullResponseHandler.class),
@ -229,14 +227,14 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport
@Test
public void testTaskLocationMismatch()
{
expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.NOT_FOUND).times(3)
.andReturn(HttpResponseStatus.OK);
expect(responseHolder.getResponse()).andReturn(response);
expect(responseHolder.getContent()).andReturn("").times(2)
.andReturn("{}");
expect(response.headers()).andReturn(headers);
expect(headers.get("X-Druid-Task-Id")).andReturn("a-different-task-id");
expect(
EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.NOT_FOUND).times(3)
.andReturn(HttpResponseStatus.OK);
EasyMock.expect(responseHolder.getResponse()).andReturn(response);
EasyMock.expect(responseHolder.getContent()).andReturn("").times(2)
.andReturn("{}");
EasyMock.expect(response.headers()).andReturn(headers);
EasyMock.expect(headers.get("X-Druid-Task-Id")).andReturn("a-different-task-id");
EasyMock.expect(
httpClient.go(
EasyMock.anyObject(Request.class),
EasyMock.anyObject(FullResponseHandler.class),
@ -257,9 +255,9 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport
public void testGetCurrentOffsets() throws Exception
{
Capture<Request> captured = Capture.newInstance();
expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK);
expect(responseHolder.getContent()).andReturn("{\"0\":1, \"1\":10}");
expect(httpClient.go(
EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK);
EasyMock.expect(responseHolder.getContent()).andReturn("{\"0\":1, \"1\":10}");
EasyMock.expect(httpClient.go(
EasyMock.capture(captured),
EasyMock.anyObject(FullResponseHandler.class),
EasyMock.eq(TEST_HTTP_TIMEOUT)
@ -290,15 +288,15 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport
client = new TestableKafkaIndexTaskClient(httpClient, objectMapper, taskInfoProvider, 3);
Capture<Request> captured = Capture.newInstance(CaptureType.ALL);
expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.NOT_FOUND).times(6)
.andReturn(HttpResponseStatus.OK).times(1);
expect(responseHolder.getContent()).andReturn("").times(4)
.andReturn("{\"0\":1, \"1\":10}");
expect(responseHolder.getResponse()).andReturn(response).times(2);
expect(response.headers()).andReturn(headers).times(2);
expect(headers.get("X-Druid-Task-Id")).andReturn(TEST_ID).times(2);
EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.NOT_FOUND).times(6)
.andReturn(HttpResponseStatus.OK).times(1);
EasyMock.expect(responseHolder.getContent()).andReturn("").times(4)
.andReturn("{\"0\":1, \"1\":10}");
EasyMock.expect(responseHolder.getResponse()).andReturn(response).times(2);
EasyMock.expect(response.headers()).andReturn(headers).times(2);
EasyMock.expect(headers.get("X-Druid-Task-Id")).andReturn(TEST_ID).times(2);
expect(httpClient.go(
EasyMock.expect(httpClient.go(
EasyMock.capture(captured),
EasyMock.anyObject(FullResponseHandler.class),
EasyMock.eq(TEST_HTTP_TIMEOUT)
@ -334,13 +332,13 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport
client = new TestableKafkaIndexTaskClient(httpClient, objectMapper, taskInfoProvider, 2);
expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.NOT_FOUND).anyTimes();
expect(responseHolder.getContent()).andReturn("").anyTimes();
expect(responseHolder.getResponse()).andReturn(response).anyTimes();
expect(response.headers()).andReturn(headers).anyTimes();
expect(headers.get("X-Druid-Task-Id")).andReturn(TEST_ID).anyTimes();
EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.NOT_FOUND).anyTimes();
EasyMock.expect(responseHolder.getContent()).andReturn("").anyTimes();
EasyMock.expect(responseHolder.getResponse()).andReturn(response).anyTimes();
EasyMock.expect(response.headers()).andReturn(headers).anyTimes();
EasyMock.expect(headers.get("X-Druid-Task-Id")).andReturn(TEST_ID).anyTimes();
expect(
EasyMock.expect(
httpClient.go(
EasyMock.anyObject(Request.class),
EasyMock.anyObject(FullResponseHandler.class),
@ -357,9 +355,9 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport
public void testGetEndOffsets() throws Exception
{
Capture<Request> captured = Capture.newInstance();
expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK);
expect(responseHolder.getContent()).andReturn("{\"0\":1, \"1\":10}");
expect(httpClient.go(
EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK);
EasyMock.expect(responseHolder.getContent()).andReturn("{\"0\":1, \"1\":10}");
EasyMock.expect(httpClient.go(
EasyMock.capture(captured),
EasyMock.anyObject(FullResponseHandler.class),
EasyMock.eq(TEST_HTTP_TIMEOUT)
@ -391,13 +389,13 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport
DateTime now = DateTimes.nowUtc();
Capture<Request> captured = Capture.newInstance();
expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.NOT_FOUND).times(3)
.andReturn(HttpResponseStatus.OK);
expect(responseHolder.getResponse()).andReturn(response);
expect(response.headers()).andReturn(headers);
expect(headers.get("X-Druid-Task-Id")).andReturn(null);
expect(responseHolder.getContent()).andReturn(String.valueOf(now.getMillis())).anyTimes();
expect(httpClient.go(
EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.NOT_FOUND).times(3)
.andReturn(HttpResponseStatus.OK);
EasyMock.expect(responseHolder.getResponse()).andReturn(response);
EasyMock.expect(response.headers()).andReturn(headers);
EasyMock.expect(headers.get("X-Druid-Task-Id")).andReturn(null);
EasyMock.expect(responseHolder.getContent()).andReturn(String.valueOf(now.getMillis())).anyTimes();
EasyMock.expect(httpClient.go(
EasyMock.capture(captured),
EasyMock.anyObject(FullResponseHandler.class),
EasyMock.eq(TEST_HTTP_TIMEOUT)
@ -426,9 +424,9 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport
Status status = Status.READING;
Capture<Request> captured = Capture.newInstance();
expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK);
expect(responseHolder.getContent()).andReturn(StringUtils.format("\"%s\"", status.toString())).anyTimes();
expect(httpClient.go(
EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK);
EasyMock.expect(responseHolder.getContent()).andReturn(StringUtils.format("\"%s\"", status.toString())).anyTimes();
EasyMock.expect(httpClient.go(
EasyMock.capture(captured),
EasyMock.anyObject(FullResponseHandler.class),
EasyMock.eq(TEST_HTTP_TIMEOUT)
@ -455,9 +453,9 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport
public void testPause() throws Exception
{
Capture<Request> captured = Capture.newInstance();
expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).times(2);
expect(responseHolder.getContent()).andReturn("{\"0\":1, \"1\":10}").anyTimes();
expect(httpClient.go(
EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).times(2);
EasyMock.expect(responseHolder.getContent()).andReturn("{\"0\":1, \"1\":10}").anyTimes();
EasyMock.expect(httpClient.go(
EasyMock.capture(captured),
EasyMock.anyObject(FullResponseHandler.class),
EasyMock.eq(TEST_HTTP_TIMEOUT)
@ -489,25 +487,25 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport
Capture<Request> captured2 = Capture.newInstance();
Capture<Request> captured3 = Capture.newInstance();
// one time in IndexTaskClient.submitRequest() and another in KafkaIndexTaskClient.pause()
expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.ACCEPTED).times(2)
.andReturn(HttpResponseStatus.OK).anyTimes();
expect(responseHolder.getContent()).andReturn("\"PAUSED\"").times(2)
.andReturn("{\"0\":1, \"1\":10}").anyTimes();
expect(httpClient.go(
EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.ACCEPTED).times(2)
.andReturn(HttpResponseStatus.OK).anyTimes();
EasyMock.expect(responseHolder.getContent()).andReturn("\"PAUSED\"").times(2)
.andReturn("{\"0\":1, \"1\":10}").anyTimes();
EasyMock.expect(httpClient.go(
EasyMock.capture(captured),
EasyMock.anyObject(FullResponseHandler.class),
EasyMock.eq(TEST_HTTP_TIMEOUT)
)).andReturn(
Futures.immediateFuture(responseHolder)
);
expect(httpClient.go(
EasyMock.expect(httpClient.go(
EasyMock.capture(captured2),
EasyMock.anyObject(FullResponseHandler.class),
EasyMock.eq(TEST_HTTP_TIMEOUT)
)).andReturn(
Futures.immediateFuture(responseHolder)
);
expect(httpClient.go(
EasyMock.expect(httpClient.go(
EasyMock.capture(captured3),
EasyMock.anyObject(FullResponseHandler.class),
EasyMock.eq(TEST_HTTP_TIMEOUT)
@ -551,8 +549,8 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport
public void testResume() throws Exception
{
Capture<Request> captured = Capture.newInstance();
expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes();
expect(httpClient.go(
EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes();
EasyMock.expect(httpClient.go(
EasyMock.capture(captured),
EasyMock.anyObject(FullResponseHandler.class),
EasyMock.eq(TEST_HTTP_TIMEOUT)
@ -579,8 +577,8 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport
Map<Integer, Long> endOffsets = ImmutableMap.of(0, 15L, 1, 120L);
Capture<Request> captured = Capture.newInstance();
expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes();
expect(httpClient.go(
EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes();
EasyMock.expect(httpClient.go(
EasyMock.capture(captured),
EasyMock.anyObject(FullResponseHandler.class),
EasyMock.eq(TEST_HTTP_TIMEOUT)
@ -608,8 +606,8 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport
Map<Integer, Long> endOffsets = ImmutableMap.of(0, 15L, 1, 120L);
Capture<Request> captured = Capture.newInstance();
expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes();
expect(httpClient.go(
EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes();
EasyMock.expect(httpClient.go(
EasyMock.capture(captured),
EasyMock.anyObject(FullResponseHandler.class),
EasyMock.eq(TEST_HTTP_TIMEOUT)
@ -635,8 +633,8 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport
public void testStop() throws Exception
{
Capture<Request> captured = Capture.newInstance();
expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes();
expect(httpClient.go(
EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes();
EasyMock.expect(httpClient.go(
EasyMock.capture(captured),
EasyMock.anyObject(FullResponseHandler.class),
EasyMock.eq(TEST_HTTP_TIMEOUT)
@ -661,8 +659,8 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport
public void testStopAndPublish() throws Exception
{
Capture<Request> captured = Capture.newInstance();
expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes();
expect(httpClient.go(
EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes();
EasyMock.expect(httpClient.go(
EasyMock.capture(captured),
EasyMock.anyObject(FullResponseHandler.class),
EasyMock.eq(TEST_HTTP_TIMEOUT)
@ -688,8 +686,8 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport
{
final int numRequests = TEST_IDS.size();
Capture<Request> captured = Capture.newInstance(CaptureType.ALL);
expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes();
expect(httpClient.go(
EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes();
EasyMock.expect(httpClient.go(
EasyMock.capture(captured),
EasyMock.anyObject(FullResponseHandler.class),
EasyMock.eq(TEST_HTTP_TIMEOUT)
@ -724,8 +722,8 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport
{
final int numRequests = TEST_IDS.size();
Capture<Request> captured = Capture.newInstance(CaptureType.ALL);
expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes();
expect(httpClient.go(
EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes();
EasyMock.expect(httpClient.go(
EasyMock.capture(captured),
EasyMock.anyObject(FullResponseHandler.class),
EasyMock.eq(TEST_HTTP_TIMEOUT)
@ -760,9 +758,9 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport
{
final int numRequests = TEST_IDS.size();
Capture<Request> captured = Capture.newInstance(CaptureType.ALL);
expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes();
expect(responseHolder.getContent()).andReturn("{\"0\":\"1\"}").anyTimes();
expect(httpClient.go(
EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes();
EasyMock.expect(responseHolder.getContent()).andReturn("{\"0\":\"1\"}").anyTimes();
EasyMock.expect(httpClient.go(
EasyMock.capture(captured),
EasyMock.anyObject(FullResponseHandler.class),
EasyMock.eq(TEST_HTTP_TIMEOUT)
@ -797,9 +795,9 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport
{
final int numRequests = TEST_IDS.size();
Capture<Request> captured = Capture.newInstance(CaptureType.ALL);
expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes();
expect(responseHolder.getContent()).andReturn("\"READING\"").anyTimes();
expect(httpClient.go(
EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes();
EasyMock.expect(responseHolder.getContent()).andReturn("\"READING\"").anyTimes();
EasyMock.expect(httpClient.go(
EasyMock.capture(captured),
EasyMock.anyObject(FullResponseHandler.class),
EasyMock.eq(TEST_HTTP_TIMEOUT)
@ -835,9 +833,9 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport
final DateTime now = DateTimes.nowUtc();
final int numRequests = TEST_IDS.size();
Capture<Request> captured = Capture.newInstance(CaptureType.ALL);
expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes();
expect(responseHolder.getContent()).andReturn(String.valueOf(now.getMillis())).anyTimes();
expect(httpClient.go(
EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes();
EasyMock.expect(responseHolder.getContent()).andReturn(String.valueOf(now.getMillis())).anyTimes();
EasyMock.expect(httpClient.go(
EasyMock.capture(captured),
EasyMock.anyObject(FullResponseHandler.class),
EasyMock.eq(TEST_HTTP_TIMEOUT)
@ -872,9 +870,9 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport
{
final int numRequests = TEST_IDS.size();
Capture<Request> captured = Capture.newInstance(CaptureType.ALL);
expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes();
expect(responseHolder.getContent()).andReturn("{\"0\":\"1\"}").anyTimes();
expect(httpClient.go(
EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes();
EasyMock.expect(responseHolder.getContent()).andReturn("{\"0\":\"1\"}").anyTimes();
EasyMock.expect(httpClient.go(
EasyMock.capture(captured),
EasyMock.anyObject(FullResponseHandler.class),
EasyMock.eq(TEST_HTTP_TIMEOUT)
@ -909,9 +907,9 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport
{
final int numRequests = TEST_IDS.size();
Capture<Request> captured = Capture.newInstance(CaptureType.ALL);
expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes();
expect(responseHolder.getContent()).andReturn("{\"0\":\"1\"}").anyTimes();
expect(httpClient.go(
EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes();
EasyMock.expect(responseHolder.getContent()).andReturn("{\"0\":\"1\"}").anyTimes();
EasyMock.expect(httpClient.go(
EasyMock.capture(captured),
EasyMock.anyObject(FullResponseHandler.class),
EasyMock.eq(TEST_HTTP_TIMEOUT)
@ -947,8 +945,8 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport
final Map<Integer, Long> endOffsets = ImmutableMap.of(0, 15L, 1, 120L);
final int numRequests = TEST_IDS.size();
Capture<Request> captured = Capture.newInstance(CaptureType.ALL);
expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes();
expect(httpClient.go(
EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes();
EasyMock.expect(httpClient.go(
EasyMock.capture(captured),
EasyMock.anyObject(FullResponseHandler.class),
EasyMock.eq(TEST_HTTP_TIMEOUT)
@ -990,8 +988,8 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport
final Map<Integer, Long> endOffsets = ImmutableMap.of(0, 15L, 1, 120L);
final int numRequests = TEST_IDS.size();
Capture<Request> captured = Capture.newInstance(CaptureType.ALL);
expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes();
expect(httpClient.go(
EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes();
EasyMock.expect(httpClient.go(
EasyMock.capture(captured),
EasyMock.anyObject(FullResponseHandler.class),
EasyMock.eq(TEST_HTTP_TIMEOUT)
@ -1033,7 +1031,7 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport
private class TestableKafkaIndexTaskClient extends KafkaIndexTaskClient
{
public TestableKafkaIndexTaskClient(
TestableKafkaIndexTaskClient(
HttpClient httpClient,
ObjectMapper jsonMapper,
TaskInfoProvider taskInfoProvider
@ -1042,7 +1040,7 @@ public class KafkaIndexTaskClientTest extends EasyMockSupport
this(httpClient, jsonMapper, taskInfoProvider, TEST_NUM_RETRIES);
}
public TestableKafkaIndexTaskClient(
TestableKafkaIndexTaskClient(
HttpClient httpClient,
ObjectMapper jsonMapper,
TaskInfoProvider taskInfoProvider,


@ -23,7 +23,6 @@ import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.core.type.TypeReference;
import com.fasterxml.jackson.databind.Module;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.google.common.base.Predicate;
import com.google.common.base.Predicates;
import com.google.common.collect.FluentIterable;
import com.google.common.collect.ImmutableList;
@ -106,11 +105,11 @@ import org.apache.druid.query.DefaultQueryRunnerFactoryConglomerate;
import org.apache.druid.query.Druids;
import org.apache.druid.query.IntervalChunkingQueryRunnerDecorator;
import org.apache.druid.query.Query;
import org.apache.druid.query.QueryPlus;
import org.apache.druid.query.QueryRunner;
import org.apache.druid.query.QueryRunnerFactory;
import org.apache.druid.query.QueryRunnerFactoryConglomerate;
import org.apache.druid.query.QueryToolChest;
import org.apache.druid.query.QueryWatcher;
import org.apache.druid.query.Result;
import org.apache.druid.query.SegmentDescriptor;
import org.apache.druid.query.aggregation.AggregatorFactory;
@ -185,8 +184,6 @@ import java.util.concurrent.Executors;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.TimeoutException;
import static org.apache.druid.query.QueryPlus.wrap;
public class KafkaIndexTaskTest
{
private static final Logger log = new Logger(KafkaIndexTaskTest.class);
@ -663,7 +660,7 @@ public class KafkaIndexTaskTest
final Map<Integer, Long> nextOffsets = ImmutableMap.copyOf(task.getRunner().getCurrentOffsets());
Assert.assertTrue(checkpoint2.getPartitionSequenceNumberMap().equals(nextOffsets));
Assert.assertEquals(checkpoint2.getPartitionSequenceNumberMap(), nextOffsets);
task.getRunner().setEndOffsets(nextOffsets, false);
Assert.assertEquals(TaskState.SUCCESS, future.get().getStatusCode());
@ -783,7 +780,7 @@ public class KafkaIndexTaskTest
Thread.sleep(10);
}
final Map<Integer, Long> currentOffsets = ImmutableMap.copyOf(task.getRunner().getCurrentOffsets());
Assert.assertTrue(checkpoint.getPartitionSequenceNumberMap().equals(currentOffsets));
Assert.assertEquals(checkpoint.getPartitionSequenceNumberMap(), currentOffsets);
task.getRunner().setEndOffsets(currentOffsets, false);
Assert.assertEquals(TaskState.SUCCESS, future.get().getStatusCode());
@ -1287,7 +1284,7 @@ public class KafkaIndexTaskTest
// Wait for task to exit
Assert.assertEquals(TaskState.SUCCESS, status.getStatusCode());
Assert.assertEquals(null, status.getErrorMsg());
Assert.assertNull(status.getErrorMsg());
// Check metrics
Assert.assertEquals(4, task.getRunner().getRowIngestionMeters().getProcessed());
@ -2079,9 +2076,9 @@ public class KafkaIndexTaskTest
}
for (int i = 0; i < 5; i++) {
Assert.assertEquals(task.getRunner().getStatus(), Status.READING);
Assert.assertEquals(Status.READING, task.getRunner().getStatus());
// Offset should not be reset
Assert.assertTrue(task.getRunner().getCurrentOffsets().get(0) == 200L);
Assert.assertEquals(200L, (long) task.getRunner().getCurrentOffsets().get(0));
}
}
@ -2362,9 +2359,7 @@ public class KafkaIndexTaskTest
{
ScanQuery query = new Druids.ScanQueryBuilder().dataSource(
DATA_SCHEMA.getDataSource()).intervals(spec).build();
List<ScanResultValue> results =
task.getQueryRunner(query).run(wrap(query), new HashMap<>()).toList();
return results;
return task.getQueryRunner(query).run(QueryPlus.wrap(query), new HashMap<>()).toList();
}
private void insertData() throws ExecutionException, InterruptedException
@ -2381,6 +2376,7 @@ public class KafkaIndexTaskTest
private ListenableFuture<TaskStatus> runTask(final Task task)
{
//noinspection CatchMayIgnoreException
try {
taskStorage.insert(task, TaskStatus.running(task.getId()));
}
@ -2413,14 +2409,7 @@ public class KafkaIndexTaskTest
{
return Iterables.find(
taskLockbox.findLocksForTask(task),
new Predicate<TaskLock>()
{
@Override
public boolean apply(TaskLock lock)
{
return lock.getInterval().contains(interval);
}
}
lock -> lock.getInterval().contains(interval)
);
}
@ -2535,13 +2524,8 @@ public class KafkaIndexTaskTest
new TimeseriesQueryRunnerFactory(
new TimeseriesQueryQueryToolChest(queryRunnerDecorator),
new TimeseriesQueryEngine(),
new QueryWatcher()
{
@Override
public void registerQuery(Query query, ListenableFuture future)
{
// do nothing
}
(query, future) -> {
// do nothing
}
)
)
@ -2773,7 +2757,7 @@ public class KafkaIndexTaskTest
return values;
}
public long countEvents(final Task task)
private long countEvents(final Task task)
{
// Do a query.
TimeseriesQuery query = Druids.newTimeseriesQueryBuilder()
@ -2787,7 +2771,7 @@ public class KafkaIndexTaskTest
.build();
List<Result<TimeseriesResultValue>> results =
task.getQueryRunner(query).run(wrap(query), ImmutableMap.of()).toList();
task.getQueryRunner(query).run(QueryPlus.wrap(query), ImmutableMap.of()).toList();
return results.isEmpty() ? 0L : DimensionHandlerUtils.nullToZero(results.get(0).getValue().getLongMetric("rows"));
}


@ -66,8 +66,6 @@ import java.util.Collections;
import java.util.List;
import java.util.Map;
import static org.easymock.EasyMock.expect;
@RunWith(Parameterized.class)
public class KinesisIndexTaskClientTest extends EasyMockSupport
@ -86,7 +84,7 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport
private static final long TEST_NUM_RETRIES = 0;
private static final String URL_FORMATTER = "http://%s:%d/druid/worker/v1/chat/%s/%s";
private int numThreads;
private final int numThreads;
private HttpClient httpClient;
private TaskInfoProvider taskInfoProvider;
private FullResponseHolder responseHolder;
@ -115,20 +113,20 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport
headers = createMock(HttpHeaders.class);
client = new TestableKinesisIndexTaskClient(httpClient, objectMapper, taskInfoProvider);
expect(taskInfoProvider.getTaskLocation(TEST_ID))
.andReturn(new TaskLocation(TEST_HOST, TEST_PORT, TEST_TLS_PORT))
.anyTimes();
expect(taskInfoProvider.getTaskStatus(TEST_ID))
.andReturn(Optional.of(TaskStatus.running(TEST_ID)))
.anyTimes();
EasyMock.expect(taskInfoProvider.getTaskLocation(TEST_ID))
.andReturn(new TaskLocation(TEST_HOST, TEST_PORT, TEST_TLS_PORT))
.anyTimes();
EasyMock.expect(taskInfoProvider.getTaskStatus(TEST_ID))
.andReturn(Optional.of(TaskStatus.running(TEST_ID)))
.anyTimes();
for (String testId : TEST_IDS) {
expect(taskInfoProvider.getTaskLocation(testId))
.andReturn(new TaskLocation(TEST_HOST, TEST_PORT, TEST_TLS_PORT))
.anyTimes();
expect(taskInfoProvider.getTaskStatus(testId))
.andReturn(Optional.of(TaskStatus.running(testId)))
.anyTimes();
EasyMock.expect(taskInfoProvider.getTaskLocation(testId))
.andReturn(new TaskLocation(TEST_HOST, TEST_PORT, TEST_TLS_PORT))
.anyTimes();
EasyMock.expect(taskInfoProvider.getTaskStatus(testId))
.andReturn(Optional.of(TaskStatus.running(testId)))
.anyTimes();
}
}
@ -142,10 +140,10 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport
public void testNoTaskLocation() throws IOException
{
EasyMock.reset(taskInfoProvider);
expect(taskInfoProvider.getTaskLocation(TEST_ID)).andReturn(TaskLocation.unknown()).anyTimes();
expect(taskInfoProvider.getTaskStatus(TEST_ID))
.andReturn(Optional.of(TaskStatus.running(TEST_ID)))
.anyTimes();
EasyMock.expect(taskInfoProvider.getTaskLocation(TEST_ID)).andReturn(TaskLocation.unknown()).anyTimes();
EasyMock.expect(taskInfoProvider.getTaskStatus(TEST_ID))
.andReturn(Optional.of(TaskStatus.running(TEST_ID)))
.anyTimes();
replayAll();
Assert.assertFalse(client.stop(TEST_ID, true));
@ -169,12 +167,12 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport
expectedException.expectMessage("Aborting request because task [test-id] is not runnable");
EasyMock.reset(taskInfoProvider);
expect(taskInfoProvider.getTaskLocation(TEST_ID))
.andReturn(new TaskLocation(TEST_HOST, TEST_PORT, TEST_TLS_PORT))
.anyTimes();
expect(taskInfoProvider.getTaskStatus(TEST_ID))
.andReturn(Optional.of(TaskStatus.failure(TEST_ID)))
.anyTimes();
EasyMock.expect(taskInfoProvider.getTaskLocation(TEST_ID))
.andReturn(new TaskLocation(TEST_HOST, TEST_PORT, TEST_TLS_PORT))
.anyTimes();
EasyMock.expect(taskInfoProvider.getTaskStatus(TEST_ID))
.andReturn(Optional.of(TaskStatus.failure(TEST_ID)))
.anyTimes();
replayAll();
client.getCurrentOffsets(TEST_ID, true);
@ -187,9 +185,9 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport
expectedException.expect(RuntimeException.class);
expectedException.expectMessage("org.apache.druid.java.util.common.IOE: Received status [500] and content []");
expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.INTERNAL_SERVER_ERROR).times(2);
expect(responseHolder.getContent()).andReturn("");
expect(
EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.INTERNAL_SERVER_ERROR).times(2);
EasyMock.expect(responseHolder.getContent()).andReturn("");
EasyMock.expect(
httpClient.go(
EasyMock.anyObject(Request.class),
EasyMock.anyObject(FullResponseHandler.class),
@ -210,9 +208,9 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport
expectedException.expect(IAE.class);
expectedException.expectMessage("Received 400 Bad Request with body:");
expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.BAD_REQUEST).times(2);
expect(responseHolder.getContent()).andReturn("");
expect(
EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.BAD_REQUEST).times(2);
EasyMock.expect(responseHolder.getContent()).andReturn("");
EasyMock.expect(
httpClient.go(
EasyMock.anyObject(Request.class),
EasyMock.anyObject(FullResponseHandler.class),
@ -230,14 +228,14 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport
@Test
public void testTaskLocationMismatch()
{
expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.NOT_FOUND).times(3)
.andReturn(HttpResponseStatus.OK);
expect(responseHolder.getResponse()).andReturn(response);
expect(responseHolder.getContent()).andReturn("").times(2)
.andReturn("{}");
expect(response.headers()).andReturn(headers);
expect(headers.get("X-Druid-Task-Id")).andReturn("a-different-task-id");
expect(
EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.NOT_FOUND).times(3)
.andReturn(HttpResponseStatus.OK);
EasyMock.expect(responseHolder.getResponse()).andReturn(response);
EasyMock.expect(responseHolder.getContent()).andReturn("").times(2)
.andReturn("{}");
EasyMock.expect(response.headers()).andReturn(headers);
EasyMock.expect(headers.get("X-Druid-Task-Id")).andReturn("a-different-task-id");
EasyMock.expect(
httpClient.go(
EasyMock.anyObject(Request.class),
EasyMock.anyObject(FullResponseHandler.class),
@ -258,9 +256,9 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport
public void testGetCurrentOffsets() throws Exception
{
Capture<Request> captured = Capture.newInstance();
expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK);
expect(responseHolder.getContent()).andReturn("{\"0\":1, \"1\":10}");
expect(httpClient.go(
EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK);
EasyMock.expect(responseHolder.getContent()).andReturn("{\"0\":1, \"1\":10}");
EasyMock.expect(httpClient.go(
EasyMock.capture(captured),
EasyMock.anyObject(FullResponseHandler.class),
EasyMock.eq(TEST_HTTP_TIMEOUT)
@ -291,15 +289,15 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport
client = new TestableKinesisIndexTaskClient(httpClient, objectMapper, taskInfoProvider, 3);
Capture<Request> captured = Capture.newInstance(CaptureType.ALL);
expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.NOT_FOUND).times(6)
.andReturn(HttpResponseStatus.OK).times(1);
expect(responseHolder.getContent()).andReturn("").times(4)
.andReturn("{\"0\":1, \"1\":10}");
expect(responseHolder.getResponse()).andReturn(response).times(2);
expect(response.headers()).andReturn(headers).times(2);
expect(headers.get("X-Druid-Task-Id")).andReturn(TEST_ID).times(2);
EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.NOT_FOUND).times(6)
.andReturn(HttpResponseStatus.OK).times(1);
EasyMock.expect(responseHolder.getContent()).andReturn("").times(4)
.andReturn("{\"0\":1, \"1\":10}");
EasyMock.expect(responseHolder.getResponse()).andReturn(response).times(2);
EasyMock.expect(response.headers()).andReturn(headers).times(2);
EasyMock.expect(headers.get("X-Druid-Task-Id")).andReturn(TEST_ID).times(2);
expect(httpClient.go(
EasyMock.expect(httpClient.go(
EasyMock.capture(captured),
EasyMock.anyObject(FullResponseHandler.class),
EasyMock.eq(TEST_HTTP_TIMEOUT)
@ -335,13 +333,13 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport
client = new TestableKinesisIndexTaskClient(httpClient, objectMapper, taskInfoProvider, 2);
expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.NOT_FOUND).anyTimes();
expect(responseHolder.getContent()).andReturn("").anyTimes();
expect(responseHolder.getResponse()).andReturn(response).anyTimes();
expect(response.headers()).andReturn(headers).anyTimes();
expect(headers.get("X-Druid-Task-Id")).andReturn(TEST_ID).anyTimes();
EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.NOT_FOUND).anyTimes();
EasyMock.expect(responseHolder.getContent()).andReturn("").anyTimes();
EasyMock.expect(responseHolder.getResponse()).andReturn(response).anyTimes();
EasyMock.expect(response.headers()).andReturn(headers).anyTimes();
EasyMock.expect(headers.get("X-Druid-Task-Id")).andReturn(TEST_ID).anyTimes();
expect(
EasyMock.expect(
httpClient.go(
EasyMock.anyObject(Request.class),
EasyMock.anyObject(FullResponseHandler.class),
@ -358,9 +356,9 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport
public void testGetEndOffsets() throws Exception
{
Capture<Request> captured = Capture.newInstance();
expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK);
expect(responseHolder.getContent()).andReturn("{\"0\":1, \"1\":10}");
expect(httpClient.go(
EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK);
EasyMock.expect(responseHolder.getContent()).andReturn("{\"0\":1, \"1\":10}");
EasyMock.expect(httpClient.go(
EasyMock.capture(captured),
EasyMock.anyObject(FullResponseHandler.class),
EasyMock.eq(TEST_HTTP_TIMEOUT)
@ -392,13 +390,13 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport
DateTime now = DateTimes.nowUtc();
Capture<Request> captured = Capture.newInstance();
expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.NOT_FOUND).times(3)
.andReturn(HttpResponseStatus.OK);
expect(responseHolder.getResponse()).andReturn(response);
expect(response.headers()).andReturn(headers);
expect(headers.get("X-Druid-Task-Id")).andReturn(null);
expect(responseHolder.getContent()).andReturn(String.valueOf(now.getMillis())).anyTimes();
expect(httpClient.go(
EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.NOT_FOUND).times(3)
.andReturn(HttpResponseStatus.OK);
EasyMock.expect(responseHolder.getResponse()).andReturn(response);
EasyMock.expect(response.headers()).andReturn(headers);
EasyMock.expect(headers.get("X-Druid-Task-Id")).andReturn(null);
EasyMock.expect(responseHolder.getContent()).andReturn(String.valueOf(now.getMillis())).anyTimes();
EasyMock.expect(httpClient.go(
EasyMock.capture(captured),
EasyMock.anyObject(FullResponseHandler.class),
EasyMock.eq(TEST_HTTP_TIMEOUT)
@ -427,9 +425,9 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport
Status status = Status.READING;
Capture<Request> captured = Capture.newInstance();
expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK);
expect(responseHolder.getContent()).andReturn(StringUtils.format("\"%s\"", status.toString())).anyTimes();
expect(httpClient.go(
EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK);
EasyMock.expect(responseHolder.getContent()).andReturn(StringUtils.format("\"%s\"", status.toString())).anyTimes();
EasyMock.expect(httpClient.go(
EasyMock.capture(captured),
EasyMock.anyObject(FullResponseHandler.class),
EasyMock.eq(TEST_HTTP_TIMEOUT)
@ -456,9 +454,9 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport
public void testPause() throws Exception
{
Capture<Request> captured = Capture.newInstance();
expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).times(2);
expect(responseHolder.getContent()).andReturn("{\"0\":1, \"1\":10}").anyTimes();
expect(httpClient.go(
EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).times(2);
EasyMock.expect(responseHolder.getContent()).andReturn("{\"0\":1, \"1\":10}").anyTimes();
EasyMock.expect(httpClient.go(
EasyMock.capture(captured),
EasyMock.anyObject(FullResponseHandler.class),
EasyMock.eq(TEST_HTTP_TIMEOUT)
@ -489,25 +487,25 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport
Capture<Request> captured = Capture.newInstance();
Capture<Request> captured2 = Capture.newInstance();
Capture<Request> captured3 = Capture.newInstance();
expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.ACCEPTED).times(2)
.andReturn(HttpResponseStatus.OK).anyTimes();
expect(responseHolder.getContent()).andReturn("\"PAUSED\"").times(2)
.andReturn("{\"0\":1, \"1\":10}").anyTimes();
expect(httpClient.go(
EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.ACCEPTED).times(2)
.andReturn(HttpResponseStatus.OK).anyTimes();
EasyMock.expect(responseHolder.getContent()).andReturn("\"PAUSED\"").times(2)
.andReturn("{\"0\":1, \"1\":10}").anyTimes();
EasyMock.expect(httpClient.go(
EasyMock.capture(captured),
EasyMock.anyObject(FullResponseHandler.class),
EasyMock.eq(TEST_HTTP_TIMEOUT)
)).andReturn(
Futures.immediateFuture(responseHolder)
);
expect(httpClient.go(
EasyMock.expect(httpClient.go(
EasyMock.capture(captured2),
EasyMock.anyObject(FullResponseHandler.class),
EasyMock.eq(TEST_HTTP_TIMEOUT)
)).andReturn(
Futures.immediateFuture(responseHolder)
);
expect(httpClient.go(
EasyMock.expect(httpClient.go(
EasyMock.capture(captured3),
EasyMock.anyObject(FullResponseHandler.class),
EasyMock.eq(TEST_HTTP_TIMEOUT)
@ -551,8 +549,8 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport
public void testResume() throws Exception
{
Capture<Request> captured = Capture.newInstance();
expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes();
expect(httpClient.go(
EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes();
EasyMock.expect(httpClient.go(
EasyMock.capture(captured),
EasyMock.anyObject(FullResponseHandler.class),
EasyMock.eq(TEST_HTTP_TIMEOUT)
@ -579,8 +577,8 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport
Map<String, String> endOffsets = ImmutableMap.of("0", "15", "1", "120");
Capture<Request> captured = Capture.newInstance();
expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes();
expect(httpClient.go(
EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes();
EasyMock.expect(httpClient.go(
EasyMock.capture(captured),
EasyMock.anyObject(FullResponseHandler.class),
EasyMock.eq(TEST_HTTP_TIMEOUT)
@ -608,8 +606,8 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport
Map<String, String> endOffsets = ImmutableMap.of("0", "15", "1", "120");
Capture<Request> captured = Capture.newInstance();
expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes();
expect(httpClient.go(
EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes();
EasyMock.expect(httpClient.go(
EasyMock.capture(captured),
EasyMock.anyObject(FullResponseHandler.class),
EasyMock.eq(TEST_HTTP_TIMEOUT)
@ -635,8 +633,8 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport
public void testStop() throws Exception
{
Capture<Request> captured = Capture.newInstance();
expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes();
expect(httpClient.go(
EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes();
EasyMock.expect(httpClient.go(
EasyMock.capture(captured),
EasyMock.anyObject(FullResponseHandler.class),
EasyMock.eq(TEST_HTTP_TIMEOUT)
@ -661,8 +659,8 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport
public void testStopAndPublish() throws Exception
{
Capture<Request> captured = Capture.newInstance();
expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes();
expect(httpClient.go(
EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes();
EasyMock.expect(httpClient.go(
EasyMock.capture(captured),
EasyMock.anyObject(FullResponseHandler.class),
EasyMock.eq(TEST_HTTP_TIMEOUT)
@ -688,8 +686,8 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport
{
final int numRequests = TEST_IDS.size();
Capture<Request> captured = Capture.newInstance(CaptureType.ALL);
expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes();
expect(httpClient.go(
EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes();
EasyMock.expect(httpClient.go(
EasyMock.capture(captured),
EasyMock.anyObject(FullResponseHandler.class),
EasyMock.eq(TEST_HTTP_TIMEOUT)
@ -724,8 +722,8 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport
{
final int numRequests = TEST_IDS.size();
Capture<Request> captured = Capture.newInstance(CaptureType.ALL);
expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes();
expect(httpClient.go(
EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes();
EasyMock.expect(httpClient.go(
EasyMock.capture(captured),
EasyMock.anyObject(FullResponseHandler.class),
EasyMock.eq(TEST_HTTP_TIMEOUT)
@ -760,9 +758,9 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport
{
final int numRequests = TEST_IDS.size();
Capture<Request> captured = Capture.newInstance(CaptureType.ALL);
expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes();
expect(responseHolder.getContent()).andReturn("{\"0\":\"1\"}").anyTimes();
expect(httpClient.go(
EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes();
EasyMock.expect(responseHolder.getContent()).andReturn("{\"0\":\"1\"}").anyTimes();
EasyMock.expect(httpClient.go(
EasyMock.capture(captured),
EasyMock.anyObject(FullResponseHandler.class),
EasyMock.eq(TEST_HTTP_TIMEOUT)
@ -797,9 +795,9 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport
{
final int numRequests = TEST_IDS.size();
Capture<Request> captured = Capture.newInstance(CaptureType.ALL);
expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes();
expect(responseHolder.getContent()).andReturn("\"READING\"").anyTimes();
expect(httpClient.go(
EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes();
EasyMock.expect(responseHolder.getContent()).andReturn("\"READING\"").anyTimes();
EasyMock.expect(httpClient.go(
EasyMock.capture(captured),
EasyMock.anyObject(FullResponseHandler.class),
EasyMock.eq(TEST_HTTP_TIMEOUT)
@ -835,9 +833,9 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport
final DateTime now = DateTimes.nowUtc();
final int numRequests = TEST_IDS.size();
Capture<Request> captured = Capture.newInstance(CaptureType.ALL);
expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes();
expect(responseHolder.getContent()).andReturn(String.valueOf(now.getMillis())).anyTimes();
expect(httpClient.go(
EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes();
EasyMock.expect(responseHolder.getContent()).andReturn(String.valueOf(now.getMillis())).anyTimes();
EasyMock.expect(httpClient.go(
EasyMock.capture(captured),
EasyMock.anyObject(FullResponseHandler.class),
EasyMock.eq(TEST_HTTP_TIMEOUT)
@ -872,9 +870,9 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport
{
final int numRequests = TEST_IDS.size();
Capture<Request> captured = Capture.newInstance(CaptureType.ALL);
expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes();
expect(responseHolder.getContent()).andReturn("{\"0\":\"1\"}").anyTimes();
expect(httpClient.go(
EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes();
EasyMock.expect(responseHolder.getContent()).andReturn("{\"0\":\"1\"}").anyTimes();
EasyMock.expect(httpClient.go(
EasyMock.capture(captured),
EasyMock.anyObject(FullResponseHandler.class),
EasyMock.eq(TEST_HTTP_TIMEOUT)
@ -909,9 +907,9 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport
{
final int numRequests = TEST_IDS.size();
Capture<Request> captured = Capture.newInstance(CaptureType.ALL);
expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes();
expect(responseHolder.getContent()).andReturn("{\"0\":\"1\"}").anyTimes();
expect(httpClient.go(
EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes();
EasyMock.expect(responseHolder.getContent()).andReturn("{\"0\":\"1\"}").anyTimes();
EasyMock.expect(httpClient.go(
EasyMock.capture(captured),
EasyMock.anyObject(FullResponseHandler.class),
EasyMock.eq(TEST_HTTP_TIMEOUT)
@ -947,8 +945,8 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport
Map<String, String> endOffsets = ImmutableMap.of("0", "15L", "1", "120L");
final int numRequests = TEST_IDS.size();
Capture<Request> captured = Capture.newInstance(CaptureType.ALL);
expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes();
expect(httpClient.go(
EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes();
EasyMock.expect(httpClient.go(
EasyMock.capture(captured),
EasyMock.anyObject(FullResponseHandler.class),
EasyMock.eq(TEST_HTTP_TIMEOUT)
@ -990,8 +988,8 @@ public class KinesisIndexTaskClientTest extends EasyMockSupport
Map<String, String> endOffsets = ImmutableMap.of("0", "15L", "1", "120L");
final int numRequests = TEST_IDS.size();
Capture<Request> captured = Capture.newInstance(CaptureType.ALL);
expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes();
expect(httpClient.go(
EasyMock.expect(responseHolder.getStatus()).andReturn(HttpResponseStatus.OK).anyTimes();
EasyMock.expect(httpClient.go(
EasyMock.capture(captured),
EasyMock.anyObject(FullResponseHandler.class),
EasyMock.eq(TEST_HTTP_TIMEOUT)


@ -38,6 +38,7 @@ import org.apache.druid.indexing.seekablestream.common.StreamPartition;
import org.apache.druid.java.util.common.ISE;
import org.apache.druid.java.util.common.StringUtils;
import org.easymock.Capture;
import org.easymock.EasyMock;
import org.easymock.EasyMockSupport;
import org.junit.After;
import org.junit.Assert;
@ -50,34 +51,15 @@ import java.util.List;
import java.util.Set;
import java.util.stream.Collectors;
import static org.easymock.EasyMock.anyObject;
import static org.easymock.EasyMock.anyString;
import static org.easymock.EasyMock.capture;
import static org.easymock.EasyMock.eq;
import static org.easymock.EasyMock.expect;
public class KinesisRecordSupplierTest extends EasyMockSupport
{
private static final String stream = "stream";
private static long poll_timeout_millis = 2000;
private static int recordsPerFetch;
private static String shardId1 = "1";
private static String shardId0 = "0";
private static String shard1Iterator = "1";
private static String shard0Iterator = "0";
private static AmazonKinesis kinesis;
private static DescribeStreamResult describeStreamResult0;
private static DescribeStreamResult describeStreamResult1;
private static GetShardIteratorResult getShardIteratorResult0;
private static GetShardIteratorResult getShardIteratorResult1;
private static GetRecordsResult getRecordsResult0;
private static GetRecordsResult getRecordsResult1;
private static StreamDescription streamDescription0;
private static StreamDescription streamDescription1;
private static Shard shard0;
private static Shard shard1;
private static KinesisRecordSupplier recordSupplier;
private static List<Record> shard1Records = ImmutableList.of(
private static final long POLL_TIMEOUT_MILLIS = 2000;
private static final String SHARD_ID1 = "1";
private static final String SHARD_ID0 = "0";
private static final String SHARD1_ITERATOR = "1";
private static final String SHARD0_ITERATOR = "0";
private static final List<Record> SHARD1_RECORDS = ImmutableList.of(
new Record().withData(jb("2011", "d", "y", "10", "20.0", "1.0")).withSequenceNumber("0"),
new Record().withData(jb("2011", "e", "y", "10", "20.0", "1.0")).withSequenceNumber("1"),
new Record().withData(jb("246140482-04-24T15:36:27.903Z", "x", "z", "10", "20.0", "1.0")).withSequenceNumber("2"),
@ -89,38 +71,38 @@ public class KinesisRecordSupplierTest extends EasyMockSupport
new Record().withData(jb("2012", "g", "y", "10", "20.0", "1.0")).withSequenceNumber("8"),
new Record().withData(jb("2011", "h", "y", "10", "20.0", "1.0")).withSequenceNumber("9")
);
private static List<Record> shard0Records = ImmutableList.of(
private static final List<Record> SHARD0_RECORDS = ImmutableList.of(
new Record().withData(jb("2008", "a", "y", "10", "20.0", "1.0")).withSequenceNumber("0"),
new Record().withData(jb("2009", "b", "y", "10", "20.0", "1.0")).withSequenceNumber("1")
);
private static List<Object> allRecords = ImmutableList.builder()
.addAll(shard0Records.stream()
.map(x -> new OrderedPartitionableRecord<>(
stream,
shardId0,
x.getSequenceNumber(),
Collections
.singletonList(
toByteArray(
x.getData()))
))
.collect(
Collectors
.toList()))
.addAll(shard1Records.stream()
.map(x -> new OrderedPartitionableRecord<>(
stream,
shardId1,
x.getSequenceNumber(),
Collections
.singletonList(
toByteArray(
x.getData()))
))
.collect(
Collectors
.toList()))
.build();
private static final List<Object> ALL_RECORDS = ImmutableList.builder()
.addAll(SHARD0_RECORDS.stream()
.map(x -> new OrderedPartitionableRecord<>(
stream,
SHARD_ID0,
x.getSequenceNumber(),
Collections
.singletonList(
toByteArray(
x.getData()))
))
.collect(
Collectors
.toList()))
.addAll(SHARD1_RECORDS.stream()
.map(x -> new OrderedPartitionableRecord<>(
stream,
SHARD_ID1,
x.getSequenceNumber(),
Collections
.singletonList(
toByteArray(
x.getData()))
))
.collect(
Collectors
.toList()))
.build();
private static ByteBuffer jb(String timestamp, String dim1, String dim2, String dimLong, String dimFloat, String met1)
{
@ -141,6 +123,20 @@ public class KinesisRecordSupplierTest extends EasyMockSupport
}
}
private static int recordsPerFetch;
private static AmazonKinesis kinesis;
private static DescribeStreamResult describeStreamResult0;
private static DescribeStreamResult describeStreamResult1;
private static GetShardIteratorResult getShardIteratorResult0;
private static GetShardIteratorResult getShardIteratorResult1;
private static GetRecordsResult getRecordsResult0;
private static GetRecordsResult getRecordsResult1;
private static StreamDescription streamDescription0;
private static StreamDescription streamDescription1;
private static Shard shard0;
private static Shard shard1;
private static KinesisRecordSupplier recordSupplier;
@Before
public void setupTest()
{
@ -170,22 +166,24 @@ public class KinesisRecordSupplierTest extends EasyMockSupport
{
final Capture<DescribeStreamRequest> capturedRequest = Capture.newInstance();
expect(kinesis.describeStream(capture(capturedRequest))).andReturn(describeStreamResult0).once();
expect(describeStreamResult0.getStreamDescription()).andReturn(streamDescription0).once();
expect(streamDescription0.getShards()).andReturn(ImmutableList.of(shard0)).once();
expect(streamDescription0.isHasMoreShards()).andReturn(true).once();
expect(shard0.getShardId()).andReturn(shardId0).times(2);
expect(kinesis.describeStream(anyObject(DescribeStreamRequest.class))).andReturn(describeStreamResult1).once();
expect(describeStreamResult1.getStreamDescription()).andReturn(streamDescription1).once();
expect(streamDescription1.getShards()).andReturn(ImmutableList.of(shard1)).once();
expect(streamDescription1.isHasMoreShards()).andReturn(false).once();
expect(shard1.getShardId()).andReturn(shardId1).once();
EasyMock.expect(kinesis.describeStream(EasyMock.capture(capturedRequest))).andReturn(describeStreamResult0).once();
EasyMock.expect(describeStreamResult0.getStreamDescription()).andReturn(streamDescription0).once();
EasyMock.expect(streamDescription0.getShards()).andReturn(ImmutableList.of(shard0)).once();
EasyMock.expect(streamDescription0.isHasMoreShards()).andReturn(true).once();
EasyMock.expect(shard0.getShardId()).andReturn(SHARD_ID0).times(2);
EasyMock.expect(kinesis.describeStream(EasyMock.anyObject(DescribeStreamRequest.class)))
.andReturn(describeStreamResult1)
.once();
EasyMock.expect(describeStreamResult1.getStreamDescription()).andReturn(streamDescription1).once();
EasyMock.expect(streamDescription1.getShards()).andReturn(ImmutableList.of(shard1)).once();
EasyMock.expect(streamDescription1.isHasMoreShards()).andReturn(false).once();
EasyMock.expect(shard1.getShardId()).andReturn(SHARD_ID1).once();
replayAll();
Set<StreamPartition<String>> partitions = ImmutableSet.of(
StreamPartition.of(stream, shardId0),
StreamPartition.of(stream, shardId1)
StreamPartition.of(stream, SHARD_ID0),
StreamPartition.of(stream, SHARD_ID1)
);
recordSupplier = new KinesisRecordSupplier(
@ -206,7 +204,7 @@ public class KinesisRecordSupplierTest extends EasyMockSupport
recordSupplier.assign(partitions);
Assert.assertEquals(partitions, recordSupplier.getAssignment());
Assert.assertEquals(ImmutableSet.of(shardId1, shardId0), recordSupplier.getPartitionIds(stream));
Assert.assertEquals(ImmutableSet.of(SHARD_ID1, SHARD_ID0), recordSupplier.getPartitionIds(stream));
Assert.assertEquals(Collections.emptyList(), recordSupplier.poll(100));
verifyAll();
@ -236,38 +234,40 @@ public class KinesisRecordSupplierTest extends EasyMockSupport
{
recordsPerFetch = 100;
expect(kinesis.getShardIterator(
anyObject(),
eq(shardId0),
anyString(),
anyString()
EasyMock.expect(kinesis.getShardIterator(
EasyMock.anyObject(),
EasyMock.eq(SHARD_ID0),
EasyMock.anyString(),
EasyMock.anyString()
)).andReturn(
getShardIteratorResult0).anyTimes();
expect(kinesis.getShardIterator(
anyObject(),
eq(shardId1),
anyString(),
anyString()
EasyMock.expect(kinesis.getShardIterator(
EasyMock.anyObject(),
EasyMock.eq(SHARD_ID1),
EasyMock.anyString(),
EasyMock.anyString()
)).andReturn(
getShardIteratorResult1).anyTimes();
expect(getShardIteratorResult0.getShardIterator()).andReturn(shard0Iterator).anyTimes();
expect(getShardIteratorResult1.getShardIterator()).andReturn(shard1Iterator).anyTimes();
expect(kinesis.getRecords(generateGetRecordsReq(shard0Iterator, recordsPerFetch))).andReturn(getRecordsResult0)
.anyTimes();
expect(kinesis.getRecords(generateGetRecordsReq(shard1Iterator, recordsPerFetch))).andReturn(getRecordsResult1)
.anyTimes();
expect(getRecordsResult0.getRecords()).andReturn(shard0Records).once();
expect(getRecordsResult1.getRecords()).andReturn(shard1Records).once();
expect(getRecordsResult0.getNextShardIterator()).andReturn(null).anyTimes();
expect(getRecordsResult1.getNextShardIterator()).andReturn(null).anyTimes();
EasyMock.expect(getShardIteratorResult0.getShardIterator()).andReturn(SHARD0_ITERATOR).anyTimes();
EasyMock.expect(getShardIteratorResult1.getShardIterator()).andReturn(SHARD1_ITERATOR).anyTimes();
EasyMock.expect(kinesis.getRecords(generateGetRecordsReq(SHARD0_ITERATOR, recordsPerFetch)))
.andReturn(getRecordsResult0)
.anyTimes();
EasyMock.expect(kinesis.getRecords(generateGetRecordsReq(SHARD1_ITERATOR, recordsPerFetch)))
.andReturn(getRecordsResult1)
.anyTimes();
EasyMock.expect(getRecordsResult0.getRecords()).andReturn(SHARD0_RECORDS).once();
EasyMock.expect(getRecordsResult1.getRecords()).andReturn(SHARD1_RECORDS).once();
EasyMock.expect(getRecordsResult0.getNextShardIterator()).andReturn(null).anyTimes();
EasyMock.expect(getRecordsResult1.getNextShardIterator()).andReturn(null).anyTimes();
replayAll();
Set<StreamPartition<String>> partitions = ImmutableSet.of(
StreamPartition.of(stream, shardId0),
StreamPartition.of(stream, shardId1)
StreamPartition.of(stream, SHARD_ID0),
StreamPartition.of(stream, SHARD_ID1)
);
@ -293,12 +293,12 @@ public class KinesisRecordSupplierTest extends EasyMockSupport
}
List<OrderedPartitionableRecord<String, String>> polledRecords = cleanRecords(recordSupplier.poll(
poll_timeout_millis));
POLL_TIMEOUT_MILLIS));
verifyAll();
Assert.assertEquals(partitions, recordSupplier.getAssignment());
Assert.assertTrue(polledRecords.containsAll(allRecords));
Assert.assertTrue(polledRecords.containsAll(ALL_RECORDS));
}
@Test
@ -307,37 +307,39 @@ public class KinesisRecordSupplierTest extends EasyMockSupport
{
recordsPerFetch = 100;
expect(kinesis.getShardIterator(
anyObject(),
eq(shardId0),
anyString(),
anyString()
EasyMock.expect(kinesis.getShardIterator(
EasyMock.anyObject(),
EasyMock.eq(SHARD_ID0),
EasyMock.anyString(),
EasyMock.anyString()
)).andReturn(
getShardIteratorResult0).anyTimes();
expect(kinesis.getShardIterator(
anyObject(),
eq(shardId1),
anyString(),
anyString()
EasyMock.expect(kinesis.getShardIterator(
EasyMock.anyObject(),
EasyMock.eq(SHARD_ID1),
EasyMock.anyString(),
EasyMock.anyString()
)).andReturn(
getShardIteratorResult1).anyTimes();
expect(getShardIteratorResult0.getShardIterator()).andReturn(shard0Iterator).anyTimes();
expect(getShardIteratorResult1.getShardIterator()).andReturn(shard1Iterator).anyTimes();
expect(kinesis.getRecords(generateGetRecordsReq(shard0Iterator, recordsPerFetch))).andReturn(getRecordsResult0)
.anyTimes();
expect(kinesis.getRecords(generateGetRecordsReq(shard1Iterator, recordsPerFetch))).andReturn(getRecordsResult1)
.anyTimes();
expect(getRecordsResult0.getRecords()).andReturn(shard0Records.subList(1, shard0Records.size())).once();
expect(getRecordsResult1.getRecords()).andReturn(shard1Records.subList(2, shard1Records.size())).once();
expect(getRecordsResult0.getNextShardIterator()).andReturn(null).anyTimes();
expect(getRecordsResult1.getNextShardIterator()).andReturn(null).anyTimes();
EasyMock.expect(getShardIteratorResult0.getShardIterator()).andReturn(SHARD0_ITERATOR).anyTimes();
EasyMock.expect(getShardIteratorResult1.getShardIterator()).andReturn(SHARD1_ITERATOR).anyTimes();
EasyMock.expect(kinesis.getRecords(generateGetRecordsReq(SHARD0_ITERATOR, recordsPerFetch)))
.andReturn(getRecordsResult0)
.anyTimes();
EasyMock.expect(kinesis.getRecords(generateGetRecordsReq(SHARD1_ITERATOR, recordsPerFetch)))
.andReturn(getRecordsResult1)
.anyTimes();
EasyMock.expect(getRecordsResult0.getRecords()).andReturn(SHARD0_RECORDS.subList(1, SHARD0_RECORDS.size())).once();
EasyMock.expect(getRecordsResult1.getRecords()).andReturn(SHARD1_RECORDS.subList(2, SHARD1_RECORDS.size())).once();
EasyMock.expect(getRecordsResult0.getNextShardIterator()).andReturn(null).anyTimes();
EasyMock.expect(getRecordsResult1.getNextShardIterator()).andReturn(null).anyTimes();
replayAll();
StreamPartition<String> shard0Partition = StreamPartition.of(stream, shardId0);
StreamPartition<String> shard1Partition = StreamPartition.of(stream, shardId1);
StreamPartition<String> shard0Partition = StreamPartition.of(stream, SHARD_ID0);
StreamPartition<String> shard1Partition = StreamPartition.of(stream, SHARD_ID1);
Set<StreamPartition<String>> partitions = ImmutableSet.of(
shard0Partition,
shard1Partition
@ -357,8 +359,8 @@ public class KinesisRecordSupplierTest extends EasyMockSupport
);
recordSupplier.assign(partitions);
recordSupplier.seek(shard1Partition, shard1Records.get(2).getSequenceNumber());
recordSupplier.seek(shard0Partition, shard0Records.get(1).getSequenceNumber());
recordSupplier.seek(shard1Partition, SHARD1_RECORDS.get(2).getSequenceNumber());
recordSupplier.seek(shard0Partition, SHARD0_RECORDS.get(1).getSequenceNumber());
recordSupplier.start();
for (int i = 0; i < 10 && recordSupplier.bufferSize() < 9; i++) {
@ -366,12 +368,12 @@ public class KinesisRecordSupplierTest extends EasyMockSupport
}
List<OrderedPartitionableRecord<String, String>> polledRecords = cleanRecords(recordSupplier.poll(
poll_timeout_millis));
POLL_TIMEOUT_MILLIS));
verifyAll();
Assert.assertEquals(9, polledRecords.size());
Assert.assertTrue(polledRecords.containsAll(allRecords.subList(4, 12)));
Assert.assertTrue(polledRecords.containsAll(allRecords.subList(1, 2)));
Assert.assertTrue(polledRecords.containsAll(ALL_RECORDS.subList(4, 12)));
Assert.assertTrue(polledRecords.containsAll(ALL_RECORDS.subList(1, 2)));
}
@ -382,29 +384,29 @@ public class KinesisRecordSupplierTest extends EasyMockSupport
{
recordsPerFetch = 100;
expect(kinesis.getShardIterator(
anyObject(),
eq(shardId0),
anyString(),
anyString()
EasyMock.expect(kinesis.getShardIterator(
EasyMock.anyObject(),
EasyMock.eq(SHARD_ID0),
EasyMock.anyString(),
EasyMock.anyString()
)).andReturn(
getShardIteratorResult0).anyTimes();
expect(kinesis.getShardIterator(
anyObject(),
eq(shardId1),
anyString(),
anyString()
EasyMock.expect(kinesis.getShardIterator(
EasyMock.anyObject(),
EasyMock.eq(SHARD_ID1),
EasyMock.anyString(),
EasyMock.anyString()
)).andReturn(
getShardIteratorResult1).anyTimes();
expect(getShardIteratorResult0.getShardIterator()).andReturn(null).once();
expect(getShardIteratorResult1.getShardIterator()).andReturn(null).once();
EasyMock.expect(getShardIteratorResult0.getShardIterator()).andReturn(null).once();
EasyMock.expect(getShardIteratorResult1.getShardIterator()).andReturn(null).once();
replayAll();
StreamPartition<String> shard0 = StreamPartition.of(stream, shardId0);
StreamPartition<String> shard1 = StreamPartition.of(stream, shardId1);
StreamPartition<String> shard0 = StreamPartition.of(stream, SHARD_ID0);
StreamPartition<String> shard1 = StreamPartition.of(stream, SHARD_ID1);
Set<StreamPartition<String>> partitions = ImmutableSet.of(
shard0,
shard1
@ -430,7 +432,7 @@ public class KinesisRecordSupplierTest extends EasyMockSupport
for (int i = 0; i < 10 && recordSupplier.bufferSize() < 2; i++) {
Thread.sleep(100);
}
Assert.assertEquals(Collections.emptyList(), cleanRecords(recordSupplier.poll(poll_timeout_millis)));
Assert.assertEquals(Collections.emptyList(), cleanRecords(recordSupplier.poll(POLL_TIMEOUT_MILLIS)));
verifyAll();
}
@ -438,8 +440,8 @@ public class KinesisRecordSupplierTest extends EasyMockSupport
@Test(expected = ISE.class)
public void testSeekUnassigned() throws InterruptedException
{
StreamPartition<String> shard0 = StreamPartition.of(stream, shardId0);
StreamPartition<String> shard1 = StreamPartition.of(stream, shardId1);
StreamPartition<String> shard0 = StreamPartition.of(stream, SHARD_ID0);
StreamPartition<String> shard1 = StreamPartition.of(stream, SHARD_ID1);
Set<StreamPartition<String>> partitions = ImmutableSet.of(
shard1
);
@ -469,32 +471,39 @@ public class KinesisRecordSupplierTest extends EasyMockSupport
// tests that after doing a seek, the now invalid records in buffer is cleaned up properly
recordsPerFetch = 100;
expect(kinesis.getShardIterator(
anyObject(),
eq(shardId1),
anyString(),
eq("5")
EasyMock.expect(kinesis.getShardIterator(
EasyMock.anyObject(),
EasyMock.eq(SHARD_ID1),
EasyMock.anyString(),
EasyMock.eq("5")
)).andReturn(
getShardIteratorResult1).once();
expect(kinesis.getShardIterator(anyObject(), eq(shardId1), anyString(), eq("7"))).andReturn(getShardIteratorResult0)
.once();
EasyMock.expect(kinesis.getShardIterator(
EasyMock.anyObject(),
EasyMock.eq(SHARD_ID1),
EasyMock.anyString(),
EasyMock.eq("7")
)).andReturn(getShardIteratorResult0)
.once();
expect(getShardIteratorResult1.getShardIterator()).andReturn(shard1Iterator).once();
expect(getShardIteratorResult0.getShardIterator()).andReturn(shard0Iterator).once();
expect(kinesis.getRecords(generateGetRecordsReq(shard1Iterator, recordsPerFetch))).andReturn(getRecordsResult1)
.once();
expect(kinesis.getRecords(generateGetRecordsReq(shard0Iterator, recordsPerFetch))).andReturn(getRecordsResult0)
.once();
expect(getRecordsResult1.getRecords()).andReturn(shard1Records.subList(5, shard1Records.size())).once();
expect(getRecordsResult0.getRecords()).andReturn(shard1Records.subList(7, shard1Records.size())).once();
expect(getRecordsResult1.getNextShardIterator()).andReturn(null).anyTimes();
expect(getRecordsResult0.getNextShardIterator()).andReturn(null).anyTimes();
EasyMock.expect(getShardIteratorResult1.getShardIterator()).andReturn(SHARD1_ITERATOR).once();
EasyMock.expect(getShardIteratorResult0.getShardIterator()).andReturn(SHARD0_ITERATOR).once();
EasyMock.expect(kinesis.getRecords(generateGetRecordsReq(SHARD1_ITERATOR, recordsPerFetch)))
.andReturn(getRecordsResult1)
.once();
EasyMock.expect(kinesis.getRecords(generateGetRecordsReq(SHARD0_ITERATOR, recordsPerFetch)))
.andReturn(getRecordsResult0)
.once();
EasyMock.expect(getRecordsResult1.getRecords()).andReturn(SHARD1_RECORDS.subList(5, SHARD1_RECORDS.size())).once();
EasyMock.expect(getRecordsResult0.getRecords()).andReturn(SHARD1_RECORDS.subList(7, SHARD1_RECORDS.size())).once();
EasyMock.expect(getRecordsResult1.getNextShardIterator()).andReturn(null).anyTimes();
EasyMock.expect(getRecordsResult0.getNextShardIterator()).andReturn(null).anyTimes();
replayAll();
Set<StreamPartition<String>> partitions = ImmutableSet.of(
StreamPartition.of(stream, shardId1)
StreamPartition.of(stream, SHARD_ID1)
);
recordSupplier = new KinesisRecordSupplier(
@ -511,30 +520,30 @@ public class KinesisRecordSupplierTest extends EasyMockSupport
);
recordSupplier.assign(partitions);
recordSupplier.seek(StreamPartition.of(stream, shardId1), "5");
recordSupplier.seek(StreamPartition.of(stream, SHARD_ID1), "5");
recordSupplier.start();
for (int i = 0; i < 10 && recordSupplier.bufferSize() < 6; i++) {
Thread.sleep(100);
}
OrderedPartitionableRecord<String, String> firstRecord = recordSupplier.poll(poll_timeout_millis).get(0);
OrderedPartitionableRecord<String, String> firstRecord = recordSupplier.poll(POLL_TIMEOUT_MILLIS).get(0);
Assert.assertEquals(
allRecords.get(7),
ALL_RECORDS.get(7),
firstRecord
);
recordSupplier.seek(StreamPartition.of(stream, shardId1), "7");
recordSupplier.seek(StreamPartition.of(stream, SHARD_ID1), "7");
recordSupplier.start();
while (recordSupplier.bufferSize() < 4) {
Thread.sleep(100);
}
OrderedPartitionableRecord<String, String> record2 = recordSupplier.poll(poll_timeout_millis).get(0);
OrderedPartitionableRecord<String, String> record2 = recordSupplier.poll(POLL_TIMEOUT_MILLIS).get(0);
Assert.assertEquals(allRecords.get(9), record2);
Assert.assertEquals(ALL_RECORDS.get(9), record2);
verifyAll();
}
@ -544,38 +553,40 @@ public class KinesisRecordSupplierTest extends EasyMockSupport
{
recordsPerFetch = 100;
expect(kinesis.getShardIterator(
anyObject(),
eq(shardId0),
anyString(),
anyString()
EasyMock.expect(kinesis.getShardIterator(
EasyMock.anyObject(),
EasyMock.eq(SHARD_ID0),
EasyMock.anyString(),
EasyMock.anyString()
)).andReturn(
getShardIteratorResult0).anyTimes();
expect(kinesis.getShardIterator(
anyObject(),
eq(shardId1),
anyString(),
anyString()
EasyMock.expect(kinesis.getShardIterator(
EasyMock.anyObject(),
EasyMock.eq(SHARD_ID1),
EasyMock.anyString(),
EasyMock.anyString()
)).andReturn(
getShardIteratorResult1).anyTimes();
expect(getShardIteratorResult0.getShardIterator()).andReturn(shard0Iterator).anyTimes();
expect(getShardIteratorResult1.getShardIterator()).andReturn(shard1Iterator).anyTimes();
expect(kinesis.getRecords(generateGetRecordsReq(shard0Iterator, recordsPerFetch))).andReturn(getRecordsResult0)
.anyTimes();
expect(kinesis.getRecords(generateGetRecordsReq(shard1Iterator, recordsPerFetch))).andReturn(getRecordsResult1)
.anyTimes();
expect(getRecordsResult0.getRecords()).andReturn(shard0Records).once();
expect(getRecordsResult1.getRecords()).andReturn(shard1Records).once();
expect(getRecordsResult0.getNextShardIterator()).andReturn(null).anyTimes();
expect(getRecordsResult1.getNextShardIterator()).andReturn(null).anyTimes();
EasyMock.expect(getShardIteratorResult0.getShardIterator()).andReturn(SHARD0_ITERATOR).anyTimes();
EasyMock.expect(getShardIteratorResult1.getShardIterator()).andReturn(SHARD1_ITERATOR).anyTimes();
EasyMock.expect(kinesis.getRecords(generateGetRecordsReq(SHARD0_ITERATOR, recordsPerFetch)))
.andReturn(getRecordsResult0)
.anyTimes();
EasyMock.expect(kinesis.getRecords(generateGetRecordsReq(SHARD1_ITERATOR, recordsPerFetch)))
.andReturn(getRecordsResult1)
.anyTimes();
EasyMock.expect(getRecordsResult0.getRecords()).andReturn(SHARD0_RECORDS).once();
EasyMock.expect(getRecordsResult1.getRecords()).andReturn(SHARD1_RECORDS).once();
EasyMock.expect(getRecordsResult0.getNextShardIterator()).andReturn(null).anyTimes();
EasyMock.expect(getRecordsResult1.getNextShardIterator()).andReturn(null).anyTimes();
replayAll();
Set<StreamPartition<String>> partitions = ImmutableSet.of(
StreamPartition.of(stream, shardId0),
StreamPartition.of(stream, shardId1)
StreamPartition.of(stream, SHARD_ID0),
StreamPartition.of(stream, SHARD_ID1)
);
@ -601,12 +612,12 @@ public class KinesisRecordSupplierTest extends EasyMockSupport
}
List<OrderedPartitionableRecord<String, String>> polledRecords = cleanRecords(recordSupplier.poll(
poll_timeout_millis));
POLL_TIMEOUT_MILLIS));
verifyAll();
Assert.assertEquals(partitions, recordSupplier.getAssignment());
Assert.assertTrue(polledRecords.containsAll(allRecords));
Assert.assertTrue(polledRecords.containsAll(ALL_RECORDS));
}
/**


@ -52,6 +52,7 @@ import org.apache.druid.query.aggregation.DoubleSumAggregatorFactory;
import org.apache.druid.segment.TestHelper;
import org.apache.druid.segment.indexing.DataSchema;
import org.apache.druid.segment.indexing.granularity.UniformGranularitySpec;
import org.easymock.EasyMock;
import org.easymock.EasyMockSupport;
import org.junit.Assert;
import org.junit.Test;
@ -63,10 +64,6 @@ import java.util.Iterator;
import java.util.List;
import java.util.Map;
import static org.easymock.EasyMock.anyLong;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.expectLastCall;
public class KinesisSamplerSpecTest extends EasyMockSupport
{
private static final ObjectMapper objectMapper = TestHelper.makeJsonMapper();
@ -132,18 +129,18 @@ public class KinesisSamplerSpecTest extends EasyMockSupport
@Test(timeout = 10_000L)
public void testSample() throws Exception
{
expect(recordSupplier.getPartitionIds(STREAM)).andReturn(ImmutableSet.of(SHARD_ID)).once();
EasyMock.expect(recordSupplier.getPartitionIds(STREAM)).andReturn(ImmutableSet.of(SHARD_ID)).once();
recordSupplier.assign(ImmutableSet.of(StreamPartition.of(STREAM, SHARD_ID)));
expectLastCall().once();
EasyMock.expectLastCall().once();
recordSupplier.seekToEarliest(ImmutableSet.of(StreamPartition.of(STREAM, SHARD_ID)));
expectLastCall().once();
EasyMock.expectLastCall().once();
expect(recordSupplier.poll(anyLong())).andReturn(generateRecords(STREAM)).once();
EasyMock.expect(recordSupplier.poll(EasyMock.anyLong())).andReturn(generateRecords(STREAM)).once();
recordSupplier.close();
expectLastCall().once();
EasyMock.expectLastCall().once();
replayAll();


@ -37,6 +37,7 @@ import org.apache.hadoop.mapreduce.task.TaskAttemptContextImpl;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.orc.mapred.OrcStruct;
import org.apache.orc.mapreduce.OrcInputFormat;
import org.junit.Assert;
import org.junit.Test;
import java.io.File;
@ -44,8 +45,6 @@ import java.io.IOException;
import java.util.ArrayList;
import java.util.List;
import static org.junit.Assert.assertEquals;
public class OrcHadoopInputRowParserTest
{
@Test
@ -62,14 +61,14 @@ public class OrcHadoopInputRowParserTest
*/
OrcStruct data = getFirstRow(job, ((StaticPathSpec) config.getPathSpec()).getPaths());
List<InputRow> rows = (List<InputRow>) config.getParser().parseBatch(data);
assertEquals(3, rows.get(0).getDimensions().size());
assertEquals("bar", rows.get(0).getDimension("col1").get(0));
Assert.assertEquals(3, rows.get(0).getDimensions().size());
Assert.assertEquals("bar", rows.get(0).getDimension("col1").get(0));
String s1 = rows.get(0).getDimension("col2").get(0);
String s2 = rows.get(0).getDimension("col2").get(1);
String s3 = rows.get(0).getDimension("col2").get(2);
assertEquals("dat1", s1);
assertEquals("dat2", s2);
assertEquals("dat3", s3);
Assert.assertEquals("dat1", s1);
Assert.assertEquals("dat2", s2);
Assert.assertEquals("dat3", s3);
}
@Test
@ -85,16 +84,16 @@ public class OrcHadoopInputRowParserTest
*/
OrcStruct data = getFirstRow(job, ((StaticPathSpec) config.getPathSpec()).getPaths());
List<InputRow> rows = (List<InputRow>) config.getParser().parseBatch(data);
assertEquals(7, rows.get(0).getDimensions().size());
assertEquals("bar", rows.get(0).getDimension("col1").get(0));
assertEquals("dat1", rows.get(0).getDimension("col2").get(0));
assertEquals("dat2", rows.get(0).getDimension("col2").get(1));
assertEquals("dat3", rows.get(0).getDimension("col2").get(2));
assertEquals(1.1f, rows.get(0).getRaw("col3"));
assertEquals(2L, rows.get(0).getRaw("col4"));
assertEquals(3.5d, rows.get(0).getRaw("col5"));
assertEquals(ImmutableList.of(), rows.get(0).getRaw("col6"));
assertEquals("subval7", rows.get(0).getRaw("col7-subcol7"));
Assert.assertEquals(7, rows.get(0).getDimensions().size());
Assert.assertEquals("bar", rows.get(0).getDimension("col1").get(0));
Assert.assertEquals("dat1", rows.get(0).getDimension("col2").get(0));
Assert.assertEquals("dat2", rows.get(0).getDimension("col2").get(1));
Assert.assertEquals("dat3", rows.get(0).getDimension("col2").get(2));
Assert.assertEquals(1.1f, rows.get(0).getRaw("col3"));
Assert.assertEquals(2L, rows.get(0).getRaw("col4"));
Assert.assertEquals(3.5d, rows.get(0).getRaw("col5"));
Assert.assertEquals(ImmutableList.of(), rows.get(0).getRaw("col6"));
Assert.assertEquals("subval7", rows.get(0).getRaw("col7-subcol7"));
}
@Test
@ -115,40 +114,40 @@ public class OrcHadoopInputRowParserTest
OrcStruct data = getFirstRow(job, ((StaticPathSpec) config.getPathSpec()).getPaths());
List<InputRow> rows = (List<InputRow>) config.getParser().parseBatch(data);
assertEquals(14, rows.get(0).getDimensions().size());
assertEquals("false", rows.get(0).getDimension("boolean1").get(0));
assertEquals("1", rows.get(0).getDimension("byte1").get(0));
assertEquals("1024", rows.get(0).getDimension("short1").get(0));
assertEquals("65536", rows.get(0).getDimension("int1").get(0));
assertEquals("9223372036854775807", rows.get(0).getDimension("long1").get(0));
assertEquals("1.0", rows.get(0).getDimension("float1").get(0));
assertEquals("-15.0", rows.get(0).getDimension("double1").get(0));
assertEquals("AAECAwQAAA==", rows.get(0).getDimension("bytes1").get(0));
assertEquals("hi", rows.get(0).getDimension("string1").get(0));
assertEquals("1.23456786547456E7", rows.get(0).getDimension("decimal1").get(0));
assertEquals("2", rows.get(0).getDimension("struct_list_struct_int").get(0));
assertEquals("1", rows.get(0).getDimension("struct_list_struct_intlist").get(0));
assertEquals("2", rows.get(0).getDimension("struct_list_struct_intlist").get(1));
assertEquals("good", rows.get(0).getDimension("list_struct_string").get(0));
assertEquals(DateTimes.of("2000-03-12T15:00:00.0Z"), rows.get(0).getTimestamp());
Assert.assertEquals(14, rows.get(0).getDimensions().size());
Assert.assertEquals("false", rows.get(0).getDimension("boolean1").get(0));
Assert.assertEquals("1", rows.get(0).getDimension("byte1").get(0));
Assert.assertEquals("1024", rows.get(0).getDimension("short1").get(0));
Assert.assertEquals("65536", rows.get(0).getDimension("int1").get(0));
Assert.assertEquals("9223372036854775807", rows.get(0).getDimension("long1").get(0));
Assert.assertEquals("1.0", rows.get(0).getDimension("float1").get(0));
Assert.assertEquals("-15.0", rows.get(0).getDimension("double1").get(0));
Assert.assertEquals("AAECAwQAAA==", rows.get(0).getDimension("bytes1").get(0));
Assert.assertEquals("hi", rows.get(0).getDimension("string1").get(0));
Assert.assertEquals("1.23456786547456E7", rows.get(0).getDimension("decimal1").get(0));
Assert.assertEquals("2", rows.get(0).getDimension("struct_list_struct_int").get(0));
Assert.assertEquals("1", rows.get(0).getDimension("struct_list_struct_intlist").get(0));
Assert.assertEquals("2", rows.get(0).getDimension("struct_list_struct_intlist").get(1));
Assert.assertEquals("good", rows.get(0).getDimension("list_struct_string").get(0));
Assert.assertEquals(DateTimes.of("2000-03-12T15:00:00.0Z"), rows.get(0).getTimestamp());
// first row has empty 'map' column, so lets read another!
List<InputRow> allRows = getAllRows(config);
InputRow anotherRow = allRows.get(0);
assertEquals(14, rows.get(0).getDimensions().size());
assertEquals("true", anotherRow.getDimension("boolean1").get(0));
assertEquals("100", anotherRow.getDimension("byte1").get(0));
assertEquals("2048", anotherRow.getDimension("short1").get(0));
assertEquals("65536", anotherRow.getDimension("int1").get(0));
assertEquals("9223372036854775807", anotherRow.getDimension("long1").get(0));
assertEquals("2.0", anotherRow.getDimension("float1").get(0));
assertEquals("-5.0", anotherRow.getDimension("double1").get(0));
assertEquals("AAECAwQAAA==", rows.get(0).getDimension("bytes1").get(0));
assertEquals("bye", anotherRow.getDimension("string1").get(0));
assertEquals("1.23456786547457E7", anotherRow.getDimension("decimal1").get(0));
assertEquals("2", anotherRow.getDimension("struct_list_struct_int").get(0));
assertEquals("cat", anotherRow.getDimension("list_struct_string").get(0));
assertEquals("5", anotherRow.getDimension("map_struct_int").get(0));
Assert.assertEquals(14, rows.get(0).getDimensions().size());
Assert.assertEquals("true", anotherRow.getDimension("boolean1").get(0));
Assert.assertEquals("100", anotherRow.getDimension("byte1").get(0));
Assert.assertEquals("2048", anotherRow.getDimension("short1").get(0));
Assert.assertEquals("65536", anotherRow.getDimension("int1").get(0));
Assert.assertEquals("9223372036854775807", anotherRow.getDimension("long1").get(0));
Assert.assertEquals("2.0", anotherRow.getDimension("float1").get(0));
Assert.assertEquals("-5.0", anotherRow.getDimension("double1").get(0));
Assert.assertEquals("AAECAwQAAA==", rows.get(0).getDimension("bytes1").get(0));
Assert.assertEquals("bye", anotherRow.getDimension("string1").get(0));
Assert.assertEquals("1.23456786547457E7", anotherRow.getDimension("decimal1").get(0));
Assert.assertEquals("2", anotherRow.getDimension("struct_list_struct_int").get(0));
Assert.assertEquals("cat", anotherRow.getDimension("list_struct_string").get(0));
Assert.assertEquals("5", anotherRow.getDimension("map_struct_int").get(0));
}
@Test
@ -167,12 +166,12 @@ public class OrcHadoopInputRowParserTest
OrcStruct data = getFirstRow(job, ((StaticPathSpec) config.getPathSpec()).getPaths());
List<InputRow> rows = (List<InputRow>) config.getParser().parseBatch(data);
assertEquals(4, rows.get(0).getDimensions().size());
assertEquals("2", rows.get(0).getDimension("userid").get(0));
assertEquals("foo", rows.get(0).getDimension("string1").get(0));
assertEquals("0.8", rows.get(0).getDimension("subtype").get(0));
assertEquals("1.2", rows.get(0).getDimension("decimal1").get(0));
assertEquals(DateTimes.of("1969-12-31T16:00:00.0Z"), rows.get(0).getTimestamp());
Assert.assertEquals(4, rows.get(0).getDimensions().size());
Assert.assertEquals("2", rows.get(0).getDimension("userid").get(0));
Assert.assertEquals("foo", rows.get(0).getDimension("string1").get(0));
Assert.assertEquals("0.8", rows.get(0).getDimension("subtype").get(0));
Assert.assertEquals("1.2", rows.get(0).getDimension("decimal1").get(0));
Assert.assertEquals(DateTimes.of("1969-12-31T16:00:00.0Z"), rows.get(0).getTimestamp());
}
@Test
@ -189,9 +188,9 @@ public class OrcHadoopInputRowParserTest
OrcStruct data = getFirstRow(job, ((StaticPathSpec) config.getPathSpec()).getPaths());
List<InputRow> rows = (List<InputRow>) config.getParser().parseBatch(data);
assertEquals(1, rows.get(0).getDimensions().size());
assertEquals("1900-12-25T00:00:00.000Z", rows.get(0).getDimension("date").get(0));
assertEquals(DateTimes.of("1900-05-05T12:34:56.1Z"), rows.get(0).getTimestamp());
Assert.assertEquals(1, rows.get(0).getDimensions().size());
Assert.assertEquals("1900-12-25T00:00:00.000Z", rows.get(0).getDimension("date").get(0));
Assert.assertEquals(DateTimes.of("1900-05-05T12:34:56.1Z"), rows.get(0).getTimestamp());
}
@Test
@ -208,9 +207,9 @@ public class OrcHadoopInputRowParserTest
OrcStruct data = getFirstRow(job, ((StaticPathSpec) config.getPathSpec()).getPaths());
List<InputRow> rows = (List<InputRow>) config.getParser().parseBatch(data);
assertEquals(1, rows.get(0).getDimensions().size());
assertEquals("2038-12-25T00:00:00.000Z", rows.get(0).getDimension("date").get(0));
assertEquals(DateTimes.of("2038-05-05T12:34:56.1Z"), rows.get(0).getTimestamp());
Assert.assertEquals(1, rows.get(0).getDimensions().size());
Assert.assertEquals("2038-12-25T00:00:00.000Z", rows.get(0).getDimension("date").get(0));
Assert.assertEquals(DateTimes.of("2038-05-05T12:34:56.1Z"), rows.get(0).getTimestamp());
}
private static HadoopDruidIndexerConfig loadHadoopDruidIndexerConfig(String configPath)

View File

@ -25,6 +25,7 @@ import org.apache.druid.indexer.HadoopDruidIndexerConfig;
import org.apache.druid.indexer.path.StaticPathSpec;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
@ -32,9 +33,6 @@ import org.junit.runners.Parameterized;
import java.io.IOException;
import java.util.List;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
@RunWith(Parameterized.class)
public class CompatParquetInputTest extends BaseParquetInputTest
{
@ -70,8 +68,8 @@ public class CompatParquetInputTest extends BaseParquetInputTest
InputRow row = ((List<InputRow>) config.getParser().parseBatch(data)).get(0);
// without binaryAsString: true, the value would be something like "[104, 101, 121, 32, 116, 104, 105, 115, 32, 105, 115, 3.... ]"
assertEquals(row.getDimension("field").get(0), "hey this is &é(-è_çà)=^$ù*! Ω^^");
assertEquals(row.getTimestampFromEpoch(), 1471800234);
Assert.assertEquals("hey this is &é(-è_çà)=^$ù*! Ω^^", row.getDimension("field").get(0));
Assert.assertEquals(1471800234, row.getTimestampFromEpoch());
}
@ -87,10 +85,10 @@ public class CompatParquetInputTest extends BaseParquetInputTest
Object data = getFirstRow(job, parserType, ((StaticPathSpec) config.getPathSpec()).getPaths());
List<InputRow> rows = (List<InputRow>) config.getParser().parseBatch(data);
List<InputRow> rows2 = getAllRows(parserType, config);
assertEquals("2018-09-01T00:00:00.000Z", rows.get(0).getTimestamp().toString());
assertEquals("-1", rows.get(0).getDimension("col").get(0));
assertEquals(-1, rows.get(0).getMetric("metric1"));
assertTrue(rows2.get(2).getDimension("col").isEmpty());
Assert.assertEquals("2018-09-01T00:00:00.000Z", rows.get(0).getTimestamp().toString());
Assert.assertEquals("-1", rows.get(0).getDimension("col").get(0));
Assert.assertEquals(-1, rows.get(0).getMetric("metric1"));
Assert.assertTrue(rows2.get(2).getDimension("col").isEmpty());
}
@Test
@ -158,30 +156,30 @@ public class CompatParquetInputTest extends BaseParquetInputTest
config.intoConfiguration(job);
Object data = getFirstRow(job, parserType, ((StaticPathSpec) config.getPathSpec()).getPaths());
List<InputRow> rows = (List<InputRow>) config.getParser().parseBatch(data);
assertEquals("2018-09-01T00:00:00.000Z", rows.get(0).getTimestamp().toString());
assertEquals("true", rows.get(0).getDimension("boolColumn").get(0));
assertEquals("0", rows.get(0).getDimension("byteColumn").get(0));
assertEquals("1", rows.get(0).getDimension("shortColumn").get(0));
assertEquals("2", rows.get(0).getDimension("intColumn").get(0));
assertEquals("0", rows.get(0).getDimension("longColumn").get(0));
assertEquals("0.2", rows.get(0).getDimension("doubleColumn").get(0));
assertEquals("val_0", rows.get(0).getDimension("binaryColumn").get(0));
assertEquals("val_0", rows.get(0).getDimension("stringColumn").get(0));
assertEquals("SPADES", rows.get(0).getDimension("enumColumn").get(0));
assertTrue(rows.get(0).getDimension("maybeBoolColumn").isEmpty());
assertTrue(rows.get(0).getDimension("maybeByteColumn").isEmpty());
assertTrue(rows.get(0).getDimension("maybeShortColumn").isEmpty());
assertTrue(rows.get(0).getDimension("maybeIntColumn").isEmpty());
assertTrue(rows.get(0).getDimension("maybeLongColumn").isEmpty());
assertTrue(rows.get(0).getDimension("maybeDoubleColumn").isEmpty());
assertTrue(rows.get(0).getDimension("maybeBinaryColumn").isEmpty());
assertTrue(rows.get(0).getDimension("maybeStringColumn").isEmpty());
assertTrue(rows.get(0).getDimension("maybeEnumColumn").isEmpty());
assertEquals("arr_0", rows.get(0).getDimension("stringsColumn").get(0));
assertEquals("arr_1", rows.get(0).getDimension("stringsColumn").get(1));
assertEquals("0", rows.get(0).getDimension("intSetColumn").get(0));
assertEquals("val_1", rows.get(0).getDimension("extractByLogicalMap").get(0));
assertEquals("1", rows.get(0).getDimension("extractByComplexLogicalMap").get(0));
Assert.assertEquals("2018-09-01T00:00:00.000Z", rows.get(0).getTimestamp().toString());
Assert.assertEquals("true", rows.get(0).getDimension("boolColumn").get(0));
Assert.assertEquals("0", rows.get(0).getDimension("byteColumn").get(0));
Assert.assertEquals("1", rows.get(0).getDimension("shortColumn").get(0));
Assert.assertEquals("2", rows.get(0).getDimension("intColumn").get(0));
Assert.assertEquals("0", rows.get(0).getDimension("longColumn").get(0));
Assert.assertEquals("0.2", rows.get(0).getDimension("doubleColumn").get(0));
Assert.assertEquals("val_0", rows.get(0).getDimension("binaryColumn").get(0));
Assert.assertEquals("val_0", rows.get(0).getDimension("stringColumn").get(0));
Assert.assertEquals("SPADES", rows.get(0).getDimension("enumColumn").get(0));
Assert.assertTrue(rows.get(0).getDimension("maybeBoolColumn").isEmpty());
Assert.assertTrue(rows.get(0).getDimension("maybeByteColumn").isEmpty());
Assert.assertTrue(rows.get(0).getDimension("maybeShortColumn").isEmpty());
Assert.assertTrue(rows.get(0).getDimension("maybeIntColumn").isEmpty());
Assert.assertTrue(rows.get(0).getDimension("maybeLongColumn").isEmpty());
Assert.assertTrue(rows.get(0).getDimension("maybeDoubleColumn").isEmpty());
Assert.assertTrue(rows.get(0).getDimension("maybeBinaryColumn").isEmpty());
Assert.assertTrue(rows.get(0).getDimension("maybeStringColumn").isEmpty());
Assert.assertTrue(rows.get(0).getDimension("maybeEnumColumn").isEmpty());
Assert.assertEquals("arr_0", rows.get(0).getDimension("stringsColumn").get(0));
Assert.assertEquals("arr_1", rows.get(0).getDimension("stringsColumn").get(1));
Assert.assertEquals("0", rows.get(0).getDimension("intSetColumn").get(0));
Assert.assertEquals("val_1", rows.get(0).getDimension("extractByLogicalMap").get(0));
Assert.assertEquals("1", rows.get(0).getDimension("extractByComplexLogicalMap").get(0));
}
@Test
@ -199,10 +197,10 @@ public class CompatParquetInputTest extends BaseParquetInputTest
);
config.intoConfiguration(job);
List<InputRow> rows = getAllRows(parserType, config);
assertEquals("2018-09-01T00:00:00.000Z", rows.get(0).getTimestamp().toString());
assertEquals("1", rows.get(0).getDimension("repeatedInt").get(0));
assertEquals("2", rows.get(0).getDimension("repeatedInt").get(1));
assertEquals("3", rows.get(0).getDimension("repeatedInt").get(2));
Assert.assertEquals("2018-09-01T00:00:00.000Z", rows.get(0).getTimestamp().toString());
Assert.assertEquals("1", rows.get(0).getDimension("repeatedInt").get(0));
Assert.assertEquals("2", rows.get(0).getDimension("repeatedInt").get(1));
Assert.assertEquals("3", rows.get(0).getDimension("repeatedInt").get(2));
}
@Test
@ -223,10 +221,10 @@ public class CompatParquetInputTest extends BaseParquetInputTest
config.intoConfiguration(job);
List<InputRow> rows = getAllRows(parserType, config);
assertEquals("2018-09-01T00:00:00.000Z", rows.get(0).getTimestamp().toString());
assertEquals("5", rows.get(0).getDimension("primitive").get(0));
assertEquals("4", rows.get(0).getDimension("extracted1").get(0));
assertEquals("6", rows.get(0).getDimension("extracted2").get(0));
Assert.assertEquals("2018-09-01T00:00:00.000Z", rows.get(0).getTimestamp().toString());
Assert.assertEquals("5", rows.get(0).getDimension("primitive").get(0));
Assert.assertEquals("4", rows.get(0).getDimension("extracted1").get(0));
Assert.assertEquals("6", rows.get(0).getDimension("extracted2").get(0));
}
@Test
@ -245,13 +243,13 @@ public class CompatParquetInputTest extends BaseParquetInputTest
);
config.intoConfiguration(job);
List<InputRow> rows = getAllRows(parserType, config);
assertEquals("2018-09-01T00:00:00.000Z", rows.get(0).getTimestamp().toString());
assertEquals("10", rows.get(0).getDimension("optionalPrimitive").get(0));
assertEquals("9", rows.get(0).getDimension("requiredPrimitive").get(0));
assertTrue(rows.get(0).getDimension("repeatedPrimitive").isEmpty());
assertTrue(rows.get(0).getDimension("extractedOptional").isEmpty());
assertEquals("9", rows.get(0).getDimension("extractedRequired").get(0));
assertEquals("9", rows.get(0).getDimension("extractedRepeated").get(0));
assertEquals("10", rows.get(0).getDimension("extractedRepeated").get(1));
Assert.assertEquals("2018-09-01T00:00:00.000Z", rows.get(0).getTimestamp().toString());
Assert.assertEquals("10", rows.get(0).getDimension("optionalPrimitive").get(0));
Assert.assertEquals("9", rows.get(0).getDimension("requiredPrimitive").get(0));
Assert.assertTrue(rows.get(0).getDimension("repeatedPrimitive").isEmpty());
Assert.assertTrue(rows.get(0).getDimension("extractedOptional").isEmpty());
Assert.assertEquals("9", rows.get(0).getDimension("extractedRequired").get(0));
Assert.assertEquals("9", rows.get(0).getDimension("extractedRepeated").get(0));
Assert.assertEquals("10", rows.get(0).getDimension("extractedRepeated").get(1));
}
}

View File

@ -22,8 +22,7 @@ package org.apache.druid.data.input.parquet;
import com.google.common.collect.ImmutableList;
import org.apache.druid.data.input.InputRow;
import org.apache.druid.indexer.HadoopDruidIndexerConfig;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
@ -32,8 +31,6 @@ import java.io.IOException;
import java.math.BigDecimal;
import java.util.List;
import static org.junit.Assert.assertEquals;
@RunWith(Parameterized.class)
public class DecimalParquetInputTest extends BaseParquetInputTest
{
@ -47,12 +44,10 @@ public class DecimalParquetInputTest extends BaseParquetInputTest
}
private final String parserType;
private final Job job;
public DecimalParquetInputTest(String parserType) throws IOException
{
this.parserType = parserType;
this.job = Job.getInstance(new Configuration());
}
@Test
@ -68,9 +63,9 @@ public class DecimalParquetInputTest extends BaseParquetInputTest
true
);
List<InputRow> rows = getAllRows(parserType, config);
assertEquals("2018-09-01T00:00:00.000Z", rows.get(0).getTimestamp().toString());
assertEquals("1.0", rows.get(0).getDimension("fixed_len_dec").get(0));
assertEquals(new BigDecimal("1.0"), rows.get(0).getMetric("metric1"));
Assert.assertEquals("2018-09-01T00:00:00.000Z", rows.get(0).getTimestamp().toString());
Assert.assertEquals("1.0", rows.get(0).getDimension("fixed_len_dec").get(0));
Assert.assertEquals(new BigDecimal("1.0"), rows.get(0).getMetric("metric1"));
}
@Test
@ -86,9 +81,9 @@ public class DecimalParquetInputTest extends BaseParquetInputTest
true
);
List<InputRow> rows = getAllRows(parserType, config);
assertEquals("2018-09-01T00:00:00.000Z", rows.get(0).getTimestamp().toString());
assertEquals("100", rows.get(0).getDimension("i32_dec").get(0));
assertEquals(new BigDecimal(100), rows.get(0).getMetric("metric1"));
Assert.assertEquals("2018-09-01T00:00:00.000Z", rows.get(0).getTimestamp().toString());
Assert.assertEquals("100", rows.get(0).getDimension("i32_dec").get(0));
Assert.assertEquals(new BigDecimal(100), rows.get(0).getMetric("metric1"));
}
@Test
@ -104,8 +99,8 @@ public class DecimalParquetInputTest extends BaseParquetInputTest
true
);
List<InputRow> rows = getAllRows(parserType, config);
assertEquals("2018-09-01T00:00:00.000Z", rows.get(0).getTimestamp().toString());
assertEquals("100", rows.get(0).getDimension("i64_dec").get(0));
assertEquals(new BigDecimal(100), rows.get(0).getMetric("metric1"));
Assert.assertEquals("2018-09-01T00:00:00.000Z", rows.get(0).getTimestamp().toString());
Assert.assertEquals("100", rows.get(0).getDimension("i64_dec").get(0));
Assert.assertEquals(new BigDecimal(100), rows.get(0).getMetric("metric1"));
}
}

View File

@ -33,13 +33,10 @@ import org.junit.runners.Parameterized;
import java.io.IOException;
import java.util.List;
import static org.junit.Assert.assertEquals;
@RunWith(Parameterized.class)
public class FlattenSpecParquetInputTest extends BaseParquetInputTest
{
private static String TS1 = "2018-09-18T00:18:00.023Z";
private static final String TS1 = "2018-09-18T00:18:00.023Z";
@Parameterized.Parameters(name = "type = {0}")
public static Iterable<Object[]> constructorFeeder()
@ -71,13 +68,13 @@ public class FlattenSpecParquetInputTest extends BaseParquetInputTest
Object data = getFirstRow(job, parserType, ((StaticPathSpec) config.getPathSpec()).getPaths());
List<InputRow> rows = (List<InputRow>) config.getParser().parseBatch(data);
assertEquals(TS1, rows.get(0).getTimestamp().toString());
assertEquals("d1v1", rows.get(0).getDimension("dim1").get(0));
assertEquals("d2v1", rows.get(0).getDimension("dim2").get(0));
assertEquals("1", rows.get(0).getDimension("dim3").get(0));
assertEquals("listDim1v1", rows.get(0).getDimension("listDim").get(0));
assertEquals("listDim1v2", rows.get(0).getDimension("listDim").get(1));
assertEquals(1, rows.get(0).getMetric("metric1").longValue());
Assert.assertEquals(TS1, rows.get(0).getTimestamp().toString());
Assert.assertEquals("d1v1", rows.get(0).getDimension("dim1").get(0));
Assert.assertEquals("d2v1", rows.get(0).getDimension("dim2").get(0));
Assert.assertEquals("1", rows.get(0).getDimension("dim3").get(0));
Assert.assertEquals("listDim1v1", rows.get(0).getDimension("listDim").get(0));
Assert.assertEquals("listDim1v2", rows.get(0).getDimension("listDim").get(1));
Assert.assertEquals(1, rows.get(0).getMetric("metric1").longValue());
}
@Test
@ -92,13 +89,13 @@ public class FlattenSpecParquetInputTest extends BaseParquetInputTest
Object data = getFirstRow(job, parserType, ((StaticPathSpec) config.getPathSpec()).getPaths());
List<InputRow> rows = (List<InputRow>) config.getParser().parseBatch(data);
assertEquals(TS1, rows.get(0).getTimestamp().toString());
assertEquals("d1v1", rows.get(0).getDimension("dim1").get(0));
assertEquals("d2v1", rows.get(0).getDimension("dim2").get(0));
assertEquals("1", rows.get(0).getDimension("dim3").get(0));
assertEquals("listDim1v1", rows.get(0).getDimension("listDim").get(0));
assertEquals("listDim1v2", rows.get(0).getDimension("listDim").get(1));
assertEquals(1, rows.get(0).getMetric("metric1").longValue());
Assert.assertEquals(TS1, rows.get(0).getTimestamp().toString());
Assert.assertEquals("d1v1", rows.get(0).getDimension("dim1").get(0));
Assert.assertEquals("d2v1", rows.get(0).getDimension("dim2").get(0));
Assert.assertEquals("1", rows.get(0).getDimension("dim3").get(0));
Assert.assertEquals("listDim1v1", rows.get(0).getDimension("listDim").get(0));
Assert.assertEquals("listDim1v2", rows.get(0).getDimension("listDim").get(1));
Assert.assertEquals(1, rows.get(0).getMetric("metric1").longValue());
}
@Test
@ -113,13 +110,13 @@ public class FlattenSpecParquetInputTest extends BaseParquetInputTest
Object data = getFirstRow(job, parserType, ((StaticPathSpec) config.getPathSpec()).getPaths());
List<InputRow> rows = (List<InputRow>) config.getParser().parseBatch(data);
assertEquals(TS1, rows.get(0).getTimestamp().toString());
assertEquals("d1v1", rows.get(0).getDimension("dim1").get(0));
assertEquals("d2v1", rows.get(0).getDimension("dim2").get(0));
assertEquals("1", rows.get(0).getDimension("dim3").get(0));
assertEquals("listDim1v1", rows.get(0).getDimension("list").get(0));
assertEquals("listDim1v2", rows.get(0).getDimension("list").get(1));
assertEquals(1, rows.get(0).getMetric("metric1").longValue());
Assert.assertEquals(TS1, rows.get(0).getTimestamp().toString());
Assert.assertEquals("d1v1", rows.get(0).getDimension("dim1").get(0));
Assert.assertEquals("d2v1", rows.get(0).getDimension("dim2").get(0));
Assert.assertEquals("1", rows.get(0).getDimension("dim3").get(0));
Assert.assertEquals("listDim1v1", rows.get(0).getDimension("list").get(0));
Assert.assertEquals("listDim1v2", rows.get(0).getDimension("list").get(1));
Assert.assertEquals(1, rows.get(0).getMetric("metric1").longValue());
}
@Test
@ -134,11 +131,11 @@ public class FlattenSpecParquetInputTest extends BaseParquetInputTest
Object data = getFirstRow(job, parserType, ((StaticPathSpec) config.getPathSpec()).getPaths());
List<InputRow> rows = (List<InputRow>) config.getParser().parseBatch(data);
assertEquals(TS1, rows.get(0).getTimestamp().toString());
assertEquals("d1v1", rows.get(0).getDimension("dim1").get(0));
assertEquals("d2v1", rows.get(0).getDimension("dim2").get(0));
assertEquals("listDim1v2", rows.get(0).getDimension("listextracted").get(0));
assertEquals(1, rows.get(0).getMetric("metric1").longValue());
Assert.assertEquals(TS1, rows.get(0).getTimestamp().toString());
Assert.assertEquals("d1v1", rows.get(0).getDimension("dim1").get(0));
Assert.assertEquals("d2v1", rows.get(0).getDimension("dim2").get(0));
Assert.assertEquals("listDim1v2", rows.get(0).getDimension("listextracted").get(0));
Assert.assertEquals(1, rows.get(0).getMetric("metric1").longValue());
}
@ -154,14 +151,14 @@ public class FlattenSpecParquetInputTest extends BaseParquetInputTest
Object data = getFirstRow(job, parserType, ((StaticPathSpec) config.getPathSpec()).getPaths());
List<InputRow> rows = (List<InputRow>) config.getParser().parseBatch(data);
assertEquals(TS1, rows.get(0).getTimestamp().toString());
assertEquals("d1v1", rows.get(0).getDimension("dim1").get(0));
Assert.assertEquals(TS1, rows.get(0).getTimestamp().toString());
Assert.assertEquals("d1v1", rows.get(0).getDimension("dim1").get(0));
List<String> dims = rows.get(0).getDimensions();
Assert.assertFalse(dims.contains("dim2"));
Assert.assertFalse(dims.contains("dim3"));
Assert.assertFalse(dims.contains("listDim"));
Assert.assertFalse(dims.contains("nestedData"));
assertEquals(1, rows.get(0).getMetric("metric1").longValue());
Assert.assertEquals(1, rows.get(0).getMetric("metric1").longValue());
}
@Test
@ -176,13 +173,13 @@ public class FlattenSpecParquetInputTest extends BaseParquetInputTest
Object data = getFirstRow(job, parserType, ((StaticPathSpec) config.getPathSpec()).getPaths());
List<InputRow> rows = (List<InputRow>) config.getParser().parseBatch(data);
assertEquals(TS1, rows.get(0).getTimestamp().toString());
assertEquals("d1v1", rows.get(0).getDimension("dim1").get(0));
Assert.assertEquals(TS1, rows.get(0).getTimestamp().toString());
Assert.assertEquals("d1v1", rows.get(0).getDimension("dim1").get(0));
List<String> dims = rows.get(0).getDimensions();
Assert.assertFalse(dims.contains("dim2"));
Assert.assertFalse(dims.contains("dim3"));
Assert.assertFalse(dims.contains("listDim"));
assertEquals(1, rows.get(0).getMetric("metric1").longValue());
Assert.assertEquals(1, rows.get(0).getMetric("metric1").longValue());
}
@Test
@ -197,14 +194,14 @@ public class FlattenSpecParquetInputTest extends BaseParquetInputTest
Object data = getFirstRow(job, parserType, ((StaticPathSpec) config.getPathSpec()).getPaths());
List<InputRow> rows = (List<InputRow>) config.getParser().parseBatch(data);
assertEquals(TS1, rows.get(0).getTimestamp().toString());
assertEquals("d1v1", rows.get(0).getDimension("dim1").get(0));
assertEquals("d2v1", rows.get(0).getDimension("dim2").get(0));
assertEquals("1", rows.get(0).getDimension("dim3").get(0));
assertEquals("listDim1v1", rows.get(0).getDimension("listDim").get(0));
assertEquals("listDim1v2", rows.get(0).getDimension("listDim").get(1));
assertEquals(1, rows.get(0).getMetric("metric1").longValue());
assertEquals(2, rows.get(0).getMetric("metric2").longValue());
Assert.assertEquals(TS1, rows.get(0).getTimestamp().toString());
Assert.assertEquals("d1v1", rows.get(0).getDimension("dim1").get(0));
Assert.assertEquals("d2v1", rows.get(0).getDimension("dim2").get(0));
Assert.assertEquals("1", rows.get(0).getDimension("dim3").get(0));
Assert.assertEquals("listDim1v1", rows.get(0).getDimension("listDim").get(0));
Assert.assertEquals("listDim1v2", rows.get(0).getDimension("listDim").get(1));
Assert.assertEquals(1, rows.get(0).getMetric("metric1").longValue());
Assert.assertEquals(2, rows.get(0).getMetric("metric2").longValue());
}
@Test
@ -219,12 +216,11 @@ public class FlattenSpecParquetInputTest extends BaseParquetInputTest
Object data = getFirstRow(job, parserType, ((StaticPathSpec) config.getPathSpec()).getPaths());
List<InputRow> rows = (List<InputRow>) config.getParser().parseBatch(data);
assertEquals(TS1, rows.get(0).getTimestamp().toString());
assertEquals("d1v1", rows.get(0).getDimension("dim1").get(0));
assertEquals("d2v1", rows.get(0).getDimension("dim2").get(0));
assertEquals("1", rows.get(0).getDimension("dim3").get(0));
assertEquals("listDim1v2", rows.get(0).getDimension("listextracted").get(0));
assertEquals(1, rows.get(0).getMetric("metric1").longValue());
Assert.assertEquals(TS1, rows.get(0).getTimestamp().toString());
Assert.assertEquals("d1v1", rows.get(0).getDimension("dim1").get(0));
Assert.assertEquals("d2v1", rows.get(0).getDimension("dim2").get(0));
Assert.assertEquals("1", rows.get(0).getDimension("dim3").get(0));
Assert.assertEquals("listDim1v2", rows.get(0).getDimension("listextracted").get(0));
Assert.assertEquals(1, rows.get(0).getMetric("metric1").longValue());
}
}

View File

@ -25,6 +25,7 @@ import org.apache.druid.indexer.HadoopDruidIndexerConfig;
import org.apache.druid.indexer.path.StaticPathSpec;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
@ -32,8 +33,6 @@ import org.junit.runners.Parameterized;
import java.io.IOException;
import java.util.List;
import static org.junit.Assert.assertEquals;
@RunWith(Parameterized.class)
public class TimestampsParquetInputTest extends BaseParquetInputTest
{
@ -70,10 +69,10 @@ public class TimestampsParquetInputTest extends BaseParquetInputTest
);
List<InputRow> rowsWithString = getAllRows(parserType, configTimeAsString);
List<InputRow> rowsWithDate = getAllRows(parserType, configTimeAsDate);
assertEquals(rowsWithDate.size(), rowsWithString.size());
Assert.assertEquals(rowsWithDate.size(), rowsWithString.size());
for (int i = 0; i < rowsWithDate.size(); i++) {
assertEquals(rowsWithString.get(i).getTimestamp(), rowsWithDate.get(i).getTimestamp());
Assert.assertEquals(rowsWithString.get(i).getTimestamp(), rowsWithDate.get(i).getTimestamp());
}
}
@ -96,7 +95,7 @@ public class TimestampsParquetInputTest extends BaseParquetInputTest
Object data = getFirstRow(job, parserType, ((StaticPathSpec) config.getPathSpec()).getPaths());
List<InputRow> rows = (List<InputRow>) config.getParser().parseBatch(data);
assertEquals("2001-01-01T01:01:01.000Z", rows.get(0).getTimestamp().toString());
Assert.assertEquals("2001-01-01T01:01:01.000Z", rows.get(0).getTimestamp().toString());
}
@Test
@ -109,6 +108,6 @@ public class TimestampsParquetInputTest extends BaseParquetInputTest
);
config.intoConfiguration(job);
List<InputRow> rows = getAllRows(parserType, config);
assertEquals("1970-01-01T00:00:00.010Z", rows.get(0).getTimestamp().toString());
Assert.assertEquals("1970-01-01T00:00:00.010Z", rows.get(0).getTimestamp().toString());
}
}

View File

@ -25,6 +25,7 @@ import org.apache.druid.indexer.HadoopDruidIndexerConfig;
import org.apache.druid.indexer.path.StaticPathSpec;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapreduce.Job;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
@ -32,8 +33,6 @@ import org.junit.runners.Parameterized;
import java.io.IOException;
import java.util.List;
import static org.junit.Assert.assertEquals;
@RunWith(Parameterized.class)
public class WikiParquetInputTest extends BaseParquetInputTest
{
@ -67,10 +66,10 @@ public class WikiParquetInputTest extends BaseParquetInputTest
Object data = getFirstRow(job, parserType, ((StaticPathSpec) config.getPathSpec()).getPaths());
List<InputRow> rows = (List<InputRow>) config.getParser().parseBatch(data);
assertEquals(rows.get(0).getDimension("page").get(0), "Gypsy Danger");
Assert.assertEquals("Gypsy Danger", rows.get(0).getDimension("page").get(0));
String s1 = rows.get(0).getDimension("language").get(0);
String s2 = rows.get(0).getDimension("language").get(1);
assertEquals("en", s1);
assertEquals("zh", s2);
Assert.assertEquals("en", s1);
Assert.assertEquals("zh", s2);
}
}

View File

@ -36,6 +36,7 @@ import org.apache.druid.js.JavaScriptConfig;
import org.hamcrest.CoreMatchers;
import org.joda.time.DateTime;
import org.joda.time.chrono.ISOChronology;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
@ -45,8 +46,6 @@ import java.io.ByteArrayOutputStream;
import java.nio.ByteBuffer;
import java.util.List;
import static org.junit.Assert.assertEquals;
public class ProtobufInputRowParserTest
{
@Rule
@ -127,12 +126,11 @@ public class ProtobufInputRowParserTest
@Test
public void testParse() throws Exception
{
//configure parser with desc file
ProtobufInputRowParser parser = new ProtobufInputRowParser(parseSpec, "prototest.desc", "ProtoTestEvent");
//create binary of proto test event
DateTime dateTime = new DateTime(2012, 07, 12, 9, 30, ISOChronology.getInstanceUTC());
DateTime dateTime = new DateTime(2012, 7, 12, 9, 30, ISOChronology.getInstanceUTC());
ProtoTestEventWrapper.ProtoTestEvent event = ProtoTestEventWrapper.ProtoTestEvent.newBuilder()
.setDescription("description")
.setEventType(ProtoTestEventWrapper.ProtoTestEvent.EventCategory.CATEGORY_ONE)
@ -160,7 +158,7 @@ public class ProtobufInputRowParserTest
InputRow row = parser.parseBatch(ByteBuffer.wrap(out.toByteArray())).get(0);
System.out.println(row);
assertEquals(dateTime.getMillis(), row.getTimestampFromEpoch());
Assert.assertEquals(dateTime.getMillis(), row.getTimestampFromEpoch());
assertDimensionEquals(row, "id", "4711");
assertDimensionEquals(row, "isValid", "true");
@ -172,9 +170,9 @@ public class ProtobufInputRowParserTest
assertDimensionEquals(row, "bar0", "bar0");
assertEquals(47.11F, row.getMetric("someFloatColumn").floatValue(), 0.0);
assertEquals(815.0F, row.getMetric("someIntColumn").floatValue(), 0.0);
assertEquals(816.0F, row.getMetric("someLongColumn").floatValue(), 0.0);
Assert.assertEquals(47.11F, row.getMetric("someFloatColumn").floatValue(), 0.0);
Assert.assertEquals(815.0F, row.getMetric("someIntColumn").floatValue(), 0.0);
Assert.assertEquals(816.0F, row.getMetric("someLongColumn").floatValue(), 0.0);
}
@Test
@ -200,13 +198,14 @@ public class ProtobufInputRowParserTest
expectedException.expect(CoreMatchers.instanceOf(IllegalStateException.class));
expectedException.expectMessage("JavaScript is disabled");
//noinspection ResultOfMethodCallIgnored (this method call will trigger the expected exception)
parser.parseBatch(ByteBuffer.allocate(1)).get(0);
}
private void assertDimensionEquals(InputRow row, String dimension, Object expected)
{
List<String> values = row.getDimension(dimension);
assertEquals(1, values.size());
assertEquals(expected, values.get(0));
Assert.assertEquals(1, values.size());
Assert.assertEquals(expected, values.get(0));
}
}

View File

@ -30,6 +30,7 @@ import org.apache.druid.common.aws.AWSModule;
import org.apache.druid.common.aws.AWSProxyConfig;
import org.apache.druid.metadata.DefaultPasswordProvider;
import org.easymock.EasyMock;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
@ -39,9 +40,6 @@ import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;
public class TestAWSCredentialsProvider
{
@Rule
@ -60,8 +58,8 @@ public class TestAWSCredentialsProvider
AWSCredentialsProvider provider = awsModule.getAWSCredentialsProvider(config);
AWSCredentials credentials = provider.getCredentials();
assertEquals(credentials.getAWSAccessKeyId(), "accessKeySample");
assertEquals(credentials.getAWSSecretKey(), "secretKeySample");
Assert.assertEquals("accessKeySample", credentials.getAWSAccessKeyId());
Assert.assertEquals("secretKeySample", credentials.getAWSSecretKey());
// try to create
s3Module.getAmazonS3Client(
@ -88,11 +86,11 @@ public class TestAWSCredentialsProvider
AWSCredentialsProvider provider = awsModule.getAWSCredentialsProvider(config);
AWSCredentials credentials = provider.getCredentials();
assertTrue(credentials instanceof AWSSessionCredentials);
Assert.assertTrue(credentials instanceof AWSSessionCredentials);
AWSSessionCredentials sessionCredentials = (AWSSessionCredentials) credentials;
assertEquals(sessionCredentials.getAWSAccessKeyId(), "accessKeySample");
assertEquals(sessionCredentials.getAWSSecretKey(), "secretKeySample");
assertEquals(sessionCredentials.getSessionToken(), "sessionTokenSample");
Assert.assertEquals("accessKeySample", sessionCredentials.getAWSAccessKeyId());
Assert.assertEquals("secretKeySample", sessionCredentials.getAWSSecretKey());
Assert.assertEquals("sessionTokenSample", sessionCredentials.getSessionToken());
// try to create
s3Module.getAmazonS3Client(

View File

@ -22,6 +22,7 @@ package org.apache.druid.storage.s3;
import com.amazonaws.auth.AWSSessionCredentials;
import com.google.common.io.Files;
import org.apache.druid.common.aws.FileSessionCredentialsProvider;
import org.junit.Assert;
import org.junit.Rule;
import org.junit.Test;
import org.junit.rules.TemporaryFolder;
@ -31,8 +32,6 @@ import java.io.File;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import static org.junit.Assert.assertEquals;
public class TestFileSessionCredentialsProvider
{
@Rule
@ -48,8 +47,8 @@ public class TestFileSessionCredentialsProvider
FileSessionCredentialsProvider provider = new FileSessionCredentialsProvider(file.getAbsolutePath());
AWSSessionCredentials sessionCredentials = (AWSSessionCredentials) provider.getCredentials();
assertEquals(sessionCredentials.getSessionToken(), "sessionTokenSample");
assertEquals(sessionCredentials.getAWSAccessKeyId(), "accessKeySample");
assertEquals(sessionCredentials.getAWSSecretKey(), "secretKeySample");
Assert.assertEquals("sessionTokenSample", sessionCredentials.getSessionToken());
Assert.assertEquals("accessKeySample", sessionCredentials.getAWSAccessKeyId());
Assert.assertEquals("secretKeySample", sessionCredentials.getAWSSecretKey());
}
}

View File

@ -48,16 +48,13 @@ import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.replay;
public class RemoteTaskActionClientTest
{
@Rule
public ExpectedException expectedException = ExpectedException.none();
private DruidLeaderClient druidLeaderClient;
private ObjectMapper objectMapper = new DefaultObjectMapper();
private final ObjectMapper objectMapper = new DefaultObjectMapper();
@Before
public void setUp()
@ -69,11 +66,11 @@ public class RemoteTaskActionClientTest
public void testSubmitSimple() throws Exception
{
Request request = new Request(HttpMethod.POST, new URL("http://localhost:1234/xx"));
expect(druidLeaderClient.makeRequest(HttpMethod.POST, "/druid/indexer/v1/action"))
.andReturn(request);
EasyMock.expect(druidLeaderClient.makeRequest(HttpMethod.POST, "/druid/indexer/v1/action"))
.andReturn(request);
// return status code 200 and a list with size equals 1
Map<String, Object> responseBody = new HashMap<String, Object>();
Map<String, Object> responseBody = new HashMap<>();
final List<TaskLock> expectedLocks = Collections.singletonList(new TaskLock(
TaskLockType.SHARED,
"groupId",
@ -91,8 +88,8 @@ public class RemoteTaskActionClientTest
);
// set up mocks
expect(druidLeaderClient.go(request)).andReturn(responseHolder);
replay(druidLeaderClient);
EasyMock.expect(druidLeaderClient.go(request)).andReturn(responseHolder);
EasyMock.replay(druidLeaderClient);
Task task = new NoopTask("id", null, 0, 0, null, null, null);
RemoteTaskActionClient client = new RemoteTaskActionClient(
@ -112,8 +109,8 @@ public class RemoteTaskActionClientTest
{
// return status code 400 and a list with size equals 1
Request request = new Request(HttpMethod.POST, new URL("http://localhost:1234/xx"));
expect(druidLeaderClient.makeRequest(HttpMethod.POST, "/druid/indexer/v1/action"))
.andReturn(request);
EasyMock.expect(druidLeaderClient.makeRequest(HttpMethod.POST, "/druid/indexer/v1/action"))
.andReturn(request);
// return status code 200 and a list with size equals 1
FullResponseHolder responseHolder = new FullResponseHolder(
@ -123,8 +120,8 @@ public class RemoteTaskActionClientTest
);
// set up mocks
expect(druidLeaderClient.go(request)).andReturn(responseHolder);
replay(druidLeaderClient);
EasyMock.expect(druidLeaderClient.go(request)).andReturn(responseHolder);
EasyMock.replay(druidLeaderClient);
Task task = new NoopTask("id", null, 0, 0, null, null, null);
RemoteTaskActionClient client = new RemoteTaskActionClient(

View File

@ -40,10 +40,6 @@ import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import static org.easymock.EasyMock.anyObject;
import static org.easymock.EasyMock.capture;
import static org.easymock.EasyMock.eq;
@RunWith(EasyMockRunner.class)
public class SupervisorManagerTest extends EasyMockSupport
{
@ -107,7 +103,7 @@ public class SupervisorManagerTest extends EasyMockSupport
verifyAll();
resetAll();
metadataSupervisorManager.insert(eq("id1"), anyObject(NoopSupervisorSpec.class));
metadataSupervisorManager.insert(EasyMock.eq("id1"), EasyMock.anyObject(NoopSupervisorSpec.class));
supervisor2.stop(true);
replayAll();
@ -314,7 +310,7 @@ public class SupervisorManagerTest extends EasyMockSupport
// mock suspend, which stops supervisor1 and sets suspended state in metadata, flipping to supervisor2
// in TestSupervisorSpec implementation of createSuspendedSpec
resetAll();
metadataSupervisorManager.insert(eq("id1"), capture(capturedInsert));
metadataSupervisorManager.insert(EasyMock.eq("id1"), EasyMock.capture(capturedInsert));
supervisor2.start();
supervisor1.stop(true);
replayAll();
@ -328,7 +324,7 @@ public class SupervisorManagerTest extends EasyMockSupport
// mock resume, which stops supervisor2 and sets suspended to false in metadata, flipping to supervisor1
// in TestSupervisorSpec implementation of createRunningSpec
resetAll();
metadataSupervisorManager.insert(eq("id1"), capture(capturedInsert));
metadataSupervisorManager.insert(EasyMock.eq("id1"), EasyMock.capture(capturedInsert));
supervisor2.stop(true);
supervisor1.start();
replayAll();
@ -341,7 +337,7 @@ public class SupervisorManagerTest extends EasyMockSupport
// mock stop of suspended then resumed supervisor
resetAll();
metadataSupervisorManager.insert(eq("id1"), anyObject(NoopSupervisorSpec.class));
metadataSupervisorManager.insert(EasyMock.eq("id1"), EasyMock.anyObject(NoopSupervisorSpec.class));
supervisor1.stop(true);
replayAll();
@ -371,12 +367,12 @@ public class SupervisorManagerTest extends EasyMockSupport
private final Supervisor suspendedSupervisor;
public TestSupervisorSpec(String id, Supervisor supervisor)
TestSupervisorSpec(String id, Supervisor supervisor)
{
this(id, supervisor, false, null);
}
public TestSupervisorSpec(String id, Supervisor supervisor, boolean suspended, Supervisor suspendedSupervisor)
TestSupervisorSpec(String id, Supervisor supervisor, boolean suspended, Supervisor suspendedSupervisor)
{
this.id = id;
this.supervisor = supervisor;

View File

@ -69,6 +69,7 @@ import org.apache.druid.segment.indexing.DataSchema;
import org.apache.druid.segment.indexing.granularity.UniformGranularitySpec;
import org.apache.druid.segment.realtime.firehose.ChatHandlerProvider;
import org.apache.druid.server.security.AuthorizerMapper;
import org.easymock.EasyMock;
import org.easymock.EasyMockSupport;
import org.joda.time.DateTime;
import org.joda.time.Duration;
@ -89,12 +90,6 @@ import java.util.TreeMap;
import java.util.concurrent.Executor;
import java.util.concurrent.ScheduledExecutorService;
import static org.easymock.EasyMock.anyInt;
import static org.easymock.EasyMock.anyLong;
import static org.easymock.EasyMock.anyObject;
import static org.easymock.EasyMock.anyString;
import static org.easymock.EasyMock.expect;
public class SeekableStreamSupervisorStateTest extends EasyMockSupport
{
private static final ObjectMapper objectMapper = TestHelper.makeJsonMapper();
@ -134,31 +129,37 @@ public class SeekableStreamSupervisorStateTest extends EasyMockSupport
supervisorConfig = new SupervisorStateManagerConfig();
expect(spec.getSupervisorStateManagerConfig()).andReturn(supervisorConfig).anyTimes();
EasyMock.expect(spec.getSupervisorStateManagerConfig()).andReturn(supervisorConfig).anyTimes();
expect(spec.getDataSchema()).andReturn(getDataSchema()).anyTimes();
expect(spec.getIoConfig()).andReturn(getIOConfig()).anyTimes();
expect(spec.getTuningConfig()).andReturn(getTuningConfig()).anyTimes();
EasyMock.expect(spec.getDataSchema()).andReturn(getDataSchema()).anyTimes();
EasyMock.expect(spec.getIoConfig()).andReturn(getIOConfig()).anyTimes();
EasyMock.expect(spec.getTuningConfig()).andReturn(getTuningConfig()).anyTimes();
expect(taskClientFactory.build(anyObject(), anyString(), anyInt(), anyObject(), anyLong())).andReturn(
EasyMock.expect(taskClientFactory.build(
EasyMock.anyObject(),
EasyMock.anyString(),
EasyMock.anyInt(),
EasyMock.anyObject(),
EasyMock.anyLong()
)).andReturn(
indexTaskClient).anyTimes();
expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes();
expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes();
EasyMock.expect(taskMaster.getTaskRunner()).andReturn(Optional.of(taskRunner)).anyTimes();
EasyMock.expect(taskMaster.getTaskQueue()).andReturn(Optional.of(taskQueue)).anyTimes();
taskRunner.registerListener(anyObject(TaskRunnerListener.class), anyObject(Executor.class));
taskRunner.registerListener(EasyMock.anyObject(TaskRunnerListener.class), EasyMock.anyObject(Executor.class));
expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn(null).anyTimes();
expect(recordSupplier.getAssignment()).andReturn(ImmutableSet.of(shard0Partition)).anyTimes();
expect(recordSupplier.getLatestSequenceNumber(anyObject())).andReturn("10").anyTimes();
EasyMock.expect(indexerMetadataStorageCoordinator.getDataSourceMetadata(DATASOURCE)).andReturn(null).anyTimes();
EasyMock.expect(recordSupplier.getAssignment()).andReturn(ImmutableSet.of(shard0Partition)).anyTimes();
EasyMock.expect(recordSupplier.getLatestSequenceNumber(EasyMock.anyObject())).andReturn("10").anyTimes();
}
@Test
public void testRunning() throws Exception
{
expect(spec.isSuspended()).andReturn(false).anyTimes();
expect(recordSupplier.getPartitionIds(STREAM)).andReturn(ImmutableSet.of(SHARD_ID)).anyTimes();
expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes();
expect(taskQueue.add(anyObject())).andReturn(true).anyTimes();
EasyMock.expect(spec.isSuspended()).andReturn(false).anyTimes();
EasyMock.expect(recordSupplier.getPartitionIds(STREAM)).andReturn(ImmutableSet.of(SHARD_ID)).anyTimes();
EasyMock.expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes();
EasyMock.expect(taskQueue.add(EasyMock.anyObject())).andReturn(true).anyTimes();
replayAll();
@ -194,11 +195,12 @@ public class SeekableStreamSupervisorStateTest extends EasyMockSupport
@Test
public void testConnectingToStreamFail() throws Exception
{
expect(spec.isSuspended()).andReturn(false).anyTimes();
expect(recordSupplier.getPartitionIds(STREAM)).andThrow(new StreamException(new IllegalStateException(EXCEPTION_MSG)))
.anyTimes();
expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes();
expect(taskQueue.add(anyObject())).andReturn(true).anyTimes();
EasyMock.expect(spec.isSuspended()).andReturn(false).anyTimes();
EasyMock.expect(recordSupplier.getPartitionIds(STREAM))
.andThrow(new StreamException(new IllegalStateException(EXCEPTION_MSG)))
.anyTimes();
EasyMock.expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes();
EasyMock.expect(taskQueue.add(EasyMock.anyObject())).andReturn(true).anyTimes();
replayAll();
@ -248,13 +250,17 @@ public class SeekableStreamSupervisorStateTest extends EasyMockSupport
@Test
public void testConnectingToStreamFailRecoveryFailRecovery() throws Exception
{
expect(spec.isSuspended()).andReturn(false).anyTimes();
expect(recordSupplier.getPartitionIds(STREAM)).andThrow(new StreamException(new IllegalStateException())).times(3);
expect(recordSupplier.getPartitionIds(STREAM)).andReturn(ImmutableSet.of(SHARD_ID)).times(3);
expect(recordSupplier.getPartitionIds(STREAM)).andThrow(new StreamException(new IllegalStateException())).times(3);
expect(recordSupplier.getPartitionIds(STREAM)).andReturn(ImmutableSet.of(SHARD_ID)).times(3);
expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes();
expect(taskQueue.add(anyObject())).andReturn(true).anyTimes();
EasyMock.expect(spec.isSuspended()).andReturn(false).anyTimes();
EasyMock.expect(recordSupplier.getPartitionIds(STREAM))
.andThrow(new StreamException(new IllegalStateException()))
.times(3);
EasyMock.expect(recordSupplier.getPartitionIds(STREAM)).andReturn(ImmutableSet.of(SHARD_ID)).times(3);
EasyMock.expect(recordSupplier.getPartitionIds(STREAM))
.andThrow(new StreamException(new IllegalStateException()))
.times(3);
EasyMock.expect(recordSupplier.getPartitionIds(STREAM)).andReturn(ImmutableSet.of(SHARD_ID)).times(3);
EasyMock.expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes();
EasyMock.expect(taskQueue.add(EasyMock.anyObject())).andReturn(true).anyTimes();
replayAll();
@ -317,12 +323,12 @@ public class SeekableStreamSupervisorStateTest extends EasyMockSupport
@Test
public void testDiscoveringInitialTasksFailRecoveryFail() throws Exception
{
expect(spec.isSuspended()).andReturn(false).anyTimes();
expect(recordSupplier.getPartitionIds(STREAM)).andReturn(ImmutableSet.of(SHARD_ID)).anyTimes();
expect(taskStorage.getActiveTasks()).andThrow(new IllegalStateException(EXCEPTION_MSG)).times(3);
expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).times(3);
expect(taskStorage.getActiveTasks()).andThrow(new IllegalStateException(EXCEPTION_MSG)).times(3);
expect(taskQueue.add(anyObject())).andReturn(true).anyTimes();
EasyMock.expect(spec.isSuspended()).andReturn(false).anyTimes();
EasyMock.expect(recordSupplier.getPartitionIds(STREAM)).andReturn(ImmutableSet.of(SHARD_ID)).anyTimes();
EasyMock.expect(taskStorage.getActiveTasks()).andThrow(new IllegalStateException(EXCEPTION_MSG)).times(3);
EasyMock.expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).times(3);
EasyMock.expect(taskStorage.getActiveTasks()).andThrow(new IllegalStateException(EXCEPTION_MSG)).times(3);
EasyMock.expect(taskQueue.add(EasyMock.anyObject())).andReturn(true).anyTimes();
replayAll();
@ -393,12 +399,12 @@ public class SeekableStreamSupervisorStateTest extends EasyMockSupport
@Test
public void testCreatingTasksFailRecoveryFail() throws Exception
{
expect(spec.isSuspended()).andReturn(false).anyTimes();
expect(recordSupplier.getPartitionIds(STREAM)).andReturn(ImmutableSet.of(SHARD_ID)).anyTimes();
expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes();
expect(taskQueue.add(anyObject())).andThrow(new IllegalStateException(EXCEPTION_MSG)).times(3);
expect(taskQueue.add(anyObject())).andReturn(true).times(3);
expect(taskQueue.add(anyObject())).andThrow(new IllegalStateException(EXCEPTION_MSG)).times(3);
EasyMock.expect(spec.isSuspended()).andReturn(false).anyTimes();
EasyMock.expect(recordSupplier.getPartitionIds(STREAM)).andReturn(ImmutableSet.of(SHARD_ID)).anyTimes();
EasyMock.expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes();
EasyMock.expect(taskQueue.add(EasyMock.anyObject())).andThrow(new IllegalStateException(EXCEPTION_MSG)).times(3);
EasyMock.expect(taskQueue.add(EasyMock.anyObject())).andReturn(true).times(3);
EasyMock.expect(taskQueue.add(EasyMock.anyObject())).andThrow(new IllegalStateException(EXCEPTION_MSG)).times(3);
replayAll();
@ -470,10 +476,10 @@ public class SeekableStreamSupervisorStateTest extends EasyMockSupport
@Test
public void testSuspended() throws Exception
{
expect(spec.isSuspended()).andReturn(true).anyTimes();
expect(recordSupplier.getPartitionIds(STREAM)).andReturn(ImmutableSet.of(SHARD_ID)).anyTimes();
expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes();
expect(taskQueue.add(anyObject())).andReturn(true).anyTimes();
EasyMock.expect(spec.isSuspended()).andReturn(true).anyTimes();
EasyMock.expect(recordSupplier.getPartitionIds(STREAM)).andReturn(ImmutableSet.of(SHARD_ID)).anyTimes();
EasyMock.expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes();
EasyMock.expect(taskQueue.add(EasyMock.anyObject())).andReturn(true).anyTimes();
replayAll();
@ -509,10 +515,10 @@ public class SeekableStreamSupervisorStateTest extends EasyMockSupport
@Test
public void testStopping() throws Exception
{
expect(spec.isSuspended()).andReturn(false).anyTimes();
expect(recordSupplier.getPartitionIds(STREAM)).andReturn(ImmutableSet.of(SHARD_ID)).anyTimes();
expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes();
expect(taskQueue.add(anyObject())).andReturn(true).anyTimes();
EasyMock.expect(spec.isSuspended()).andReturn(false).anyTimes();
EasyMock.expect(recordSupplier.getPartitionIds(STREAM)).andReturn(ImmutableSet.of(SHARD_ID)).anyTimes();
EasyMock.expect(taskStorage.getActiveTasks()).andReturn(ImmutableList.of()).anyTimes();
EasyMock.expect(taskQueue.add(EasyMock.anyObject())).andReturn(true).anyTimes();
taskRunner.unregisterListener("testSupervisorId");
indexTaskClient.close();

View File

@ -34,8 +34,6 @@ import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import static org.apache.druid.query.QueryRunnerTestHelper.dependentPostAggMetric;
public class AggregatorUtilTest
{
@ -130,7 +128,7 @@ public class AggregatorUtilTest
Pair<List<AggregatorFactory>, List<PostAggregator>> aggregatorsPair = AggregatorUtil.condensedAggregators(
aggregatorFactories,
postAggregatorList,
dependentPostAggMetric
QueryRunnerTestHelper.dependentPostAggMetric
);
// verify aggregators
Assert.assertEquals(

View File

@ -23,6 +23,7 @@ import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import org.apache.druid.java.util.common.Cacheable;
import org.apache.druid.java.util.common.StringUtils;
import org.junit.Assert;
import org.junit.Test;
import java.nio.ByteBuffer;
@ -31,23 +32,12 @@ import java.util.Arrays;
import java.util.Collections;
import java.util.List;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
public class CacheKeyBuilderTest
{
@Test
public void testCacheKeyBuilder()
{
final Cacheable cacheable = new Cacheable()
{
@Override
public byte[] getCacheKey()
{
return new byte[]{10, 20};
}
};
final Cacheable cacheable = () -> new byte[]{10, 20};
final byte[] actual = new CacheKeyBuilder((byte) 10)
.appendBoolean(false)
@ -75,7 +65,7 @@ public class CacheKeyBuilderTest
+ cacheable.getCacheKey().length // cacheable
+ Integer.BYTES + 4 // cacheable list
+ 11; // type keys
assertEquals(expectedSize, actual.length);
Assert.assertEquals(expectedSize, actual.length);
final byte[] expected = ByteBuffer.allocate(expectedSize)
.put((byte) 10)
@ -108,7 +98,7 @@ public class CacheKeyBuilderTest
.put(cacheable.getCacheKey())
.array();
assertArrayEquals(expected, actual);
Assert.assertArrayEquals(expected, actual);
}
@Test
@ -122,25 +112,11 @@ public class CacheKeyBuilderTest
.appendStringsIgnoringOrder(Lists.newArrayList("BA", "AB"))
.build();
assertArrayEquals(key1, key2);
Assert.assertArrayEquals(key1, key2);
final Cacheable cacheable1 = new Cacheable()
{
@Override
public byte[] getCacheKey()
{
return new byte[]{1};
}
};
final Cacheable cacheable1 = () -> new byte[]{1};
final Cacheable cacheable2 = new Cacheable()
{
@Override
public byte[] getCacheKey()
{
return new byte[]{2};
}
};
final Cacheable cacheable2 = () -> new byte[]{2};
key1 = new CacheKeyBuilder((byte) 10)
.appendCacheablesIgnoringOrder(Lists.newArrayList(cacheable1, cacheable2))
@ -150,7 +126,7 @@ public class CacheKeyBuilderTest
.appendCacheablesIgnoringOrder(Lists.newArrayList(cacheable2, cacheable1))
.build();
assertArrayEquals(key1, key2);
Assert.assertArrayEquals(key1, key2);
}
@Test
@ -222,23 +198,9 @@ public class CacheKeyBuilderTest
@Test
public void testNotEqualCacheables()
{
final Cacheable test = new Cacheable()
{
@Override
public byte[] getCacheKey()
{
return StringUtils.toUtf8("test");
}
};
final Cacheable test = () -> StringUtils.toUtf8("test");
final Cacheable testtest = new Cacheable()
{
@Override
public byte[] getCacheKey()
{
return StringUtils.toUtf8("testtest");
}
};
final Cacheable testtest = () -> StringUtils.toUtf8("testtest");
final List<byte[]> keys = new ArrayList<>();
keys.add(
@ -287,7 +249,7 @@ public class CacheKeyBuilderTest
{
for (int i = 0; i < keys.size(); i++) {
for (int j = i + 1; j < keys.size(); j++) {
assertFalse(Arrays.equals(keys.get(i), keys.get(j)));
Assert.assertFalse(Arrays.equals(keys.get(i), keys.get(j)));
}
}
}
@ -303,17 +265,17 @@ public class CacheKeyBuilderTest
.appendStrings(Collections.singletonList(""))
.build();
assertFalse(Arrays.equals(key1, key2));
Assert.assertFalse(Arrays.equals(key1, key2));
key1 = new CacheKeyBuilder((byte) 10)
.appendStrings(Collections.singletonList(""))
.build();
key2 = new CacheKeyBuilder((byte) 10)
.appendStrings(Collections.singletonList((String) null))
.appendStrings(Collections.singletonList(null))
.build();
assertArrayEquals(key1, key2);
Assert.assertArrayEquals(key1, key2);
}
@Test
@ -324,10 +286,10 @@ public class CacheKeyBuilderTest
.build();
final byte[] key2 = new CacheKeyBuilder((byte) 10)
.appendCacheables(Collections.singletonList((Cacheable) null))
.appendCacheables(Collections.singletonList(null))
.build();
assertFalse(Arrays.equals(key1, key2));
Assert.assertFalse(Arrays.equals(key1, key2));
}
@Test
@ -348,34 +310,13 @@ public class CacheKeyBuilderTest
.put(StringUtils.toUtf8("test2"))
.array();
assertArrayEquals(expected, actual);
Assert.assertArrayEquals(expected, actual);
final Cacheable c1 = new Cacheable()
{
@Override
public byte[] getCacheKey()
{
return StringUtils.toUtf8("te");
}
};
final Cacheable c1 = () -> StringUtils.toUtf8("te");
final Cacheable c2 = new Cacheable()
{
@Override
public byte[] getCacheKey()
{
return StringUtils.toUtf8("test1");
}
};
final Cacheable c2 = () -> StringUtils.toUtf8("test1");
final Cacheable c3 = new Cacheable()
{
@Override
public byte[] getCacheKey()
{
return StringUtils.toUtf8("test2");
}
};
final Cacheable c3 = () -> StringUtils.toUtf8("test2");
actual = new CacheKeyBuilder((byte) 10)
.appendCacheablesIgnoringOrder(Lists.newArrayList(c3, c2, c1))
@ -390,6 +331,6 @@ public class CacheKeyBuilderTest
.put(c3.getCacheKey())
.array();
assertArrayEquals(expected, actual);
Assert.assertArrayEquals(expected, actual);
}
}

View File

@ -42,6 +42,7 @@ import org.apache.druid.query.groupby.strategy.GroupByStrategySelector;
import org.apache.druid.query.groupby.strategy.GroupByStrategyV1;
import org.apache.druid.query.groupby.strategy.GroupByStrategyV2;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
@ -54,8 +55,6 @@ import java.util.Collection;
import java.util.Collections;
import java.util.List;
import static org.junit.Assert.assertEquals;
@RunWith(Parameterized.class)
public class GroupByQueryMergeBufferTest
{
@ -65,7 +64,7 @@ public class GroupByQueryMergeBufferTest
{
private int minRemainBufferNum;
public TestBlockingPool(Supplier<ByteBuffer> generator, int limit)
TestBlockingPool(Supplier<ByteBuffer> generator, int limit)
{
super(generator, limit);
minRemainBufferNum = limit;
@ -93,18 +92,18 @@ public class GroupByQueryMergeBufferTest
return holder;
}
public void resetMinRemainBufferNum()
void resetMinRemainBufferNum()
{
minRemainBufferNum = PROCESSING_CONFIG.getNumMergeBuffers();
}
public int getMinRemainBufferNum()
int getMinRemainBufferNum()
{
return minRemainBufferNum;
}
}
public static final DruidProcessingConfig PROCESSING_CONFIG = new DruidProcessingConfig()
private static final DruidProcessingConfig PROCESSING_CONFIG = new DruidProcessingConfig()
{
@Override
public String getFormatString()
@ -164,25 +163,11 @@ public class GroupByQueryMergeBufferTest
private static final CloseableStupidPool<ByteBuffer> bufferPool = new CloseableStupidPool<>(
"GroupByQueryEngine-bufferPool",
new Supplier<ByteBuffer>()
{
@Override
public ByteBuffer get()
{
return ByteBuffer.allocateDirect(PROCESSING_CONFIG.intermediateComputeSizeBytes());
}
}
() -> ByteBuffer.allocateDirect(PROCESSING_CONFIG.intermediateComputeSizeBytes())
);
private static final TestBlockingPool mergeBufferPool = new TestBlockingPool(
new Supplier<ByteBuffer>()
{
@Override
public ByteBuffer get()
{
return ByteBuffer.allocateDirect(PROCESSING_CONFIG.intermediateComputeSizeBytes());
}
},
() -> ByteBuffer.allocateDirect(PROCESSING_CONFIG.intermediateComputeSizeBytes()),
PROCESSING_CONFIG.getNumMergeBuffers()
);
@ -198,7 +183,7 @@ public class GroupByQueryMergeBufferTest
}
);
private QueryRunner<Row> runner;
private final QueryRunner<Row> runner;
@AfterClass
public static void teardownClass()
@ -242,8 +227,8 @@ public class GroupByQueryMergeBufferTest
GroupByQueryRunnerTestHelper.runQuery(factory, runner, query);
assertEquals(2, mergeBufferPool.getMinRemainBufferNum());
assertEquals(3, mergeBufferPool.getPoolSize());
Assert.assertEquals(2, mergeBufferPool.getMinRemainBufferNum());
Assert.assertEquals(3, mergeBufferPool.getPoolSize());
}
@Test
@ -270,8 +255,8 @@ public class GroupByQueryMergeBufferTest
GroupByQueryRunnerTestHelper.runQuery(factory, runner, query);
assertEquals(1, mergeBufferPool.getMinRemainBufferNum());
assertEquals(3, mergeBufferPool.getPoolSize());
Assert.assertEquals(1, mergeBufferPool.getMinRemainBufferNum());
Assert.assertEquals(3, mergeBufferPool.getPoolSize());
}
@Test
@ -310,8 +295,8 @@ public class GroupByQueryMergeBufferTest
GroupByQueryRunnerTestHelper.runQuery(factory, runner, query);
// This should be 0 because the broker needs 2 buffers and the queryable node needs one.
assertEquals(0, mergeBufferPool.getMinRemainBufferNum());
assertEquals(3, mergeBufferPool.getPoolSize());
Assert.assertEquals(0, mergeBufferPool.getMinRemainBufferNum());
Assert.assertEquals(3, mergeBufferPool.getPoolSize());
}
@Test
@ -363,7 +348,7 @@ public class GroupByQueryMergeBufferTest
GroupByQueryRunnerTestHelper.runQuery(factory, runner, query);
// This should be 0 because the broker needs 2 buffers and the queryable node needs one.
assertEquals(0, mergeBufferPool.getMinRemainBufferNum());
assertEquals(3, mergeBufferPool.getPoolSize());
Assert.assertEquals(0, mergeBufferPool.getMinRemainBufferNum());
Assert.assertEquals(3, mergeBufferPool.getPoolSize());
}
}

View File

@ -32,12 +32,6 @@ import org.junit.Test;
import java.util.Map;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotEquals;
import static org.junit.Assert.assertTrue;
public class DimensionSelectorHavingSpecTest
{
@ -58,7 +52,7 @@ public class DimensionSelectorHavingSpecTest
);
ObjectMapper mapper = new DefaultObjectMapper();
assertEquals(dimHavingSpec, mapper.convertValue(dimSelectMap, DimensionSelectorHavingSpec.class));
Assert.assertEquals(dimHavingSpec, mapper.convertValue(dimSelectMap, DimensionSelectorHavingSpec.class));
}
@Test
@ -83,14 +77,13 @@ public class DimensionSelectorHavingSpecTest
HavingSpec dimHavingSpec13 = new DimensionSelectorHavingSpec("dim", "value", extractionFn1);
HavingSpec dimHavingSpec14 = new DimensionSelectorHavingSpec("dim", "value", extractionFn2);
assertEquals(dimHavingSpec1, dimHavingSpec2);
assertNotEquals(dimHavingSpec3, dimHavingSpec4);
assertNotEquals(dimHavingSpec5, dimHavingSpec6);
assertEquals(dimHavingSpec7, dimHavingSpec8);
assertNotEquals(dimHavingSpec9, dimHavingSpec10);
assertNotEquals(dimHavingSpec11, dimHavingSpec12);
assertNotEquals(dimHavingSpec13, dimHavingSpec14);
Assert.assertEquals(dimHavingSpec1, dimHavingSpec2);
Assert.assertNotEquals(dimHavingSpec3, dimHavingSpec4);
Assert.assertNotEquals(dimHavingSpec5, dimHavingSpec6);
Assert.assertEquals(dimHavingSpec7, dimHavingSpec8);
Assert.assertNotEquals(dimHavingSpec9, dimHavingSpec10);
Assert.assertNotEquals(dimHavingSpec11, dimHavingSpec12);
Assert.assertNotEquals(dimHavingSpec13, dimHavingSpec14);
}
@Test
@ -98,22 +91,23 @@ public class DimensionSelectorHavingSpecTest
{
ExtractionFn extractionFn = new RegexDimExtractionFn("^([^,]*),", false, "");
String expected = "DimensionSelectorHavingSpec{" +
"dimension='gender'," +
" value='m'," +
" extractionFn=regex(/^([^,]*),/, 1)}";
"dimension='gender'," +
" value='m'," +
" extractionFn=regex(/^([^,]*),/, 1)}";
Assert.assertEquals(expected, new DimensionSelectorHavingSpec("gender", "m", extractionFn).toString());
expected = "DimensionSelectorHavingSpec{" +
"dimension='gender'," +
" value='m'," +
" extractionFn=Identity}";
"dimension='gender'," +
" value='m'," +
" extractionFn=Identity}";
Assert.assertEquals(expected, new DimensionSelectorHavingSpec("gender", "m", null).toString());
}
@Test(expected = NullPointerException.class)
public void testNullDimension()
{
//noinspection ResultOfObjectAllocationIgnored (result is not needed)
new DimensionSelectorHavingSpec(null, "value", null);
}
@ -121,34 +115,33 @@ public class DimensionSelectorHavingSpecTest
public void testDimensionFilterSpec()
{
DimensionSelectorHavingSpec spec = new DimensionSelectorHavingSpec("dimension", "v", null);
assertTrue(spec.eval(getTestRow("v")));
assertTrue(spec.eval(getTestRow(ImmutableList.of("v", "v1"))));
assertFalse(spec.eval(getTestRow(ImmutableList.of())));
assertFalse(spec.eval(getTestRow("v1")));
Assert.assertTrue(spec.eval(getTestRow("v")));
Assert.assertTrue(spec.eval(getTestRow(ImmutableList.of("v", "v1"))));
Assert.assertFalse(spec.eval(getTestRow(ImmutableList.of())));
Assert.assertFalse(spec.eval(getTestRow("v1")));
spec = new DimensionSelectorHavingSpec("dimension", null, null);
assertTrue(spec.eval(getTestRow(ImmutableList.of())));
assertTrue(spec.eval(getTestRow(ImmutableList.of(""))));
assertFalse(spec.eval(getTestRow(ImmutableList.of("v"))));
assertFalse(spec.eval(getTestRow(ImmutableList.of("v", "v1"))));
Assert.assertTrue(spec.eval(getTestRow(ImmutableList.of())));
Assert.assertTrue(spec.eval(getTestRow(ImmutableList.of(""))));
Assert.assertFalse(spec.eval(getTestRow(ImmutableList.of("v"))));
Assert.assertFalse(spec.eval(getTestRow(ImmutableList.of("v", "v1"))));
spec = new DimensionSelectorHavingSpec("dimension", "", null);
assertTrue(spec.eval(getTestRow(ImmutableList.of())));
assertTrue(spec.eval(getTestRow(ImmutableList.of(""))));
assertTrue(spec.eval(getTestRow(ImmutableList.of("v", "v1", ""))));
assertFalse(spec.eval(getTestRow(ImmutableList.of("v"))));
assertFalse(spec.eval(getTestRow(ImmutableList.of("v", "v1"))));
Assert.assertTrue(spec.eval(getTestRow(ImmutableList.of())));
Assert.assertTrue(spec.eval(getTestRow(ImmutableList.of(""))));
Assert.assertTrue(spec.eval(getTestRow(ImmutableList.of("v", "v1", ""))));
Assert.assertFalse(spec.eval(getTestRow(ImmutableList.of("v"))));
Assert.assertFalse(spec.eval(getTestRow(ImmutableList.of("v", "v1"))));
ExtractionFn extractionFn = new RegexDimExtractionFn("^([^,]*),", true, "default");
spec = new DimensionSelectorHavingSpec("dimension", "v", extractionFn);
assertTrue(spec.eval(getTestRow(ImmutableList.of("v,v1", "v2,v3"))));
assertFalse(spec.eval(getTestRow(ImmutableList.of("v1,v4"))));
assertFalse(spec.eval(getTestRow(ImmutableList.of("v"))));
assertFalse(spec.eval(getTestRow(ImmutableList.of("v1", "default"))));
assertTrue(spec.eval(getTestRow(ImmutableList.of("v,default", "none"))));
spec = new DimensionSelectorHavingSpec("dimension", "default", extractionFn);
assertTrue(spec.eval(getTestRow(ImmutableList.of("v1,v2", "none"))));
Assert.assertTrue(spec.eval(getTestRow(ImmutableList.of("v,v1", "v2,v3"))));
Assert.assertFalse(spec.eval(getTestRow(ImmutableList.of("v1,v4"))));
Assert.assertFalse(spec.eval(getTestRow(ImmutableList.of("v"))));
Assert.assertFalse(spec.eval(getTestRow(ImmutableList.of("v1", "default"))));
Assert.assertTrue(spec.eval(getTestRow(ImmutableList.of("v,default", "none"))));
spec = new DimensionSelectorHavingSpec("dimension", "default", extractionFn);
Assert.assertTrue(spec.eval(getTestRow(ImmutableList.of("v1,v2", "none"))));
}
}
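The extraction-function cases above hinge on how RegexDimExtractionFn("^([^,]*),", true, "default") rewrites dimension values before the having spec compares them: the capture group keeps everything before the first comma, and a value with no comma is replaced by "default" because the replace-missing-value flag is set. A rough mapping, worked by hand from the assertions rather than generated from the code:

ExtractionFn fn = new RegexDimExtractionFn("^([^,]*),", true, "default");
fn.apply("v,v1");      // "v"       (keeps the text before the first comma)
fn.apply("v,default"); // "v"
fn.apply("v1,v2");     // "v1"
fn.apply("none");      // "default" (no comma, so the replacement value is used)
// So a row holding ["v1,v2", "none"] satisfies the spec built with value "default",
// because "none" is rewritten to "default", while a row holding ["v1", "default"] does not
// satisfy the spec built with value "v", because both entries are rewritten to "default".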

View File

@ -27,6 +27,7 @@ import org.apache.druid.data.input.Row;
import org.apache.druid.jackson.DefaultObjectMapper;
import org.apache.druid.java.util.common.StringUtils;
import org.apache.druid.query.cache.CacheKeyBuilder;
import org.junit.Assert;
import org.junit.Test;
import java.util.ArrayList;
@ -35,28 +36,23 @@ import java.util.List;
import java.util.Map;
import java.util.concurrent.atomic.AtomicInteger;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
public class HavingSpecTest
{
private static final Row ROW = new MapBasedInputRow(
0,
new ArrayList<>(),
ImmutableMap.of("metric", Float.valueOf(10))
ImmutableMap.of("metric", 10f)
);
@Test
public void testHavingClauseSerde()
{
List<HavingSpec> havings = Arrays.asList(
new GreaterThanHavingSpec("agg", Double.valueOf(1.3)),
new GreaterThanHavingSpec("agg", 1.3),
new OrHavingSpec(
Arrays.asList(
new LessThanHavingSpec("lessAgg", Long.valueOf(1L)),
new NotHavingSpec(new EqualToHavingSpec("equalAgg", Double.valueOf(2)))
new LessThanHavingSpec("lessAgg", 1L),
new NotHavingSpec(new EqualToHavingSpec("equalAgg", 2.0))
)
)
);
@ -91,7 +87,7 @@ public class HavingSpecTest
);
ObjectMapper mapper = new DefaultObjectMapper();
assertEquals(andHavingSpec, mapper.convertValue(payloadMap, AndHavingSpec.class));
Assert.assertEquals(andHavingSpec, mapper.convertValue(payloadMap, AndHavingSpec.class));
}
@Test(expected = IllegalArgumentException.class)
@ -110,111 +106,111 @@ public class HavingSpecTest
@Test
public void testGreaterThanHavingSpec()
{
GreaterThanHavingSpec spec = new GreaterThanHavingSpec("metric", Long.valueOf(Long.MAX_VALUE - 10));
assertFalse(spec.eval(getTestRow(Long.valueOf(Long.MAX_VALUE - 10))));
assertFalse(spec.eval(getTestRow(Long.valueOf(Long.MAX_VALUE - 15))));
assertTrue(spec.eval(getTestRow(Long.valueOf(Long.MAX_VALUE - 5))));
assertTrue(spec.eval(getTestRow(String.valueOf(Long.MAX_VALUE - 5))));
assertFalse(spec.eval(getTestRow(100.05f)));
GreaterThanHavingSpec spec = new GreaterThanHavingSpec("metric", Long.MAX_VALUE - 10);
Assert.assertFalse(spec.eval(getTestRow(Long.MAX_VALUE - 10)));
Assert.assertFalse(spec.eval(getTestRow(Long.MAX_VALUE - 15)));
Assert.assertTrue(spec.eval(getTestRow(Long.MAX_VALUE - 5)));
Assert.assertTrue(spec.eval(getTestRow(String.valueOf(Long.MAX_VALUE - 5))));
Assert.assertFalse(spec.eval(getTestRow(100.05f)));
spec = new GreaterThanHavingSpec("metric", 100.56f);
assertFalse(spec.eval(getTestRow(100.56f)));
assertFalse(spec.eval(getTestRow(90.53f)));
assertFalse(spec.eval(getTestRow("90.53f")));
assertTrue(spec.eval(getTestRow(101.34f)));
assertTrue(spec.eval(getTestRow(Long.MAX_VALUE)));
Assert.assertFalse(spec.eval(getTestRow(100.56f)));
Assert.assertFalse(spec.eval(getTestRow(90.53f)));
Assert.assertFalse(spec.eval(getTestRow("90.53f")));
Assert.assertTrue(spec.eval(getTestRow(101.34f)));
Assert.assertTrue(spec.eval(getTestRow(Long.MAX_VALUE)));
}
@Test
public void testLessThanHavingSpec()
{
LessThanHavingSpec spec = new LessThanHavingSpec("metric", Long.valueOf(Long.MAX_VALUE - 10));
assertFalse(spec.eval(getTestRow(Long.valueOf(Long.MAX_VALUE - 10))));
assertTrue(spec.eval(getTestRow(Long.valueOf(Long.MAX_VALUE - 15))));
assertTrue(spec.eval(getTestRow(String.valueOf(Long.MAX_VALUE - 15))));
assertFalse(spec.eval(getTestRow(Long.valueOf(Long.MAX_VALUE - 5))));
assertTrue(spec.eval(getTestRow(100.05f)));
LessThanHavingSpec spec = new LessThanHavingSpec("metric", Long.MAX_VALUE - 10);
Assert.assertFalse(spec.eval(getTestRow(Long.MAX_VALUE - 10)));
Assert.assertTrue(spec.eval(getTestRow(Long.MAX_VALUE - 15)));
Assert.assertTrue(spec.eval(getTestRow(String.valueOf(Long.MAX_VALUE - 15))));
Assert.assertFalse(spec.eval(getTestRow(Long.MAX_VALUE - 5)));
Assert.assertTrue(spec.eval(getTestRow(100.05f)));
spec = new LessThanHavingSpec("metric", 100.56f);
assertFalse(spec.eval(getTestRow(100.56f)));
assertTrue(spec.eval(getTestRow(90.53f)));
assertFalse(spec.eval(getTestRow(101.34f)));
assertFalse(spec.eval(getTestRow("101.34f")));
assertFalse(spec.eval(getTestRow(Long.MAX_VALUE)));
Assert.assertFalse(spec.eval(getTestRow(100.56f)));
Assert.assertTrue(spec.eval(getTestRow(90.53f)));
Assert.assertFalse(spec.eval(getTestRow(101.34f)));
Assert.assertFalse(spec.eval(getTestRow("101.34f")));
Assert.assertFalse(spec.eval(getTestRow(Long.MAX_VALUE)));
}
private Row getTestRow(Object metricValue)
{
return new MapBasedInputRow(0, new ArrayList<String>(), ImmutableMap.of("metric", metricValue));
return new MapBasedInputRow(0, new ArrayList<>(), ImmutableMap.of("metric", metricValue));
}
@Test
public void testEqualHavingSpec()
{
EqualToHavingSpec spec = new EqualToHavingSpec("metric", Long.valueOf(Long.MAX_VALUE - 10));
assertTrue(spec.eval(getTestRow(Long.valueOf(Long.MAX_VALUE - 10))));
assertFalse(spec.eval(getTestRow(Long.valueOf(Long.MAX_VALUE - 5))));
assertFalse(spec.eval(getTestRow(100.05f)));
EqualToHavingSpec spec = new EqualToHavingSpec("metric", Long.MAX_VALUE - 10);
Assert.assertTrue(spec.eval(getTestRow(Long.MAX_VALUE - 10)));
Assert.assertFalse(spec.eval(getTestRow(Long.MAX_VALUE - 5)));
Assert.assertFalse(spec.eval(getTestRow(100.05f)));
spec = new EqualToHavingSpec("metric", 100.56f);
assertFalse(spec.eval(getTestRow(100L)));
assertFalse(spec.eval(getTestRow(100.0)));
assertFalse(spec.eval(getTestRow(100d)));
assertFalse(spec.eval(getTestRow(100.56d))); // False since 100.56d != (double) 100.56f
assertFalse(spec.eval(getTestRow(90.53d)));
assertTrue(spec.eval(getTestRow(100.56f)));
assertFalse(spec.eval(getTestRow(90.53f)));
assertFalse(spec.eval(getTestRow(Long.MAX_VALUE)));
Assert.assertFalse(spec.eval(getTestRow(100L)));
Assert.assertFalse(spec.eval(getTestRow(100.0)));
Assert.assertFalse(spec.eval(getTestRow(100d)));
Assert.assertFalse(spec.eval(getTestRow(100.56d))); // False since 100.56d != (double) 100.56f
Assert.assertFalse(spec.eval(getTestRow(90.53d)));
Assert.assertTrue(spec.eval(getTestRow(100.56f)));
Assert.assertFalse(spec.eval(getTestRow(90.53f)));
Assert.assertFalse(spec.eval(getTestRow(Long.MAX_VALUE)));
spec = new EqualToHavingSpec("metric", 100.56d);
assertFalse(spec.eval(getTestRow(100L)));
assertFalse(spec.eval(getTestRow(100.0)));
assertFalse(spec.eval(getTestRow(100d)));
assertTrue(spec.eval(getTestRow(100.56d)));
assertFalse(spec.eval(getTestRow(90.53d)));
assertFalse(spec.eval(getTestRow(100.56f))); // False since 100.56d != (double) 100.56f
assertFalse(spec.eval(getTestRow(90.53f)));
assertFalse(spec.eval(getTestRow(Long.MAX_VALUE)));
Assert.assertFalse(spec.eval(getTestRow(100L)));
Assert.assertFalse(spec.eval(getTestRow(100.0)));
Assert.assertFalse(spec.eval(getTestRow(100d)));
Assert.assertTrue(spec.eval(getTestRow(100.56d)));
Assert.assertFalse(spec.eval(getTestRow(90.53d)));
Assert.assertFalse(spec.eval(getTestRow(100.56f))); // False since 100.56d != (double) 100.56f
Assert.assertFalse(spec.eval(getTestRow(90.53f)));
Assert.assertFalse(spec.eval(getTestRow(Long.MAX_VALUE)));
spec = new EqualToHavingSpec("metric", 100.0f);
assertTrue(spec.eval(getTestRow(100L)));
assertTrue(spec.eval(getTestRow(100.0)));
assertTrue(spec.eval(getTestRow(100d)));
assertFalse(spec.eval(getTestRow(100.56d)));
assertFalse(spec.eval(getTestRow(90.53d)));
assertFalse(spec.eval(getTestRow(100.56f)));
assertFalse(spec.eval(getTestRow(90.53f)));
assertFalse(spec.eval(getTestRow(Long.MAX_VALUE)));
Assert.assertTrue(spec.eval(getTestRow(100L)));
Assert.assertTrue(spec.eval(getTestRow(100.0)));
Assert.assertTrue(spec.eval(getTestRow(100d)));
Assert.assertFalse(spec.eval(getTestRow(100.56d)));
Assert.assertFalse(spec.eval(getTestRow(90.53d)));
Assert.assertFalse(spec.eval(getTestRow(100.56f)));
Assert.assertFalse(spec.eval(getTestRow(90.53f)));
Assert.assertFalse(spec.eval(getTestRow(Long.MAX_VALUE)));
spec = new EqualToHavingSpec("metric", 100.0d);
assertTrue(spec.eval(getTestRow(100L)));
assertTrue(spec.eval(getTestRow(100.0)));
assertTrue(spec.eval(getTestRow(100d)));
assertFalse(spec.eval(getTestRow(100.56d)));
assertFalse(spec.eval(getTestRow(90.53d)));
assertFalse(spec.eval(getTestRow(100.56f)));
assertFalse(spec.eval(getTestRow(90.53f)));
assertFalse(spec.eval(getTestRow(Long.MAX_VALUE)));
Assert.assertTrue(spec.eval(getTestRow(100L)));
Assert.assertTrue(spec.eval(getTestRow(100.0)));
Assert.assertTrue(spec.eval(getTestRow(100d)));
Assert.assertFalse(spec.eval(getTestRow(100.56d)));
Assert.assertFalse(spec.eval(getTestRow(90.53d)));
Assert.assertFalse(spec.eval(getTestRow(100.56f)));
Assert.assertFalse(spec.eval(getTestRow(90.53f)));
Assert.assertFalse(spec.eval(getTestRow(Long.MAX_VALUE)));
spec = new EqualToHavingSpec("metric", 100);
assertTrue(spec.eval(getTestRow(100L)));
assertTrue(spec.eval(getTestRow(100.0)));
assertTrue(spec.eval(getTestRow(100d)));
assertFalse(spec.eval(getTestRow(100.56d)));
assertFalse(spec.eval(getTestRow(90.53d)));
assertFalse(spec.eval(getTestRow(100.56f)));
assertFalse(spec.eval(getTestRow(90.53f)));
assertFalse(spec.eval(getTestRow(Long.MAX_VALUE)));
Assert.assertTrue(spec.eval(getTestRow(100L)));
Assert.assertTrue(spec.eval(getTestRow(100.0)));
Assert.assertTrue(spec.eval(getTestRow(100d)));
Assert.assertFalse(spec.eval(getTestRow(100.56d)));
Assert.assertFalse(spec.eval(getTestRow(90.53d)));
Assert.assertFalse(spec.eval(getTestRow(100.56f)));
Assert.assertFalse(spec.eval(getTestRow(90.53f)));
Assert.assertFalse(spec.eval(getTestRow(Long.MAX_VALUE)));
spec = new EqualToHavingSpec("metric", 100L);
assertTrue(spec.eval(getTestRow(100L)));
assertTrue(spec.eval(getTestRow(100.0)));
assertTrue(spec.eval(getTestRow(100d)));
assertFalse(spec.eval(getTestRow(100.56d)));
assertFalse(spec.eval(getTestRow(90.53d)));
assertFalse(spec.eval(getTestRow(100.56f)));
assertFalse(spec.eval(getTestRow(90.53f)));
assertFalse(spec.eval(getTestRow(Long.MAX_VALUE)));
Assert.assertTrue(spec.eval(getTestRow(100L)));
Assert.assertTrue(spec.eval(getTestRow(100.0)));
Assert.assertTrue(spec.eval(getTestRow(100d)));
Assert.assertFalse(spec.eval(getTestRow(100.56d)));
Assert.assertFalse(spec.eval(getTestRow(90.53d)));
Assert.assertFalse(spec.eval(getTestRow(100.56f)));
Assert.assertFalse(spec.eval(getTestRow(90.53f)));
Assert.assertFalse(spec.eval(getTestRow(Long.MAX_VALUE)));
}
private static class CountingHavingSpec extends BaseHavingSpec
@ -251,7 +247,7 @@ public class HavingSpecTest
{
AtomicInteger counter = new AtomicInteger(0);
AndHavingSpec spec = new AndHavingSpec(ImmutableList.of(
(HavingSpec) new CountingHavingSpec(counter, true),
new CountingHavingSpec(counter, true),
new CountingHavingSpec(counter, false),
new CountingHavingSpec(counter, true),
new CountingHavingSpec(counter, false)
@ -259,7 +255,7 @@ public class HavingSpecTest
spec.eval(ROW);
assertEquals(2, counter.get());
Assert.assertEquals(2, counter.get());
}
@Test
@ -267,7 +263,7 @@ public class HavingSpecTest
{
AtomicInteger counter = new AtomicInteger(0);
AndHavingSpec spec = new AndHavingSpec(ImmutableList.of(
(HavingSpec) new CountingHavingSpec(counter, true),
new CountingHavingSpec(counter, true),
new CountingHavingSpec(counter, true),
new CountingHavingSpec(counter, true),
new CountingHavingSpec(counter, true)
@ -275,11 +271,11 @@ public class HavingSpecTest
spec.eval(ROW);
assertEquals(4, counter.get());
Assert.assertEquals(4, counter.get());
counter.set(0);
spec = new AndHavingSpec(ImmutableList.of(
(HavingSpec) new CountingHavingSpec(counter, false),
new CountingHavingSpec(counter, false),
new CountingHavingSpec(counter, true),
new CountingHavingSpec(counter, true),
new CountingHavingSpec(counter, true)
@ -287,7 +283,7 @@ public class HavingSpecTest
spec.eval(ROW);
assertEquals(1, counter.get());
Assert.assertEquals(1, counter.get());
}
@Test
@ -295,7 +291,7 @@ public class HavingSpecTest
{
AtomicInteger counter = new AtomicInteger(0);
OrHavingSpec spec = new OrHavingSpec(ImmutableList.of(
(HavingSpec) new CountingHavingSpec(counter, true),
new CountingHavingSpec(counter, true),
new CountingHavingSpec(counter, true),
new CountingHavingSpec(counter, true),
new CountingHavingSpec(counter, false)
@ -303,7 +299,7 @@ public class HavingSpecTest
spec.eval(ROW);
assertEquals(1, counter.get());
Assert.assertEquals(1, counter.get());
}
@Test
@ -311,7 +307,7 @@ public class HavingSpecTest
{
AtomicInteger counter = new AtomicInteger(0);
OrHavingSpec spec = new OrHavingSpec(ImmutableList.of(
(HavingSpec) new CountingHavingSpec(counter, false),
new CountingHavingSpec(counter, false),
new CountingHavingSpec(counter, false),
new CountingHavingSpec(counter, false),
new CountingHavingSpec(counter, false)
@ -319,11 +315,11 @@ public class HavingSpecTest
spec.eval(ROW);
assertEquals(4, counter.get());
Assert.assertEquals(4, counter.get());
counter.set(0);
spec = new OrHavingSpec(ImmutableList.of(
(HavingSpec) new CountingHavingSpec(counter, false),
new CountingHavingSpec(counter, false),
new CountingHavingSpec(counter, false),
new CountingHavingSpec(counter, false),
new CountingHavingSpec(counter, true)
@ -331,17 +327,16 @@ public class HavingSpecTest
spec.eval(ROW);
assertEquals(4, counter.get());
Assert.assertEquals(4, counter.get());
}
@Test
public void testNotHavingSepc()
{
NotHavingSpec spec = new NotHavingSpec(HavingSpec.NEVER);
assertTrue(spec.eval(ROW));
Assert.assertTrue(spec.eval(ROW));
spec = new NotHavingSpec(HavingSpec.ALWAYS);
assertFalse(spec.eval(ROW));
Assert.assertFalse(spec.eval(ROW));
}
}
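The 100.56d != (double) 100.56f comments in testEqualHavingSpec deserve a standalone illustration: 100.56 has no exact binary representation, and the nearest float and the nearest double are different numbers, so widening the float does not reproduce the double literal. A minimal sketch assuming only JUnit 4 on the classpath (not part of this test suite):

double widened = (double) 100.56f;              // 100.55999755859375
Assert.assertNotEquals(100.56d, widened, 0.0);  // off by roughly 2.4e-6
Assert.assertEquals(100.56d, widened, 1e-3);    // equal only up to a tolerance
// 100.0f, by contrast, is exactly representable, so (double) 100.0f == 100.0d,
// which is consistent with the EqualToHavingSpec("metric", 100.0f) cases above
// matching 100L, 100.0 and 100d.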

View File

@ -28,6 +28,7 @@ import org.apache.druid.query.Druids;
import org.apache.druid.query.Druids.SearchQueryBuilder;
import org.apache.druid.query.QueryPlus;
import org.apache.druid.query.QueryRunner;
import org.apache.druid.query.QueryRunnerTestHelper;
import org.apache.druid.query.Result;
import org.apache.druid.segment.IncrementalIndexSegment;
import org.apache.druid.segment.QueryableIndex;
@ -50,20 +51,6 @@ import java.util.Map;
import java.util.Set;
import java.util.TreeMap;
import static org.apache.druid.query.QueryRunnerTestHelper.NOOP_QUERYWATCHER;
import static org.apache.druid.query.QueryRunnerTestHelper.allGran;
import static org.apache.druid.query.QueryRunnerTestHelper.dataSource;
import static org.apache.druid.query.QueryRunnerTestHelper.fullOnIntervalSpec;
import static org.apache.druid.query.QueryRunnerTestHelper.makeQueryRunner;
import static org.apache.druid.query.QueryRunnerTestHelper.marketDimension;
import static org.apache.druid.query.QueryRunnerTestHelper.noopIntervalChunkingQueryRunnerDecorator;
import static org.apache.druid.query.QueryRunnerTestHelper.placementDimension;
import static org.apache.druid.query.QueryRunnerTestHelper.placementishDimension;
import static org.apache.druid.query.QueryRunnerTestHelper.qualityDimension;
import static org.apache.druid.query.QueryRunnerTestHelper.transformToConstructionFeeder;
/**
*/
@RunWith(Parameterized.class)
public class SearchQueryRunnerWithCaseTest
{
@ -94,25 +81,25 @@ public class SearchQueryRunnerWithCaseTest
final List<QueryRunner<Result<SearchResultValue>>> runners = new ArrayList<>();
for (SearchQueryConfig config : configs) {
runners.addAll(Arrays.asList(
makeQueryRunner(
QueryRunnerTestHelper.makeQueryRunner(
makeRunnerFactory(config),
SegmentId.dummy("index1"),
new IncrementalIndexSegment(index1, SegmentId.dummy("index1")),
"index1"
),
makeQueryRunner(
QueryRunnerTestHelper.makeQueryRunner(
makeRunnerFactory(config),
SegmentId.dummy("index2"),
new IncrementalIndexSegment(index2, SegmentId.dummy("index2")),
"index2"
),
makeQueryRunner(
QueryRunnerTestHelper.makeQueryRunner(
makeRunnerFactory(config),
SegmentId.dummy("index3"),
new QueryableIndexSegment(index3, SegmentId.dummy("index3")),
"index3"
),
makeQueryRunner(
QueryRunnerTestHelper.makeQueryRunner(
makeRunnerFactory(config),
SegmentId.dummy("index4"),
new QueryableIndexSegment(index4, SegmentId.dummy("index4")),
@ -121,7 +108,7 @@ public class SearchQueryRunnerWithCaseTest
));
}
return transformToConstructionFeeder(runners);
return QueryRunnerTestHelper.transformToConstructionFeeder(runners);
}
static SearchQueryRunnerFactory makeRunnerFactory(final SearchQueryConfig config)
@ -130,9 +117,9 @@ public class SearchQueryRunnerWithCaseTest
new SearchStrategySelector(Suppliers.ofInstance(config)),
new SearchQueryQueryToolChest(
config,
noopIntervalChunkingQueryRunnerDecorator()
QueryRunnerTestHelper.noopIntervalChunkingQueryRunnerDecorator()
),
NOOP_QUERYWATCHER
QueryRunnerTestHelper.NOOP_QUERYWATCHER
);
}
@ -148,9 +135,9 @@ public class SearchQueryRunnerWithCaseTest
private Druids.SearchQueryBuilder testBuilder()
{
return Druids.newSearchQueryBuilder()
.dataSource(dataSource)
.granularity(allGran)
.intervals(fullOnIntervalSpec);
.dataSource(QueryRunnerTestHelper.dataSource)
.granularity(QueryRunnerTestHelper.allGran)
.intervals(QueryRunnerTestHelper.fullOnIntervalSpec);
}
@Test
@ -161,15 +148,15 @@ public class SearchQueryRunnerWithCaseTest
SearchQuery searchQuery;
searchQuery = builder.query("SPOT").build();
expectedResults.put(marketDimension, Sets.newHashSet("spot", "SPot"));
expectedResults.put(QueryRunnerTestHelper.marketDimension, Sets.newHashSet("spot", "SPot"));
checkSearchQuery(searchQuery, expectedResults);
searchQuery = builder.query("spot", true).build();
expectedResults.put(marketDimension, Sets.newHashSet("spot"));
expectedResults.put(QueryRunnerTestHelper.marketDimension, Sets.newHashSet("spot"));
checkSearchQuery(searchQuery, expectedResults);
searchQuery = builder.query("SPot", true).build();
expectedResults.put(marketDimension, Sets.newHashSet("SPot"));
expectedResults.put(QueryRunnerTestHelper.marketDimension, Sets.newHashSet("SPot"));
checkSearchQuery(searchQuery, expectedResults);
}
@ -178,17 +165,23 @@ public class SearchQueryRunnerWithCaseTest
{
SearchQuery searchQuery;
Druids.SearchQueryBuilder builder = testBuilder()
.dimensions(Arrays.asList(placementDimension, placementishDimension));
.dimensions(Arrays.asList(
QueryRunnerTestHelper.placementDimension,
QueryRunnerTestHelper.placementishDimension
));
Map<String, Set<String>> expectedResults = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
searchQuery = builder.query("PREFERRED").build();
expectedResults.put(placementDimension, Sets.newHashSet("PREFERRED", "preferred", "PREFERRed"));
expectedResults.put(placementishDimension, Sets.newHashSet("preferred", "Preferred"));
expectedResults.put(
QueryRunnerTestHelper.placementDimension,
Sets.newHashSet("PREFERRED", "preferred", "PREFERRed")
);
expectedResults.put(QueryRunnerTestHelper.placementishDimension, Sets.newHashSet("preferred", "Preferred"));
checkSearchQuery(searchQuery, expectedResults);
searchQuery = builder.query("preferred", true).build();
expectedResults.put(placementDimension, Sets.newHashSet("preferred"));
expectedResults.put(placementishDimension, Sets.newHashSet("preferred"));
expectedResults.put(QueryRunnerTestHelper.placementDimension, Sets.newHashSet("preferred"));
expectedResults.put(QueryRunnerTestHelper.placementishDimension, Sets.newHashSet("preferred"));
checkSearchQuery(searchQuery, expectedResults);
}
@ -197,12 +190,12 @@ public class SearchQueryRunnerWithCaseTest
{
SearchQuery searchQuery;
Druids.SearchQueryBuilder builder = testBuilder()
.dimensions(Collections.singletonList(qualityDimension))
.dimensions(Collections.singletonList(QueryRunnerTestHelper.qualityDimension))
.intervals("2011-01-12T00:00:00.000Z/2011-01-13T00:00:00.000Z");
Map<String, Set<String>> expectedResults = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
searchQuery = builder.query("otive").build();
expectedResults.put(qualityDimension, Sets.newHashSet("AutoMotive"));
expectedResults.put(QueryRunnerTestHelper.qualityDimension, Sets.newHashSet("AutoMotive"));
checkSearchQuery(searchQuery, expectedResults);
}
@ -211,12 +204,12 @@ public class SearchQueryRunnerWithCaseTest
{
SearchQuery searchQuery;
Druids.SearchQueryBuilder builder = testBuilder()
.dimensions(Collections.singletonList(qualityDimension))
.dimensions(Collections.singletonList(QueryRunnerTestHelper.qualityDimension))
.intervals("2011-01-10T00:00:00.000Z/2011-01-11T00:00:00.000Z");
Map<String, Set<String>> expectedResults = new TreeMap<>(String.CASE_INSENSITIVE_ORDER);
searchQuery = builder.query("business").build();
expectedResults.put(qualityDimension, new HashSet<>());
expectedResults.put(QueryRunnerTestHelper.qualityDimension, new HashSet<>());
checkSearchQuery(searchQuery, expectedResults);
}
@ -228,11 +221,11 @@ public class SearchQueryRunnerWithCaseTest
SearchQuery searchQuery;
searchQuery = builder.fragments(Arrays.asList("auto", "ve")).build();
expectedResults.put(qualityDimension, Sets.newHashSet("automotive", "AutoMotive"));
expectedResults.put(QueryRunnerTestHelper.qualityDimension, Sets.newHashSet("automotive", "AutoMotive"));
checkSearchQuery(searchQuery, expectedResults);
searchQuery = builder.fragments(Arrays.asList("auto", "ve"), true).build();
expectedResults.put(qualityDimension, Sets.newHashSet("automotive"));
expectedResults.put(QueryRunnerTestHelper.qualityDimension, Sets.newHashSet("automotive"));
checkSearchQuery(searchQuery, expectedResults);
}

View File

@ -28,6 +28,7 @@ import org.apache.druid.java.util.common.Pair;
import org.apache.druid.java.util.common.granularity.Granularity;
import org.apache.druid.java.util.common.guava.Sequence;
import org.apache.druid.query.QueryMetrics;
import org.apache.druid.query.QueryRunnerTestHelper;
import org.apache.druid.query.aggregation.DoubleMaxAggregatorFactory;
import org.apache.druid.query.aggregation.DoubleMinAggregatorFactory;
import org.apache.druid.query.filter.Filter;
@ -51,14 +52,6 @@ import org.junit.Test;
import javax.annotation.Nullable;
import java.util.Collections;
import static org.apache.druid.query.QueryRunnerTestHelper.addRowsIndexConstant;
import static org.apache.druid.query.QueryRunnerTestHelper.allGran;
import static org.apache.druid.query.QueryRunnerTestHelper.commonDoubleAggregators;
import static org.apache.druid.query.QueryRunnerTestHelper.dataSource;
import static org.apache.druid.query.QueryRunnerTestHelper.indexMetric;
import static org.apache.druid.query.QueryRunnerTestHelper.marketDimension;
import static org.apache.druid.query.QueryRunnerTestHelper.qualityDimension;
public class TopNMetricSpecOptimizationsTest
{
@Test
@ -68,16 +61,16 @@ public class TopNMetricSpecOptimizationsTest
int cardinality = 1234;
int threshold = 4;
TopNQuery query = new TopNQueryBuilder()
.dataSource(dataSource)
.granularity(allGran)
.dimension(marketDimension)
.metric(indexMetric)
.dataSource(QueryRunnerTestHelper.dataSource)
.granularity(QueryRunnerTestHelper.allGran)
.dimension(QueryRunnerTestHelper.marketDimension)
.metric(QueryRunnerTestHelper.indexMetric)
.threshold(threshold)
.intervals("2018-05-30T00:00:00Z/2018-05-31T00:00:00Z")
.aggregators(
Lists.newArrayList(
Iterables.concat(
commonDoubleAggregators,
QueryRunnerTestHelper.commonDoubleAggregators,
Lists.newArrayList(
new DoubleMaxAggregatorFactory("maxIndex", "index"),
new DoubleMinAggregatorFactory("minIndex", "index")
@ -85,7 +78,7 @@ public class TopNMetricSpecOptimizationsTest
)
)
)
.postAggregators(Collections.singletonList(addRowsIndexConstant))
.postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant))
.build();
StorageAdapter adapter =
@ -112,16 +105,16 @@ public class TopNMetricSpecOptimizationsTest
int cardinality = 1234;
int threshold = 4;
TopNQuery query = new TopNQueryBuilder()
.dataSource(dataSource)
.granularity(allGran)
.dimension(marketDimension)
.metric(indexMetric)
.dataSource(QueryRunnerTestHelper.dataSource)
.granularity(QueryRunnerTestHelper.allGran)
.dimension(QueryRunnerTestHelper.marketDimension)
.metric(QueryRunnerTestHelper.indexMetric)
.threshold(threshold)
.intervals("2018-05-30T00:00:00Z/2018-05-30T01:00:00Z")
.aggregators(
Lists.newArrayList(
Iterables.concat(
commonDoubleAggregators,
QueryRunnerTestHelper.commonDoubleAggregators,
Lists.newArrayList(
new DoubleMaxAggregatorFactory("maxIndex", "index"),
new DoubleMinAggregatorFactory("minIndex", "index")
@ -129,7 +122,7 @@ public class TopNMetricSpecOptimizationsTest
)
)
)
.postAggregators(Collections.singletonList(addRowsIndexConstant))
.postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant))
.build();
StorageAdapter adapter =
@ -157,16 +150,16 @@ public class TopNMetricSpecOptimizationsTest
int cardinality = 1234;
int threshold = 4;
TopNQuery query = new TopNQueryBuilder()
.dataSource(dataSource)
.granularity(allGran)
.dimension(marketDimension)
.metric(indexMetric)
.dataSource(QueryRunnerTestHelper.dataSource)
.granularity(QueryRunnerTestHelper.allGran)
.dimension(QueryRunnerTestHelper.marketDimension)
.metric(QueryRunnerTestHelper.indexMetric)
.threshold(threshold)
.intervals("2018-05-30T00:00:00Z/2018-05-30T01:00:00Z")
.aggregators(
Lists.newArrayList(
Iterables.concat(
commonDoubleAggregators,
QueryRunnerTestHelper.commonDoubleAggregators,
Lists.newArrayList(
new DoubleMaxAggregatorFactory("maxIndex", "index"),
new DoubleMinAggregatorFactory("minIndex", "index")
@ -174,7 +167,7 @@ public class TopNMetricSpecOptimizationsTest
)
)
)
.postAggregators(Collections.singletonList(addRowsIndexConstant))
.postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant))
.build();
StorageAdapter adapter =
@ -202,17 +195,17 @@ public class TopNMetricSpecOptimizationsTest
int cardinality = 1234;
int threshold = 4;
TopNQuery query = new TopNQueryBuilder()
.dataSource(dataSource)
.granularity(allGran)
.dimension(marketDimension)
.filters(qualityDimension, "entertainment")
.metric(indexMetric)
.dataSource(QueryRunnerTestHelper.dataSource)
.granularity(QueryRunnerTestHelper.allGran)
.dimension(QueryRunnerTestHelper.marketDimension)
.filters(QueryRunnerTestHelper.qualityDimension, "entertainment")
.metric(QueryRunnerTestHelper.indexMetric)
.threshold(threshold)
.intervals("2018-05-30T00:00:00Z/2018-05-31T00:00:00Z")
.aggregators(
Lists.newArrayList(
Iterables.concat(
commonDoubleAggregators,
QueryRunnerTestHelper.commonDoubleAggregators,
Lists.newArrayList(
new DoubleMaxAggregatorFactory("maxIndex", "index"),
new DoubleMinAggregatorFactory("minIndex", "index")
@ -220,7 +213,7 @@ public class TopNMetricSpecOptimizationsTest
)
)
)
.postAggregators(Collections.singletonList(addRowsIndexConstant))
.postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant))
.build();
StorageAdapter adapter =
@ -247,16 +240,16 @@ public class TopNMetricSpecOptimizationsTest
int cardinality = 1234;
int threshold = 4;
TopNQuery query = new TopNQueryBuilder()
.dataSource(dataSource)
.granularity(allGran)
.dimension(marketDimension)
.metric(indexMetric)
.dataSource(QueryRunnerTestHelper.dataSource)
.granularity(QueryRunnerTestHelper.allGran)
.dimension(QueryRunnerTestHelper.marketDimension)
.metric(QueryRunnerTestHelper.indexMetric)
.threshold(threshold)
.intervals("2018-05-30T00:00:00Z/2018-05-31T00:00:00Z")
.aggregators(
Lists.newArrayList(
Iterables.concat(
commonDoubleAggregators,
QueryRunnerTestHelper.commonDoubleAggregators,
Lists.newArrayList(
new DoubleMaxAggregatorFactory("maxIndex", "index"),
new DoubleMinAggregatorFactory("minIndex", "index")
@ -264,7 +257,7 @@ public class TopNMetricSpecOptimizationsTest
)
)
)
.postAggregators(Collections.singletonList(addRowsIndexConstant))
.postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant))
.build();

View File

@ -24,6 +24,7 @@ import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.common.collect.Lists;
import org.apache.druid.query.Query;
import org.apache.druid.query.QueryRunnerTestHelper;
import org.apache.druid.query.aggregation.DoubleMaxAggregatorFactory;
import org.apache.druid.query.aggregation.DoubleMinAggregatorFactory;
import org.apache.druid.query.dimension.ExtractionDimensionSpec;
@ -38,15 +39,6 @@ import org.junit.Test;
import java.io.IOException;
import java.util.Collections;
import static org.apache.druid.query.QueryRunnerTestHelper.addRowsIndexConstant;
import static org.apache.druid.query.QueryRunnerTestHelper.allGran;
import static org.apache.druid.query.QueryRunnerTestHelper.commonDoubleAggregators;
import static org.apache.druid.query.QueryRunnerTestHelper.dataSource;
import static org.apache.druid.query.QueryRunnerTestHelper.fullOnIntervalSpec;
import static org.apache.druid.query.QueryRunnerTestHelper.indexMetric;
import static org.apache.druid.query.QueryRunnerTestHelper.marketDimension;
import static org.apache.druid.query.QueryRunnerTestHelper.rowsCount;
public class TopNQueryTest
{
private static final ObjectMapper jsonMapper = TestHelper.makeJsonMapper();
@ -55,16 +47,16 @@ public class TopNQueryTest
public void testQuerySerialization() throws IOException
{
Query query = new TopNQueryBuilder()
.dataSource(dataSource)
.granularity(allGran)
.dimension(marketDimension)
.metric(indexMetric)
.dataSource(QueryRunnerTestHelper.dataSource)
.granularity(QueryRunnerTestHelper.allGran)
.dimension(QueryRunnerTestHelper.marketDimension)
.metric(QueryRunnerTestHelper.indexMetric)
.threshold(4)
.intervals(fullOnIntervalSpec)
.intervals(QueryRunnerTestHelper.fullOnIntervalSpec)
.aggregators(
Lists.newArrayList(
Iterables.concat(
commonDoubleAggregators,
QueryRunnerTestHelper.commonDoubleAggregators,
Lists.newArrayList(
new DoubleMaxAggregatorFactory("maxIndex", "index"),
new DoubleMinAggregatorFactory("minIndex", "index")
@ -72,7 +64,7 @@ public class TopNQueryTest
)
)
)
.postAggregators(Collections.singletonList(addRowsIndexConstant))
.postAggregators(Collections.singletonList(QueryRunnerTestHelper.addRowsIndexConstant))
.build();
String json = jsonMapper.writeValueAsString(query);
@ -86,22 +78,28 @@ public class TopNQueryTest
public void testQuerySerdeWithLookupExtractionFn() throws IOException
{
final TopNQuery expectedQuery = new TopNQueryBuilder()
.dataSource(dataSource)
.granularity(allGran)
.dataSource(QueryRunnerTestHelper.dataSource)
.granularity(QueryRunnerTestHelper.allGran)
.dimension(
new ExtractionDimensionSpec(
marketDimension,
marketDimension,
new LookupExtractionFn(new MapLookupExtractor(ImmutableMap.of("foo", "bar"), false), true, null, false, false)
QueryRunnerTestHelper.marketDimension,
QueryRunnerTestHelper.marketDimension,
new LookupExtractionFn(
new MapLookupExtractor(ImmutableMap.of("foo", "bar"), false),
true,
null,
false,
false
)
)
)
.metric(new NumericTopNMetricSpec(indexMetric))
.metric(new NumericTopNMetricSpec(QueryRunnerTestHelper.indexMetric))
.threshold(2)
.intervals(fullOnIntervalSpec.getIntervals())
.intervals(QueryRunnerTestHelper.fullOnIntervalSpec.getIntervals())
.aggregators(
Lists.newArrayList(
Iterables.concat(
commonDoubleAggregators,
QueryRunnerTestHelper.commonDoubleAggregators,
Lists.newArrayList(
new DoubleMaxAggregatorFactory("maxIndex", "index"),
new DoubleMinAggregatorFactory("minIndex", "index")
@ -118,13 +116,13 @@ public class TopNQueryTest
public void testQuerySerdeWithAlphaNumericTopNMetricSpec() throws IOException
{
TopNQuery expectedQuery = new TopNQueryBuilder()
.dataSource(dataSource)
.granularity(allGran)
.dimension(new LegacyDimensionSpec(marketDimension))
.dataSource(QueryRunnerTestHelper.dataSource)
.granularity(QueryRunnerTestHelper.allGran)
.dimension(new LegacyDimensionSpec(QueryRunnerTestHelper.marketDimension))
.metric(new DimensionTopNMetricSpec(null, StringComparators.ALPHANUMERIC))
.threshold(2)
.intervals(fullOnIntervalSpec.getIntervals())
.aggregators(Collections.singletonList(rowsCount))
.intervals(QueryRunnerTestHelper.fullOnIntervalSpec.getIntervals())
.aggregators(Collections.singletonList(QueryRunnerTestHelper.rowsCount))
.build();
String jsonQuery = "{\n"
+ " \"queryType\": \"topN\",\n"
@ -156,5 +154,4 @@ public class TopNQueryTest
);
Assert.assertEquals(expectedQuery, actualQuery);
}
}

View File

@ -21,32 +21,29 @@ package org.apache.druid.segment;
import it.unimi.dsi.fastutil.ints.IntIterators;
import it.unimi.dsi.fastutil.ints.IntListIterator;
import org.junit.Assert;
import org.junit.Test;
import static org.apache.druid.segment.IntIteratorUtils.skip;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
public class IntIteratorUtilsTest
{
@Test
public void testSkip()
{
assertEquals(0, skip(IntIterators.EMPTY_ITERATOR, 5));
assertEquals(0, skip(IntIterators.EMPTY_ITERATOR, 0));
Assert.assertEquals(0, IntIteratorUtils.skip(IntIterators.EMPTY_ITERATOR, 5));
Assert.assertEquals(0, IntIteratorUtils.skip(IntIterators.EMPTY_ITERATOR, 0));
IntListIterator it = IntIterators.fromTo(0, 10);
assertEquals(3, skip(it, 3));
assertEquals(3, it.nextInt());
assertEquals(6, skip(it, 100));
assertEquals(0, skip(it, 100));
assertFalse(it.hasNext());
Assert.assertEquals(3, IntIteratorUtils.skip(it, 3));
Assert.assertEquals(3, it.nextInt());
Assert.assertEquals(6, IntIteratorUtils.skip(it, 100));
Assert.assertEquals(0, IntIteratorUtils.skip(it, 100));
Assert.assertFalse(it.hasNext());
}
@Test(expected = IllegalArgumentException.class)
public void testNegativeSkipArgument()
{
skip(IntIterators.fromTo(0, 10), -1);
IntIteratorUtils.skip(IntIterators.fromTo(0, 10), -1);
}
}

View File

@ -20,17 +20,16 @@
package org.apache.druid.segment;
import it.unimi.dsi.fastutil.ints.IntList;
import org.junit.Assert;
import org.junit.Test;
import static org.junit.Assert.assertEquals;
public class IntListUtilsTest
{
@Test(expected = IndexOutOfBoundsException.class)
public void testEmptyRangeIntList()
{
final IntList list = IntListUtils.fromTo(10, 10);
assertEquals(0, list.size());
Assert.assertEquals(0, list.size());
list.getInt(0);
}
@ -45,7 +44,7 @@ public class IntListUtilsTest
{
final IntList list = IntListUtils.fromTo(20, 120);
for (int i = 0; i < 100; i++) {
assertEquals(i + 20, list.getInt(i));
Assert.assertEquals(i + 20, list.getInt(i));
}
}
}

View File

@ -22,6 +22,7 @@ package org.apache.druid.segment;
import com.google.common.collect.Lists;
import it.unimi.dsi.fastutil.ints.IntArrayList;
import it.unimi.dsi.fastutil.ints.IntIterator;
import it.unimi.dsi.fastutil.ints.IntIterators;
import it.unimi.dsi.fastutil.ints.IntList;
import it.unimi.dsi.fastutil.ints.IntLists;
import org.junit.Assert;
@ -34,34 +35,32 @@ import java.util.List;
import java.util.NoSuchElementException;
import java.util.concurrent.ThreadLocalRandom;
import static it.unimi.dsi.fastutil.ints.IntIterators.EMPTY_ITERATOR;
import static java.lang.Integer.MAX_VALUE;
import static org.apache.druid.segment.IntIteratorUtils.mergeAscending;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.fail;
public class MergeIntIteratorTest
{
@Test(expected = NoSuchElementException.class)
public void testNoIterators()
{
IntIterator it = mergeAscending(Collections.emptyList());
IntIterator it = IntIteratorUtils.mergeAscending(Collections.emptyList());
assertEmpty(it);
}
@Test(expected = NoSuchElementException.class)
public void testMergeEmptyIterators()
{
IntIterator it = mergeAscending(Arrays.asList(EMPTY_ITERATOR, EMPTY_ITERATOR));
IntIterator it = IntIteratorUtils.mergeAscending(Arrays.asList(
IntIterators.EMPTY_ITERATOR,
IntIterators.EMPTY_ITERATOR
));
assertEmpty(it);
}
private static void assertEmpty(IntIterator it)
{
assertFalse(it.hasNext());
Assert.assertFalse(it.hasNext());
try {
//noinspection deprecation
it.next();
fail("expected NoSuchElementException on it.next() after it.hasNext() = false");
Assert.fail("expected NoSuchElementException on it.next() after it.hasNext() = false");
}
catch (NoSuchElementException ignore) {
// expected
@ -82,14 +81,14 @@ public class MergeIntIteratorTest
IntLists.singleton(Integer.MIN_VALUE),
IntLists.singleton(-1),
IntLists.singleton(0),
IntLists.singleton(MAX_VALUE)
IntLists.singleton(Integer.MAX_VALUE)
);
for (int i = 0; i < lists.size() + 1; i++) {
assertAscending(mergeAscending(iteratorsFromLists(lists)));
assertAscending(IntIteratorUtils.mergeAscending(iteratorsFromLists(lists)));
Collections.rotate(lists, 1);
}
Collections.shuffle(lists);
assertAscending(mergeAscending(iteratorsFromLists(lists)));
assertAscending(IntIteratorUtils.mergeAscending(iteratorsFromLists(lists)));
}
private static List<IntIterator> iteratorsFromLists(List<IntList> lists)
@ -115,12 +114,12 @@ public class MergeIntIteratorTest
lists.get(r.nextInt(numIterators)).add(j);
}
for (int j = 0; j < lists.size() + 1; j++) {
assertAscending(mergeAscending(iteratorsFromLists(lists)));
assertAscending(IntIteratorUtils.mergeAscending(iteratorsFromLists(lists)));
Collections.rotate(lists, 1);
}
for (int j = 0; j < 10; j++) {
Collections.shuffle(lists);
assertAscending(mergeAscending(iteratorsFromLists(lists)));
assertAscending(IntIteratorUtils.mergeAscending(iteratorsFromLists(lists)));
}
}
}

View File

@ -34,6 +34,7 @@ import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMedium;
import org.apache.druid.segment.writeout.SegmentWriteOutMedium;
import org.apache.druid.segment.writeout.WriteOutBytes;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
@ -49,8 +50,6 @@ import java.util.List;
import java.util.Random;
import java.util.Set;
import static org.junit.Assert.assertEquals;
@RunWith(Parameterized.class)
public class CompressedColumnarIntsSerializerTest
{
@ -135,7 +134,7 @@ public class CompressedColumnarIntsSerializerTest
writer.writeTo(writeOutBytes, smoosher);
smoosher.close();
assertEquals(writtenLength, supplierFromList.getSerializedSize());
Assert.assertEquals(writtenLength, supplierFromList.getSerializedSize());
// read from ByteBuffer and check values
CompressedColumnarIntsSupplier supplierFromByteBuffer = CompressedColumnarIntsSupplier.fromByteBuffer(
@ -143,9 +142,9 @@ public class CompressedColumnarIntsSerializerTest
byteOrder
);
ColumnarInts columnarInts = supplierFromByteBuffer.get();
assertEquals(vals.length, columnarInts.size());
Assert.assertEquals(vals.length, columnarInts.size());
for (int i = 0; i < vals.length; ++i) {
assertEquals(vals[i], columnarInts.get(i));
Assert.assertEquals(vals[i], columnarInts.get(i));
}
CloseQuietly.close(columnarInts);
}
@ -221,9 +220,9 @@ public class CompressedColumnarIntsSerializerTest
byteOrder
);
ColumnarInts columnarInts = supplierFromByteBuffer.get();
assertEquals(vals.length, columnarInts.size());
Assert.assertEquals(vals.length, columnarInts.size());
for (int i = 0; i < vals.length; ++i) {
assertEquals(vals[i], columnarInts.get(i));
Assert.assertEquals(vals[i], columnarInts.get(i));
}
CloseQuietly.close(columnarInts);
mapper.close();

View File

@ -34,6 +34,7 @@ import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMedium;
import org.apache.druid.segment.writeout.SegmentWriteOutMedium;
import org.apache.druid.segment.writeout.WriteOutBytes;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
@ -48,8 +49,6 @@ import java.util.List;
import java.util.Random;
import java.util.Set;
import static org.junit.Assert.assertEquals;
@RunWith(Parameterized.class)
public class CompressedVSizeColumnarIntsSerializerTest
{
@ -135,7 +134,7 @@ public class CompressedVSizeColumnarIntsSerializerTest
writer.writeTo(writeOutBytes, smoosher);
smoosher.close();
assertEquals(writtenLength, supplierFromList.getSerializedSize());
Assert.assertEquals(writtenLength, supplierFromList.getSerializedSize());
// read from ByteBuffer and check values
CompressedVSizeColumnarIntsSupplier supplierFromByteBuffer = CompressedVSizeColumnarIntsSupplier.fromByteBuffer(
@ -144,7 +143,7 @@ public class CompressedVSizeColumnarIntsSerializerTest
);
ColumnarInts columnarInts = supplierFromByteBuffer.get();
for (int i = 0; i < vals.length; ++i) {
assertEquals(vals[i], columnarInts.get(i));
Assert.assertEquals(vals[i], columnarInts.get(i));
}
CloseQuietly.close(columnarInts);
}
@ -219,7 +218,7 @@ public class CompressedVSizeColumnarIntsSerializerTest
ColumnarInts columnarInts = supplierFromByteBuffer.get();
for (int i = 0; i < vals.length; ++i) {
assertEquals(vals[i], columnarInts.get(i));
Assert.assertEquals(vals[i], columnarInts.get(i));
}
CloseQuietly.close(columnarInts);
mapper.close();

View File

@ -32,6 +32,7 @@ import org.apache.druid.java.util.common.io.smoosh.SmooshedWriter;
import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMedium;
import org.apache.druid.segment.writeout.SegmentWriteOutMedium;
import org.apache.druid.segment.writeout.WriteOutBytes;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Rule;
import org.junit.Test;
@ -50,8 +51,6 @@ import java.util.Random;
import java.util.Set;
import java.util.stream.IntStream;
import static org.junit.Assert.assertEquals;
@RunWith(Parameterized.class)
public class V3CompressedVSizeColumnarMultiIntsSerializerTest
{
@ -147,7 +146,7 @@ public class V3CompressedVSizeColumnarMultiIntsSerializerTest
writer.writeTo(writeOutBytes, smoosher);
smoosher.close();
assertEquals(writtenLength, supplierFromIterable.getSerializedSize());
Assert.assertEquals(writtenLength, supplierFromIterable.getSerializedSize());
// read from ByteBuffer and check values
V3CompressedVSizeColumnarMultiIntsSupplier supplierFromByteBuffer = V3CompressedVSizeColumnarMultiIntsSupplier.fromByteBuffer(
@ -156,19 +155,19 @@ public class V3CompressedVSizeColumnarMultiIntsSerializerTest
);
try (final ColumnarMultiInts columnarMultiInts = supplierFromByteBuffer.get()) {
assertEquals(columnarMultiInts.size(), vals.size());
Assert.assertEquals(columnarMultiInts.size(), vals.size());
for (int i = 0; i < vals.size(); ++i) {
IndexedInts subVals = columnarMultiInts.get(i);
assertEquals(subVals.size(), vals.get(i).length);
Assert.assertEquals(subVals.size(), vals.get(i).length);
for (int j = 0, size = subVals.size(); j < size; ++j) {
assertEquals(subVals.get(j), vals.get(i)[j]);
Assert.assertEquals(subVals.get(j), vals.get(i)[j]);
}
}
}
}
}
int getMaxValue(final List<int[]> vals)
private int getMaxValue(final List<int[]> vals)
{
return vals
.stream()
@ -270,12 +269,12 @@ public class V3CompressedVSizeColumnarMultiIntsSerializerTest
V3CompressedVSizeColumnarMultiIntsSupplier supplierFromByteBuffer =
V3CompressedVSizeColumnarMultiIntsSupplier.fromByteBuffer(mapper.mapFile("test"), byteOrder);
ColumnarMultiInts columnarMultiInts = supplierFromByteBuffer.get();
assertEquals(columnarMultiInts.size(), vals.size());
Assert.assertEquals(columnarMultiInts.size(), vals.size());
for (int i = 0; i < vals.size(); ++i) {
IndexedInts subVals = columnarMultiInts.get(i);
assertEquals(subVals.size(), vals.get(i).length);
Assert.assertEquals(subVals.size(), vals.get(i).length);
for (int j = 0, size = subVals.size(); j < size; ++j) {
assertEquals(subVals.get(j), vals.get(i)[j]);
Assert.assertEquals(subVals.get(j), vals.get(i)[j]);
}
}
CloseQuietly.close(columnarMultiInts);

View File

@ -25,14 +25,13 @@ import org.apache.druid.segment.writeout.OffHeapMemorySegmentWriteOutMedium;
import org.apache.druid.segment.writeout.SegmentWriteOutMedium;
import org.apache.druid.segment.writeout.WriteOutBytes;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import java.nio.ByteBuffer;
import java.util.Random;
import static org.junit.Assert.assertEquals;
public class VSizeColumnarIntsSerializerTest
{
private static final int[] MAX_VALUES = new int[]{0xFF, 0xFFFF, 0xFFFFFF, 0x0FFFFFFF};
@ -75,15 +74,15 @@ public class VSizeColumnarIntsSerializerTest
WriteOutBytes writeOutBytes = segmentWriteOutMedium.makeWriteOutBytes();
writer.writeTo(writeOutBytes, null);
assertEquals(writtenLength, intsFromList.getSerializedSize());
Assert.assertEquals(writtenLength, intsFromList.getSerializedSize());
// read from ByteBuffer and check values
VSizeColumnarInts intsFromByteBuffer = VSizeColumnarInts.readFromByteBuffer(
ByteBuffer.wrap(IOUtils.toByteArray(writeOutBytes.asInputStream()))
);
assertEquals(vals.length, intsFromByteBuffer.size());
Assert.assertEquals(vals.length, intsFromByteBuffer.size());
for (int i = 0; i < vals.length; ++i) {
assertEquals(vals[i], intsFromByteBuffer.get(i));
Assert.assertEquals(vals[i], intsFromByteBuffer.get(i));
}
}

View File

@ -27,12 +27,11 @@ import org.apache.druid.collections.bitmap.ImmutableBitmap;
import org.apache.druid.collections.bitmap.MutableBitmap;
import org.apache.druid.segment.IntIteratorUtils;
import org.apache.druid.segment.column.BitmapIndex;
import org.junit.Assert;
import org.junit.Test;
import java.util.List;
import static org.junit.Assert.assertEquals;
public class FiltersTest
{
@Test
@ -48,7 +47,7 @@ public class FiltersTest
10000
);
final double expected = 0.1;
assertEquals(expected, estimated, 0.00001);
Assert.assertEquals(expected, estimated, 0.00001);
}
private static BitmapIndex getBitmapIndex(final List<ImmutableBitmap> bitmapList)

View File

@ -32,19 +32,13 @@ import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
import static org.easymock.EasyMock.expect;
/**
*/
public class ServerSelectorTest
{
TierSelectorStrategy tierSelectorStrategy;
@Before
public void setUp()
{
tierSelectorStrategy = EasyMock.createMock(TierSelectorStrategy.class);
expect(tierSelectorStrategy.getComparator()).andReturn(Integer::compare).anyTimes();
TierSelectorStrategy tierSelectorStrategy = EasyMock.createMock(TierSelectorStrategy.class);
EasyMock.expect(tierSelectorStrategy.getComparator()).andReturn(Integer::compare).anyTimes();
}
@Test

View File

@ -43,11 +43,6 @@ import java.util.HashMap;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import static org.easymock.EasyMock.createMock;
import static org.easymock.EasyMock.expect;
import static org.easymock.EasyMock.replay;
import static org.easymock.EasyMock.reset;
public class LookupReferencesManagerTest
{
private static final String LOOKUP_TIER = "lookupTier";
@ -67,7 +62,7 @@ public class LookupReferencesManagerTest
druidLeaderClient = EasyMock.createMock(DruidLeaderClient.class);
config = createMock(LookupListeningAnnouncerConfig.class);
config = EasyMock.createMock(LookupListeningAnnouncerConfig.class);
lookupExtractorFactory = new MapLookupExtractorFactory(
ImmutableMap.of(
@ -99,20 +94,20 @@ public class LookupReferencesManagerTest
lookupMap.put("testMockForStartStop", container);
String strResult = mapper.writeValueAsString(lookupMap);
Request request = new Request(HttpMethod.GET, new URL("http://localhost:1234/xx"));
expect(config.getLookupTier()).andReturn(LOOKUP_TIER).anyTimes();
replay(config);
expect(druidLeaderClient.makeRequest(
EasyMock.expect(config.getLookupTier()).andReturn(LOOKUP_TIER).anyTimes();
EasyMock.replay(config);
EasyMock.expect(druidLeaderClient.makeRequest(
HttpMethod.GET,
"/druid/coordinator/v1/lookups/config/lookupTier?detailed=true"
))
.andReturn(request);
.andReturn(request);
FullResponseHolder responseHolder = new FullResponseHolder(
HttpResponseStatus.OK,
EasyMock.createNiceMock(HttpResponse.class),
new StringBuilder().append(strResult)
);
expect(druidLeaderClient.go(request)).andReturn(responseHolder);
replay(druidLeaderClient);
EasyMock.expect(druidLeaderClient.go(request)).andReturn(responseHolder);
EasyMock.replay(druidLeaderClient);
Assert.assertFalse(lookupReferencesManager.lifecycleLock.awaitStarted(1, TimeUnit.MICROSECONDS));
Assert.assertNull(lookupReferencesManager.mainThread);
Assert.assertNull(lookupReferencesManager.stateRef.get());
@ -163,20 +158,20 @@ public class LookupReferencesManagerTest
lookupMap.put("testMockForAddGetRemove", container);
String strResult = mapper.writeValueAsString(lookupMap);
Request request = new Request(HttpMethod.GET, new URL("http://localhost:1234/xx"));
expect(config.getLookupTier()).andReturn(LOOKUP_TIER).anyTimes();
replay(config);
expect(druidLeaderClient.makeRequest(
EasyMock.expect(config.getLookupTier()).andReturn(LOOKUP_TIER).anyTimes();
EasyMock.replay(config);
EasyMock.expect(druidLeaderClient.makeRequest(
HttpMethod.GET,
"/druid/coordinator/v1/lookups/config/lookupTier?detailed=true"
))
.andReturn(request);
.andReturn(request);
FullResponseHolder responseHolder = new FullResponseHolder(
HttpResponseStatus.OK,
EasyMock.createNiceMock(HttpResponse.class),
new StringBuilder().append(strResult)
);
expect(druidLeaderClient.go(request)).andReturn(responseHolder);
replay(druidLeaderClient);
EasyMock.expect(druidLeaderClient.go(request)).andReturn(responseHolder);
EasyMock.replay(druidLeaderClient);
lookupReferencesManager.start();
Assert.assertNull(lookupReferencesManager.get("test"));
@ -204,20 +199,20 @@ public class LookupReferencesManagerTest
lookupMap.put("testMockForCloseIsCalledAfterStopping", container);
String strResult = mapper.writeValueAsString(lookupMap);
Request request = new Request(HttpMethod.GET, new URL("http://localhost:1234/xx"));
expect(config.getLookupTier()).andReturn(LOOKUP_TIER).anyTimes();
replay(config);
expect(druidLeaderClient.makeRequest(
EasyMock.expect(config.getLookupTier()).andReturn(LOOKUP_TIER).anyTimes();
EasyMock.replay(config);
EasyMock.expect(druidLeaderClient.makeRequest(
HttpMethod.GET,
"/druid/coordinator/v1/lookups/config/lookupTier?detailed=true"
))
.andReturn(request);
.andReturn(request);
FullResponseHolder responseHolder = new FullResponseHolder(
HttpResponseStatus.OK,
EasyMock.createNiceMock(HttpResponse.class),
new StringBuilder().append(strResult)
);
expect(druidLeaderClient.go(request)).andReturn(responseHolder);
replay(druidLeaderClient);
EasyMock.expect(druidLeaderClient.go(request)).andReturn(responseHolder);
EasyMock.replay(druidLeaderClient);
lookupReferencesManager.start();
lookupReferencesManager.add("testMock", new LookupExtractorFactoryContainer("0", lookupExtractorFactory));
lookupReferencesManager.handlePendingNotices();
@ -238,20 +233,20 @@ public class LookupReferencesManagerTest
lookupMap.put("testMockForDestroyIsCalledAfterRemove", container);
String strResult = mapper.writeValueAsString(lookupMap);
Request request = new Request(HttpMethod.GET, new URL("http://localhost:1234/xx"));
expect(config.getLookupTier()).andReturn(LOOKUP_TIER).anyTimes();
replay(config);
expect(druidLeaderClient.makeRequest(
EasyMock.expect(config.getLookupTier()).andReturn(LOOKUP_TIER).anyTimes();
EasyMock.replay(config);
EasyMock.expect(druidLeaderClient.makeRequest(
HttpMethod.GET,
"/druid/coordinator/v1/lookups/config/lookupTier?detailed=true"
))
.andReturn(request);
.andReturn(request);
FullResponseHolder responseHolder = new FullResponseHolder(
HttpResponseStatus.OK,
EasyMock.createNiceMock(HttpResponse.class),
new StringBuilder().append(strResult)
);
expect(druidLeaderClient.go(request)).andReturn(responseHolder);
replay(druidLeaderClient);
EasyMock.expect(druidLeaderClient.go(request)).andReturn(responseHolder);
EasyMock.replay(druidLeaderClient);
lookupReferencesManager.start();
lookupReferencesManager.add("testMock", new LookupExtractorFactoryContainer("0", lookupExtractorFactory));
lookupReferencesManager.handlePendingNotices();
@ -269,20 +264,20 @@ public class LookupReferencesManagerTest
lookupMap.put("testMockForGetNotThere", container);
String strResult = mapper.writeValueAsString(lookupMap);
Request request = new Request(HttpMethod.GET, new URL("http://localhost:1234/xx"));
expect(config.getLookupTier()).andReturn(LOOKUP_TIER).anyTimes();
replay(config);
expect(druidLeaderClient.makeRequest(
EasyMock.expect(config.getLookupTier()).andReturn(LOOKUP_TIER).anyTimes();
EasyMock.replay(config);
EasyMock.expect(druidLeaderClient.makeRequest(
HttpMethod.GET,
"/druid/coordinator/v1/lookups/config/lookupTier?detailed=true"
))
.andReturn(request);
.andReturn(request);
FullResponseHolder responseHolder = new FullResponseHolder(
HttpResponseStatus.OK,
EasyMock.createNiceMock(HttpResponse.class),
new StringBuilder().append(strResult)
);
expect(druidLeaderClient.go(request)).andReturn(responseHolder);
replay(druidLeaderClient);
EasyMock.expect(druidLeaderClient.go(request)).andReturn(responseHolder);
EasyMock.replay(druidLeaderClient);
lookupReferencesManager.start();
Assert.assertNull(lookupReferencesManager.get("notThere"));
}
@ -302,20 +297,20 @@ public class LookupReferencesManagerTest
lookupMap.put("testMockForUpdateWithHigherVersion", container);
String strResult = mapper.writeValueAsString(lookupMap);
Request request = new Request(HttpMethod.GET, new URL("http://localhost:1234/xx"));
expect(config.getLookupTier()).andReturn(LOOKUP_TIER).anyTimes();
replay(config);
expect(druidLeaderClient.makeRequest(
EasyMock.expect(config.getLookupTier()).andReturn(LOOKUP_TIER).anyTimes();
EasyMock.replay(config);
EasyMock.expect(druidLeaderClient.makeRequest(
HttpMethod.GET,
"/druid/coordinator/v1/lookups/config/lookupTier?detailed=true"
))
.andReturn(request);
.andReturn(request);
FullResponseHolder responseHolder = new FullResponseHolder(
HttpResponseStatus.OK,
EasyMock.createNiceMock(HttpResponse.class),
new StringBuilder().append(strResult)
);
expect(druidLeaderClient.go(request)).andReturn(responseHolder);
replay(druidLeaderClient);
EasyMock.expect(druidLeaderClient.go(request)).andReturn(responseHolder);
EasyMock.replay(druidLeaderClient);
lookupReferencesManager.start();
lookupReferencesManager.add("testName", new LookupExtractorFactoryContainer("1", lookupExtractorFactory1));
lookupReferencesManager.handlePendingNotices();
@ -339,20 +334,20 @@ public class LookupReferencesManagerTest
lookupMap.put("testMockForUpdateWithLowerVersion", container);
String strResult = mapper.writeValueAsString(lookupMap);
Request request = new Request(HttpMethod.GET, new URL("http://localhost:1234/xx"));
expect(config.getLookupTier()).andReturn(LOOKUP_TIER).anyTimes();
replay(config);
expect(druidLeaderClient.makeRequest(
EasyMock.expect(config.getLookupTier()).andReturn(LOOKUP_TIER).anyTimes();
EasyMock.replay(config);
EasyMock.expect(druidLeaderClient.makeRequest(
HttpMethod.GET,
"/druid/coordinator/v1/lookups/config/lookupTier?detailed=true"
))
.andReturn(request);
.andReturn(request);
FullResponseHolder responseHolder = new FullResponseHolder(
HttpResponseStatus.OK,
EasyMock.createNiceMock(HttpResponse.class),
new StringBuilder().append(strResult)
);
expect(druidLeaderClient.go(request)).andReturn(responseHolder);
replay(druidLeaderClient);
EasyMock.expect(druidLeaderClient.go(request)).andReturn(responseHolder);
EasyMock.replay(druidLeaderClient);
lookupReferencesManager.start();
lookupReferencesManager.add("testName", new LookupExtractorFactoryContainer("1", lookupExtractorFactory1));
lookupReferencesManager.handlePendingNotices();
@ -370,20 +365,20 @@ public class LookupReferencesManagerTest
lookupMap.put("testMockForRemoveNonExisting", container);
String strResult = mapper.writeValueAsString(lookupMap);
Request request = new Request(HttpMethod.GET, new URL("http://localhost:1234/xx"));
expect(config.getLookupTier()).andReturn(LOOKUP_TIER).anyTimes();
replay(config);
expect(druidLeaderClient.makeRequest(
EasyMock.expect(config.getLookupTier()).andReturn(LOOKUP_TIER).anyTimes();
EasyMock.replay(config);
EasyMock.expect(druidLeaderClient.makeRequest(
HttpMethod.GET,
"/druid/coordinator/v1/lookups/config/lookupTier?detailed=true"
))
.andReturn(request);
.andReturn(request);
FullResponseHolder responseHolder = new FullResponseHolder(
HttpResponseStatus.OK,
EasyMock.createNiceMock(HttpResponse.class),
new StringBuilder().append(strResult)
);
expect(druidLeaderClient.go(request)).andReturn(responseHolder);
replay(druidLeaderClient);
EasyMock.expect(druidLeaderClient.go(request)).andReturn(responseHolder);
EasyMock.replay(druidLeaderClient);
lookupReferencesManager.start();
lookupReferencesManager.remove("test");
lookupReferencesManager.handlePendingNotices();
@ -424,20 +419,20 @@ public class LookupReferencesManagerTest
Map<String, Object> lookupMap = new HashMap<>();
String strResult = mapper.writeValueAsString(lookupMap);
Request request = new Request(HttpMethod.GET, new URL("http://localhost:1234/xx"));
expect(config.getLookupTier()).andReturn(LOOKUP_TIER).anyTimes();
replay(config);
expect(druidLeaderClient.makeRequest(
EasyMock.expect(config.getLookupTier()).andReturn(LOOKUP_TIER).anyTimes();
EasyMock.replay(config);
EasyMock.expect(druidLeaderClient.makeRequest(
HttpMethod.GET,
"/druid/coordinator/v1/lookups/config/lookupTier?detailed=true"
))
.andReturn(request);
.andReturn(request);
FullResponseHolder responseHolder = new FullResponseHolder(
HttpResponseStatus.OK,
EasyMock.createNiceMock(HttpResponse.class),
new StringBuilder().append(strResult)
);
expect(druidLeaderClient.go(request)).andReturn(responseHolder);
replay(druidLeaderClient);
EasyMock.expect(druidLeaderClient.go(request)).andReturn(responseHolder);
EasyMock.replay(druidLeaderClient);
lookupReferencesManager.start();
lookupReferencesManager.add("one", container1);
lookupReferencesManager.add("two", container2);
@ -469,20 +464,20 @@ public class LookupReferencesManagerTest
lookupMap.put("testMockForRealModeWithMainThread", container);
String strResult = mapper.writeValueAsString(lookupMap);
Request request = new Request(HttpMethod.GET, new URL("http://localhost:1234/xx"));
expect(config.getLookupTier()).andReturn(LOOKUP_TIER).anyTimes();
replay(config);
expect(druidLeaderClient.makeRequest(
EasyMock.expect(config.getLookupTier()).andReturn(LOOKUP_TIER).anyTimes();
EasyMock.replay(config);
EasyMock.expect(druidLeaderClient.makeRequest(
HttpMethod.GET,
"/druid/coordinator/v1/lookups/config/lookupTier?detailed=true"
))
.andReturn(request);
.andReturn(request);
FullResponseHolder responseHolder = new FullResponseHolder(
HttpResponseStatus.OK,
EasyMock.createNiceMock(HttpResponse.class),
new StringBuilder().append(strResult)
);
expect(druidLeaderClient.go(request)).andReturn(responseHolder);
replay(druidLeaderClient);
EasyMock.expect(druidLeaderClient.go(request)).andReturn(responseHolder);
EasyMock.replay(druidLeaderClient);
lookupReferencesManager.start();
Assert.assertTrue(lookupReferencesManager.mainThread.isAlive());
@ -548,20 +543,20 @@ public class LookupReferencesManagerTest
lookupMap.put("testLookup3", container3);
String strResult = mapper.writeValueAsString(lookupMap);
Request request = new Request(HttpMethod.GET, new URL("http://localhost:1234/xx"));
expect(config.getLookupTier()).andReturn(LOOKUP_TIER).anyTimes();
replay(config);
expect(druidLeaderClient.makeRequest(
EasyMock.expect(config.getLookupTier()).andReturn(LOOKUP_TIER).anyTimes();
EasyMock.replay(config);
EasyMock.expect(druidLeaderClient.makeRequest(
HttpMethod.GET,
"/druid/coordinator/v1/lookups/config/lookupTier?detailed=true"
))
.andReturn(request);
.andReturn(request);
FullResponseHolder responseHolder = new FullResponseHolder(
HttpResponseStatus.OK,
EasyMock.createNiceMock(HttpResponse.class),
new StringBuilder().append(strResult)
);
expect(druidLeaderClient.go(request)).andReturn(responseHolder);
replay(druidLeaderClient);
EasyMock.expect(druidLeaderClient.go(request)).andReturn(responseHolder);
EasyMock.replay(druidLeaderClient);
lookupReferencesManager.start();
Assert.assertEquals(container1, lookupReferencesManager.get("testLookup1"));
@ -592,21 +587,21 @@ public class LookupReferencesManagerTest
lookupMap.put("testMockForLoadLookupOnCoordinatorFailure", container);
String strResult = mapper.writeValueAsString(lookupMap);
Request request = new Request(HttpMethod.GET, new URL("http://localhost:1234/xx"));
expect(config.getLookupTier()).andReturn(LOOKUP_TIER).anyTimes();
replay(config);
expect(druidLeaderClient.makeRequest(
EasyMock.expect(config.getLookupTier()).andReturn(LOOKUP_TIER).anyTimes();
EasyMock.replay(config);
EasyMock.expect(druidLeaderClient.makeRequest(
HttpMethod.GET,
"/druid/coordinator/v1/lookups/config/lookupTier?detailed=true"
))
.andReturn(request)
.anyTimes();
.andReturn(request)
.anyTimes();
FullResponseHolder responseHolder = new FullResponseHolder(
HttpResponseStatus.NOT_FOUND,
EasyMock.createNiceMock(HttpResponse.class),
new StringBuilder().append(strResult)
);
expect(druidLeaderClient.go(request)).andThrow(new IllegalStateException()).anyTimes();
replay(druidLeaderClient);
EasyMock.expect(druidLeaderClient.go(request)).andThrow(new IllegalStateException()).anyTimes();
EasyMock.replay(druidLeaderClient);
lookupReferencesManager.start();
lookupReferencesManager.add("testMockForLoadLookupOnCoordinatorFailure", container);
@ -628,18 +623,18 @@ public class LookupReferencesManagerTest
config,
true
);
reset(config);
reset(druidLeaderClient);
expect(config.getLookupTier()).andReturn(LOOKUP_TIER).anyTimes();
replay(config);
expect(druidLeaderClient.makeRequest(
EasyMock.reset(config);
EasyMock.reset(druidLeaderClient);
EasyMock.expect(config.getLookupTier()).andReturn(LOOKUP_TIER).anyTimes();
EasyMock.replay(config);
EasyMock.expect(druidLeaderClient.makeRequest(
HttpMethod.GET,
"/druid/coordinator/v1/lookups/config/lookupTier?detailed=true"
))
.andReturn(request)
.anyTimes();
expect(druidLeaderClient.go(request)).andThrow(new IllegalStateException()).anyTimes();
replay(druidLeaderClient);
.andReturn(request)
.anyTimes();
EasyMock.expect(druidLeaderClient.go(request)).andThrow(new IllegalStateException()).anyTimes();
EasyMock.replay(druidLeaderClient);
lookupReferencesManager.start();
Assert.assertEquals(container, lookupReferencesManager.get("testMockForLoadLookupOnCoordinatorFailure"));
}
@ -665,19 +660,19 @@ public class LookupReferencesManagerTest
lookupMap.put("testMockForDisableLookupSync", container);
String strResult = mapper.writeValueAsString(lookupMap);
Request request = new Request(HttpMethod.GET, new URL("http://localhost:1234/xx"));
expect(config.getLookupTier()).andReturn(LOOKUP_TIER).anyTimes();
replay(config);
expect(druidLeaderClient.makeRequest(
EasyMock.expect(config.getLookupTier()).andReturn(LOOKUP_TIER).anyTimes();
EasyMock.replay(config);
EasyMock.expect(druidLeaderClient.makeRequest(
HttpMethod.GET,
"/druid/coordinator/v1/lookups/config/lookupTier?detailed=true"
))
.andReturn(request);
.andReturn(request);
FullResponseHolder responseHolder = new FullResponseHolder(
HttpResponseStatus.OK,
EasyMock.createNiceMock(HttpResponse.class),
new StringBuilder().append(strResult)
);
expect(druidLeaderClient.go(request)).andReturn(responseHolder);
EasyMock.expect(druidLeaderClient.go(request)).andReturn(responseHolder);
lookupReferencesManager.start();
Assert.assertNull(lookupReferencesManager.get("testMockForDisableLookupSync"));
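Every hunk in this test applies the same mechanical substitution: expect(...), replay(...), and reset(...) that previously came from "import static org.easymock.EasyMock.*" are now spelled EasyMock.expect(...), EasyMock.replay(...), and EasyMock.reset(...). A minimal, self-contained sketch of the qualified style follows; the LeaderClient interface and the literal values are hypothetical stand-ins for illustration, not Druid classes.

import org.easymock.EasyMock;
import org.junit.Assert;
import org.junit.Test;

public class QualifiedEasyMockStyleTest
{
  // Hypothetical collaborator standing in for DruidLeaderClient in the hunks above.
  interface LeaderClient
  {
    String go(String path);
  }

  @Test
  public void testQualifiedCalls()
  {
    LeaderClient client = EasyMock.createMock(LeaderClient.class);

    // Fully qualified calls replace the former static imports of expect()/replay()/verify().
    EasyMock.expect(client.go("/druid/coordinator/v1/lookups/config/lookupTier?detailed=true"))
            .andReturn("{}")
            .anyTimes();
    EasyMock.replay(client);

    Assert.assertEquals("{}", client.go("/druid/coordinator/v1/lookups/config/lookupTier?detailed=true"));
    EasyMock.verify(client);
  }
}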

View File

@ -36,18 +36,14 @@ import java.util.List;
import java.util.Map;
import java.util.Set;
import static org.apache.druid.server.StatusResource.ModuleVersion;
/**
*/
public class StatusResourceTest
{
@Test
public void testLoadedModules()
{
Collection<DruidModule> modules = ImmutableList.of((DruidModule) new InitializationTest.TestDruidModule());
List<ModuleVersion> statusResourceModuleList = new StatusResource.Status(modules).getModules();
Collection<DruidModule> modules = ImmutableList.of(new InitializationTest.TestDruidModule());
List<StatusResource.ModuleVersion> statusResourceModuleList = new StatusResource.Status(modules).getModules();
Assert.assertEquals("Status should have all modules loaded!", modules.size(), statusResourceModuleList.size());
@ -55,7 +51,7 @@ public class StatusResourceTest
String moduleName = module.getClass().getCanonicalName();
boolean contains = Boolean.FALSE;
for (ModuleVersion version : statusResourceModuleList) {
for (StatusResource.ModuleVersion version : statusResourceModuleList) {
if (version.getName().equals(moduleName)) {
contains = Boolean.TRUE;
}
@ -75,4 +71,3 @@ public class StatusResourceTest
hiddenProperties.forEach((property) -> Assert.assertNull(returnedProperties.get(property)));
}
}
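The StatusResourceTest change extends the rule to statically imported nested types: ModuleVersion is written StatusResource.ModuleVersion at every use site, and the redundant (DruidModule) cast is dropped. Below is a generic sketch of the qualified nested-type style; Status and ModuleVersion here are simplified hypothetical classes, not the Druid ones.

import java.util.Collections;
import java.util.List;

public class NestedTypeQualificationExample
{
  // Hypothetical enclosing class with a nested type, mirroring StatusResource.ModuleVersion.
  static class Status
  {
    static class ModuleVersion
    {
      private final String name;

      ModuleVersion(String name)
      {
        this.name = name;
      }

      String getName()
      {
        return name;
      }
    }
  }

  public static void main(String[] args)
  {
    // Qualify through the enclosing class instead of "import static ...StatusResource.ModuleVersion".
    List<Status.ModuleVersion> versions = Collections.singletonList(new Status.ModuleVersion("druid-core"));
    System.out.println(versions.get(0).getName());
  }
}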

View File

@ -30,6 +30,7 @@ import org.apache.druid.server.coordination.ServerType;
import org.apache.druid.timeline.DataSegment;
import org.apache.druid.timeline.partition.NoneShardSpec;
import org.easymock.EasyMock;
import org.hamcrest.Matchers;
import org.joda.time.DateTime;
import org.joda.time.Interval;
import org.junit.After;
@ -47,13 +48,6 @@ import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Collectors;
import java.util.stream.IntStream;
import static org.easymock.EasyMock.anyObject;
import static org.easymock.EasyMock.replay;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.is;
/**
*/
public class DruidCoordinatorBalancerTest
{
private static final int MAX_SEGMENTS_TO_MOVE = 5;
@ -222,14 +216,14 @@ public class DruidCoordinatorBalancerTest
EasyMock.expect(strategy.pickSegmentToMove(ImmutableList.of(new ServerHolder(druidServer2, peon2, false))))
.andReturn(new BalancerSegmentHolder(druidServer2, segment3))
.andReturn(new BalancerSegmentHolder(druidServer2, segment4));
EasyMock.expect(strategy.pickSegmentToMove(anyObject()))
EasyMock.expect(strategy.pickSegmentToMove(EasyMock.anyObject()))
.andReturn(new BalancerSegmentHolder(druidServer1, segment1))
.andReturn(new BalancerSegmentHolder(druidServer1, segment2));
EasyMock.expect(strategy.findNewSegmentHomeBalancer(anyObject(), anyObject()))
EasyMock.expect(strategy.findNewSegmentHomeBalancer(EasyMock.anyObject(), EasyMock.anyObject()))
.andReturn(new ServerHolder(druidServer3, peon3))
.anyTimes();
replay(strategy);
EasyMock.replay(strategy);
DruidCoordinatorRuntimeParams params = defaultRuntimeParamsBuilder(
ImmutableList.of(druidServer1, druidServer2, druidServer3),
@ -247,7 +241,10 @@ public class DruidCoordinatorBalancerTest
params = new DruidCoordinatorBalancerTester(coordinator).run(params);
Assert.assertEquals(3L, params.getCoordinatorStats().getTieredStat("movedCount", "normal"));
Assert.assertThat(peon3.getSegmentsToLoad(), is(equalTo(ImmutableSet.of(segment1, segment3, segment4))));
Assert.assertThat(
peon3.getSegmentsToLoad(),
Matchers.is(Matchers.equalTo(ImmutableSet.of(segment1, segment3, segment4)))
);
}
@Test
@ -256,7 +253,7 @@ public class DruidCoordinatorBalancerTest
DruidCoordinatorRuntimeParams params = setupParamsForDecommissioningMaxPercentOfMaxSegmentsToMove(0);
params = new DruidCoordinatorBalancerTester(coordinator).run(params);
Assert.assertEquals(1L, params.getCoordinatorStats().getTieredStat("movedCount", "normal"));
Assert.assertThat(peon3.getSegmentsToLoad(), is(equalTo(ImmutableSet.of(segment1))));
Assert.assertThat(peon3.getSegmentsToLoad(), Matchers.is(Matchers.equalTo(ImmutableSet.of(segment1))));
}
@Test
@ -265,7 +262,7 @@ public class DruidCoordinatorBalancerTest
DruidCoordinatorRuntimeParams params = setupParamsForDecommissioningMaxPercentOfMaxSegmentsToMove(10);
params = new DruidCoordinatorBalancerTester(coordinator).run(params);
Assert.assertEquals(1L, params.getCoordinatorStats().getTieredStat("movedCount", "normal"));
Assert.assertThat(peon3.getSegmentsToLoad(), is(equalTo(ImmutableSet.of(segment2))));
Assert.assertThat(peon3.getSegmentsToLoad(), Matchers.is(Matchers.equalTo(ImmutableSet.of(segment2))));
}
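Hamcrest matchers get the same treatment in this file: is(equalTo(...)) becomes Matchers.is(Matchers.equalTo(...)) inside Assert.assertThat, and anyObject() becomes EasyMock.anyObject(). A small self-contained sketch of the qualified matcher style; the set contents are arbitrary illustration values.

import com.google.common.collect.ImmutableSet;
import org.hamcrest.Matchers;
import org.junit.Assert;
import org.junit.Test;

import java.util.Set;

public class QualifiedMatchersExampleTest
{
  @Test
  public void testQualifiedHamcrest()
  {
    Set<String> actual = ImmutableSet.of("segment1", "segment3", "segment4");
    Set<String> expected = ImmutableSet.of("segment1", "segment3", "segment4");

    // Qualified Matchers.* calls replace "import static org.hamcrest.Matchers.is/equalTo".
    Assert.assertThat(actual, Matchers.is(Matchers.equalTo(expected)));
  }
}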
/**
@ -283,16 +280,16 @@ public class DruidCoordinatorBalancerTest
mockCoordinator(coordinator);
BalancerStrategy strategy = EasyMock.createMock(BalancerStrategy.class);
EasyMock.expect(strategy.pickSegmentToMove(anyObject()))
.andReturn(new BalancerSegmentHolder(druidServer1, segment1))
.andReturn(new BalancerSegmentHolder(druidServer1, segment2))
.andReturn(new BalancerSegmentHolder(druidServer2, segment3))
.andReturn(new BalancerSegmentHolder(druidServer2, segment4));
EasyMock.expect(strategy.pickSegmentToMove(EasyMock.anyObject()))
.andReturn(new BalancerSegmentHolder(druidServer1, segment1))
.andReturn(new BalancerSegmentHolder(druidServer1, segment2))
.andReturn(new BalancerSegmentHolder(druidServer2, segment3))
.andReturn(new BalancerSegmentHolder(druidServer2, segment4));
EasyMock.expect(strategy.findNewSegmentHomeBalancer(anyObject(), anyObject()))
EasyMock.expect(strategy.findNewSegmentHomeBalancer(EasyMock.anyObject(), EasyMock.anyObject()))
.andReturn(new ServerHolder(druidServer3, peon3))
.anyTimes();
replay(strategy);
EasyMock.replay(strategy);
DruidCoordinatorRuntimeParams params = defaultRuntimeParamsBuilder(
ImmutableList.of(druidServer1, druidServer2, druidServer3),
@ -300,14 +297,20 @@ public class DruidCoordinatorBalancerTest
ImmutableList.of(false, false, false)
)
.withDynamicConfigs(
CoordinatorDynamicConfig.builder().withMaxSegmentsToMove(3).withDecommissioningMaxPercentOfMaxSegmentsToMove(9).build()
CoordinatorDynamicConfig.builder()
.withMaxSegmentsToMove(3)
.withDecommissioningMaxPercentOfMaxSegmentsToMove(9)
.build()
)
.withBalancerStrategy(strategy)
.build();
params = new DruidCoordinatorBalancerTester(coordinator).run(params);
Assert.assertEquals(3L, params.getCoordinatorStats().getTieredStat("movedCount", "normal"));
Assert.assertThat(peon3.getSegmentsToLoad(), is(equalTo(ImmutableSet.of(segment1, segment2, segment3))));
Assert.assertThat(
peon3.getSegmentsToLoad(),
Matchers.is(Matchers.equalTo(ImmutableSet.of(segment1, segment2, segment3)))
);
}
/**
@ -319,20 +322,20 @@ public class DruidCoordinatorBalancerTest
mockDruidServer(druidServer1, "1", "normal", 30L, 100L, segments);
mockDruidServer(druidServer2, "2", "normal", 0L, 100L, Collections.emptyList());
replay(druidServer3);
replay(druidServer4);
EasyMock.replay(druidServer3);
EasyMock.replay(druidServer4);
mockCoordinator(coordinator);
BalancerStrategy strategy = EasyMock.createMock(BalancerStrategy.class);
EasyMock.expect(strategy.pickSegmentToMove(anyObject()))
EasyMock.expect(strategy.pickSegmentToMove(EasyMock.anyObject()))
.andReturn(new BalancerSegmentHolder(druidServer1, segment1))
.anyTimes();
EasyMock.expect(strategy.findNewSegmentHomeBalancer(anyObject(), anyObject())).andAnswer(() -> {
EasyMock.expect(strategy.findNewSegmentHomeBalancer(EasyMock.anyObject(), EasyMock.anyObject())).andAnswer(() -> {
List<ServerHolder> holders = (List<ServerHolder>) EasyMock.getCurrentArguments()[1];
return holders.get(0);
}).anyTimes();
replay(strategy);
EasyMock.replay(strategy);
DruidCoordinatorRuntimeParams params = defaultRuntimeParamsBuilder(
ImmutableList.of(druidServer1, druidServer2),
@ -352,18 +355,20 @@ public class DruidCoordinatorBalancerTest
mockDruidServer(druidServer1, "1", "normal", 30L, 100L, segments);
mockDruidServer(druidServer2, "2", "normal", 0L, 100L, Collections.emptyList());
replay(druidServer3);
replay(druidServer4);
EasyMock.replay(druidServer3);
EasyMock.replay(druidServer4);
mockCoordinator(coordinator);
ServerHolder holder2 = new ServerHolder(druidServer2, peon2, false);
BalancerStrategy strategy = EasyMock.createMock(BalancerStrategy.class);
EasyMock.expect(strategy.pickSegmentToMove(anyObject()))
EasyMock.expect(strategy.pickSegmentToMove(EasyMock.anyObject()))
.andReturn(new BalancerSegmentHolder(druidServer1, segment1))
.once();
EasyMock.expect(strategy.findNewSegmentHomeBalancer(anyObject(), anyObject())).andReturn(holder2).once();
replay(strategy);
EasyMock.expect(strategy.findNewSegmentHomeBalancer(EasyMock.anyObject(), EasyMock.anyObject()))
.andReturn(holder2)
.once();
EasyMock.replay(strategy);
DruidCoordinatorRuntimeParams params = defaultRuntimeParamsBuilder(
ImmutableList.of(druidServer1, druidServer2),
@ -566,8 +571,8 @@ public class DruidCoordinatorBalancerTest
s -> EasyMock.expect(druidServer.getSegment(s.getId())).andReturn(s).anyTimes()
);
}
EasyMock.expect(druidServer.getSegment(anyObject())).andReturn(null).anyTimes();
replay(druidServer);
EasyMock.expect(druidServer.getSegment(EasyMock.anyObject())).andReturn(null).anyTimes();
EasyMock.replay(druidServer);
}
private static void mockCoordinator(DruidCoordinator coordinator)
@ -588,7 +593,7 @@ public class DruidCoordinatorBalancerTest
private final List<BalancerSegmentHolder> pickOrder;
private final AtomicInteger pickCounter = new AtomicInteger(0);
public PredefinedPickOrderBalancerStrategy(
PredefinedPickOrderBalancerStrategy(
BalancerStrategy delegate,
List<BalancerSegmentHolder> pickOrder
)
@ -636,12 +641,12 @@ public class DruidCoordinatorBalancerTest
BalancerStrategy strategy = EasyMock.createMock(BalancerStrategy.class);
EasyMock.expect(strategy.pickSegmentToMove(ImmutableList.of(new ServerHolder(druidServer2, peon2, true))))
.andReturn(new BalancerSegmentHolder(druidServer2, segment2));
EasyMock.expect(strategy.pickSegmentToMove(anyObject()))
EasyMock.expect(strategy.pickSegmentToMove(EasyMock.anyObject()))
.andReturn(new BalancerSegmentHolder(druidServer1, segment1));
EasyMock.expect(strategy.findNewSegmentHomeBalancer(anyObject(), anyObject()))
EasyMock.expect(strategy.findNewSegmentHomeBalancer(EasyMock.anyObject(), EasyMock.anyObject()))
.andReturn(new ServerHolder(druidServer3, peon3))
.anyTimes();
replay(strategy);
EasyMock.replay(strategy);
return defaultRuntimeParamsBuilder(
ImmutableList.of(druidServer1, druidServer2, druidServer3),

View File

@ -23,6 +23,7 @@ import org.apache.druid.java.util.common.DateTimes;
import org.apache.druid.timeline.DataSegment;
import org.joda.time.DateTime;
import org.joda.time.Interval;
import org.junit.Assert;
import org.junit.Test;
import java.util.ArrayList;
@ -32,10 +33,6 @@ import java.util.UUID;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.TimeUnit;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
public class SegmentsCostCacheTest
{
@ -49,7 +46,7 @@ public class SegmentsCostCacheTest
SegmentsCostCache.Builder cacheBuilder = SegmentsCostCache.builder();
cacheBuilder.addSegment(createSegment(DATA_SOURCE, shifted1HInterval(REFERENCE_TIME, 0), 100));
SegmentsCostCache cache = cacheBuilder.build();
assertEquals(
Assert.assertEquals(
7.8735899489011E-4,
cache.cost(createSegment(DATA_SOURCE, shifted1HInterval(REFERENCE_TIME, -2), 100)),
EPSILON
@ -64,7 +61,7 @@ public class SegmentsCostCacheTest
createSegment(DATA_SOURCE, shifted1HInterval(REFERENCE_TIME, 0), 100)
);
SegmentsCostCache cache = cacheBuilder.build();
assertEquals(
Assert.assertEquals(
0,
cache.cost(createSegment(DATA_SOURCE, shifted1HInterval(REFERENCE_TIME, (int) TimeUnit.DAYS.toHours(50)), 100)),
EPSILON
@ -86,7 +83,7 @@ public class SegmentsCostCacheTest
SegmentsCostCache.Bucket bucket = prototype.build();
double segmentCost = bucket.cost(segmentB);
assertEquals(7.8735899489011E-4, segmentCost, EPSILON);
Assert.assertEquals(7.8735899489011E-4, segmentCost, EPSILON);
}
@Test
@ -105,8 +102,8 @@ public class SegmentsCostCacheTest
prototype.addSegment(segmentA);
SegmentsCostCache.Bucket bucket = prototype.build();
assertTrue(bucket.inCalculationInterval(segmentA));
assertFalse(bucket.inCalculationInterval(segmentB));
Assert.assertTrue(bucket.inCalculationInterval(segmentA));
Assert.assertFalse(bucket.inCalculationInterval(segmentB));
}
@Test
@ -124,7 +121,7 @@ public class SegmentsCostCacheTest
SegmentsCostCache.Bucket bucket = prototype.build();
double segmentCost = bucket.cost(segmentB);
assertEquals(8.26147353873985E-4, segmentCost, EPSILON);
Assert.assertEquals(8.26147353873985E-4, segmentCost, EPSILON);
}
@Test
@ -145,7 +142,7 @@ public class SegmentsCostCacheTest
double segmentCost = bucket.cost(segmentB);
assertEquals(0.001574717989780039, segmentCost, EPSILON);
Assert.assertEquals(0.001574717989780039, segmentCost, EPSILON);
}
@Test
@ -167,10 +164,10 @@ public class SegmentsCostCacheTest
SegmentsCostCache.Bucket bucket = prototype.build();
double cost = bucket.cost(referenceSegment);
assertEquals(0.7065117101966677, cost, EPSILON);
Assert.assertEquals(0.7065117101966677, cost, EPSILON);
}
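The assertions in this file all use the three-argument double overload, now qualified as Assert.assertEquals(expected, actual, delta). A tiny sketch of that overload with an explicit tolerance; the values and the EPSILON constant below are illustrative only, not taken from the cost model.

import org.junit.Assert;
import org.junit.Test;

public class DoubleAssertExampleTest
{
  // Illustrative tolerance, analogous to the EPSILON constant in the test above.
  private static final double EPSILON = 1e-8;

  @Test
  public void testDoubleComparisonWithDelta()
  {
    double cost = 3.0 * 2.62453e-4;

    // Expected value first, actual second, tolerance third.
    Assert.assertEquals(7.87359e-4, cost, EPSILON);
  }
}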
public static Interval shifted1HInterval(DateTime REFERENCE_TIME, int shiftInHours)
private static Interval shifted1HInterval(DateTime REFERENCE_TIME, int shiftInHours)
{
return new Interval(
REFERENCE_TIME.plusHours(shiftInHours),
@ -178,7 +175,7 @@ public class SegmentsCostCacheTest
);
}
public static DataSegment createSegment(String dataSource, Interval interval, long size)
private static DataSegment createSegment(String dataSource, Interval interval, long size)
{
return new DataSegment(
dataSource,

View File

@ -26,6 +26,7 @@ import com.google.common.collect.Lists;
import org.apache.druid.jackson.DefaultObjectMapper;
import org.apache.druid.java.util.common.Intervals;
import org.joda.time.Period;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
@ -34,8 +35,6 @@ import java.io.IOException;
import java.util.Collections;
import java.util.List;
import static org.junit.Assert.assertEquals;
@RunWith(Parameterized.class)
public class BroadcastDistributionRuleSerdeTest
{
@ -44,7 +43,7 @@ public class BroadcastDistributionRuleSerdeTest
@Parameterized.Parameters
public static List<Object[]> constructorFeeder()
{
final List<Object[]> params = Lists.newArrayList(
return Lists.newArrayList(
new Object[]{new ForeverBroadcastDistributionRule(ImmutableList.of("large_source1", "large_source2"))},
new Object[]{new ForeverBroadcastDistributionRule(ImmutableList.of())},
new Object[]{new ForeverBroadcastDistributionRule(null)},
@ -55,7 +54,6 @@ public class BroadcastDistributionRuleSerdeTest
new Object[]{new PeriodBroadcastDistributionRule(new Period(1000), null, ImmutableList.of())},
new Object[]{new PeriodBroadcastDistributionRule(new Period(1000), null, null)}
);
return params;
}
private final Rule testRule;
@ -71,6 +69,6 @@ public class BroadcastDistributionRuleSerdeTest
final List<Rule> rules = Collections.singletonList(testRule);
final String json = MAPPER.writeValueAsString(rules);
final List<Rule> fromJson = MAPPER.readValue(json, new TypeReference<List<Rule>>(){});
assertEquals(rules, fromJson);
Assert.assertEquals(rules, fromJson);
}
}
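Besides the import cleanup, this serde test drops the intermediate params local and returns Lists.newArrayList(...) directly from the @Parameterized.Parameters method. A stripped-down sketch of that parameterized-test shape; the parameter values here are plain strings rather than Druid Rule objects, and the round trip is a stand-in.

import com.google.common.collect.Lists;
import org.junit.Assert;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;

import java.util.List;

@RunWith(Parameterized.class)
public class ParameterizedSerdeExampleTest
{
  @Parameterized.Parameters
  public static List<Object[]> constructorFeeder()
  {
    // Return the list directly instead of assigning it to a local first.
    return Lists.newArrayList(
        new Object[]{"loadForever"},
        new Object[]{"loadByPeriod"}
    );
  }

  private final String rule;

  public ParameterizedSerdeExampleTest(String rule)
  {
    this.rule = rule;
  }

  @Test
  public void testRoundTrip()
  {
    // Stand-in for a real serialize/deserialize round trip.
    String json = "\"" + rule + "\"";
    Assert.assertEquals(rule, json.substring(1, json.length() - 1));
  }
}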

View File

@ -33,6 +33,7 @@ import org.apache.druid.server.coordinator.SegmentReplicantLookup;
import org.apache.druid.server.coordinator.ServerHolder;
import org.apache.druid.timeline.DataSegment;
import org.apache.druid.timeline.partition.NoneShardSpec;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;
@ -44,16 +45,12 @@ import java.util.TreeSet;
import java.util.stream.Collectors;
import java.util.stream.Stream;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
public class BroadcastDistributionRuleTest
{
private DruidCluster druidCluster;
private ServerHolder holderOfSmallSegment;
private List<ServerHolder> holdersOfLargeSegments = new ArrayList<>();
private List<ServerHolder> holdersOfLargeSegments2 = new ArrayList<>();
private final List<ServerHolder> holdersOfLargeSegments = new ArrayList<>();
private final List<ServerHolder> holdersOfLargeSegments2 = new ArrayList<>();
private final List<DataSegment> largeSegments = new ArrayList<>();
private final List<DataSegment> largeSegments2 = new ArrayList<>();
private DataSegment smallSegment;
@ -297,20 +294,20 @@ public class BroadcastDistributionRuleTest
smallSegment
);
assertEquals(3L, stats.getGlobalStat(LoadRule.ASSIGNED_COUNT));
assertEquals(false, stats.hasPerTierStats());
Assert.assertEquals(3L, stats.getGlobalStat(LoadRule.ASSIGNED_COUNT));
Assert.assertFalse(stats.hasPerTierStats());
assertTrue(
Assert.assertTrue(
holdersOfLargeSegments.stream()
.allMatch(holder -> holder.getPeon().getSegmentsToLoad().contains(smallSegment))
);
assertTrue(
Assert.assertTrue(
holdersOfLargeSegments2.stream()
.noneMatch(holder -> holder.getPeon().getSegmentsToLoad().contains(smallSegment))
);
assertFalse(holderOfSmallSegment.getPeon().getSegmentsToLoad().contains(smallSegment));
Assert.assertFalse(holderOfSmallSegment.getPeon().getSegmentsToLoad().contains(smallSegment));
}
/**
@ -320,7 +317,7 @@ public class BroadcastDistributionRuleTest
* active | large segment
* decommissioning1 | small segment
* decommissioning2 | large segment
*
* <p>
* After running the rule for the small segment:
* active | large & small segments
* decommissioning1 |
@ -345,12 +342,12 @@ public class BroadcastDistributionRuleTest
smallSegment
);
assertEquals(1L, stats.getGlobalStat(LoadRule.ASSIGNED_COUNT));
assertEquals(false, stats.hasPerTierStats());
Assert.assertEquals(1L, stats.getGlobalStat(LoadRule.ASSIGNED_COUNT));
Assert.assertFalse(stats.hasPerTierStats());
assertEquals(1, activeServer.getPeon().getSegmentsToLoad().size());
assertEquals(1, decommissioningServer1.getPeon().getSegmentsToDrop().size());
assertEquals(0, decommissioningServer2.getPeon().getSegmentsToLoad().size());
Assert.assertEquals(1, activeServer.getPeon().getSegmentsToLoad().size());
Assert.assertEquals(1, decommissioningServer1.getPeon().getSegmentsToDrop().size());
Assert.assertEquals(0, decommissioningServer2.getPeon().getSegmentsToLoad().size());
}
@Test
@ -377,20 +374,20 @@ public class BroadcastDistributionRuleTest
smallSegment
);
assertEquals(5L, stats.getGlobalStat(LoadRule.ASSIGNED_COUNT));
assertEquals(false, stats.hasPerTierStats());
Assert.assertEquals(5L, stats.getGlobalStat(LoadRule.ASSIGNED_COUNT));
Assert.assertFalse(stats.hasPerTierStats());
assertTrue(
Assert.assertTrue(
holdersOfLargeSegments.stream()
.allMatch(holder -> holder.getPeon().getSegmentsToLoad().contains(smallSegment))
);
assertTrue(
Assert.assertTrue(
holdersOfLargeSegments2.stream()
.allMatch(holder -> holder.getPeon().getSegmentsToLoad().contains(smallSegment))
);
assertFalse(holderOfSmallSegment.getPeon().getSegmentsToLoad().contains(smallSegment));
Assert.assertFalse(holderOfSmallSegment.getPeon().getSegmentsToLoad().contains(smallSegment));
}
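Alongside the qualification, assertEquals(false, stats.hasPerTierStats()) is rewritten as Assert.assertFalse(stats.hasPerTierStats()), the more direct assertion for a boolean condition. A trivial sketch of the preferred form; hasPerTierStats below is a hypothetical local helper, not the CoordinatorStats method.

import org.junit.Assert;
import org.junit.Test;

public class BooleanAssertExampleTest
{
  // Hypothetical stand-in for CoordinatorStats.hasPerTierStats().
  private boolean hasPerTierStats()
  {
    return false;
  }

  @Test
  public void testBooleanAssertion()
  {
    // Prefer assertFalse(x) over assertEquals(false, x) for boolean conditions.
    Assert.assertFalse(hasPerTierStats());
  }
}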
@Test
@ -415,10 +412,10 @@ public class BroadcastDistributionRuleTest
smallSegment
);
assertEquals(6L, stats.getGlobalStat(LoadRule.ASSIGNED_COUNT));
assertEquals(false, stats.hasPerTierStats());
Assert.assertEquals(6L, stats.getGlobalStat(LoadRule.ASSIGNED_COUNT));
Assert.assertFalse(stats.hasPerTierStats());
assertTrue(
Assert.assertTrue(
druidCluster.getAllServers().stream()
.allMatch(holder -> holder.getPeon().getSegmentsToLoad().contains(smallSegment))
);