mirror of https://github.com/apache/druid.git

Fix ClientInfoResource

Fix ClientInfoResource so that it doesn't return empty list for overshadowed segments

parent 166c4fcf46
commit 57c8e56451
ClientInfoResource.java

@@ -42,6 +42,7 @@ import javax.ws.rs.Produces;
 import javax.ws.rs.QueryParam;
 import javax.ws.rs.core.MediaType;
 
+import io.druid.timeline.partition.PartitionHolder;
 import org.joda.time.DateTime;
 import org.joda.time.Interval;
 
@@ -149,30 +150,23 @@ public class ClientInfoResource
     );
 
     for (TimelineObjectHolder<String, ServerSelector> holder : serversLookup) {
+      final Set<Object> dimensions = Sets.newHashSet();
+      final Set<Object> metrics = Sets.newHashSet();
+      final PartitionHolder<ServerSelector> partitionHolder = holder.getObject();
+      if (partitionHolder.isComplete()) {
+        for (ServerSelector server : partitionHolder.payloads()) {
+          final DataSegment segment = server.getSegment();
+          dimensions.addAll(segment.getDimensions());
+          metrics.addAll(segment.getMetrics());
+        }
+      }
+
       servedIntervals.put(
           holder.getInterval(),
-          ImmutableMap.of(KEY_DIMENSIONS, Sets.newHashSet(), KEY_METRICS, Sets.newHashSet())
+          ImmutableMap.of(KEY_DIMENSIONS, dimensions, KEY_METRICS, metrics)
       );
     }
 
-    List<DataSegment> segments = getSegmentsForDatasources().get(dataSourceName);
-    if (segments == null || segments.isEmpty()) {
-      log.error(
-          "Found no DataSegments but TimelineServerView has served intervals. Datasource = %s , Interval = %s",
-          dataSourceName,
-          theInterval
-      );
-      throw new RuntimeException("Internal Error");
-    }
-
-    for (DataSegment segment : segments) {
-      if (servedIntervals.containsKey(segment.getInterval())) {
-        Map<String, Set<String>> columns = (Map<String, Set<String>>) servedIntervals.get(segment.getInterval());
-        columns.get(KEY_DIMENSIONS).addAll(segment.getDimensions());
-        columns.get(KEY_METRICS).addAll(segment.getMetrics());
-      }
-    }
-
     //collapse intervals if they abut and have same set of columns
     Map<String, Object> result = Maps.newLinkedHashMap();
     Interval curr = null;
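Note on the fix: "overshadowed" segments are segments whose data is fully covered by higher-versioned segments over the same interval. VersionedIntervalTimeline.lookup() already resolves that overshadowing, so the rewritten loop reads dimensions and metrics directly from the served payloads instead of cross-referencing getSegmentsForDatasources(), which is what came back empty before. A minimal sketch of the lookup behavior the fix relies on (not part of the commit; String payloads stand in for ServerSelector):

import com.google.common.collect.Ordering;
import io.druid.timeline.TimelineObjectHolder;
import io.druid.timeline.VersionedIntervalTimeline;
import io.druid.timeline.partition.SingleElementPartitionChunk;
import org.joda.time.Interval;

public class OvershadowDemo
{
  public static void main(String[] args)
  {
    VersionedIntervalTimeline<String, String> timeline =
        new VersionedIntervalTimeline<>(Ordering.<String>natural());

    // "v1" covers the whole week; "v2" re-indexes two days in the middle of it.
    timeline.add(new Interval("2015-02-01/2015-02-07"), "v1", new SingleElementPartitionChunk<String>("v1-data"));
    timeline.add(new Interval("2015-02-03/2015-02-05"), "v2", new SingleElementPartitionChunk<String>("v2-data"));

    // lookup() returns three holders; the v1 slice under 2015-02-03/2015-02-05
    // is overshadowed by v2 and never surfaces.
    for (TimelineObjectHolder<String, String> holder : timeline.lookup(new Interval("2015-02-01/2015-02-07"))) {
      System.out.println(holder.getInterval() + " -> " + holder.getVersion());
    }
  }
}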
ClientInfoResourceTest.java

@@ -17,9 +17,10 @@
 
 package io.druid.server;
 
-import java.util.List;
-import java.util.Map;
+import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
+import com.google.common.collect.ImmutableSet;
+import com.google.common.collect.Ordering;
 import io.druid.client.DruidServer;
 import io.druid.client.InventoryView;
 import io.druid.client.TimelineServerView;
@@ -28,8 +29,9 @@ import io.druid.query.TableDataSource;
 import io.druid.query.metadata.SegmentMetadataQueryConfig;
 import io.druid.timeline.DataSegment;
 import io.druid.timeline.VersionedIntervalTimeline;
+import io.druid.timeline.partition.NumberedShardSpec;
+import io.druid.timeline.partition.ShardSpec;
 import io.druid.timeline.partition.SingleElementPartitionChunk;
-
 import org.easymock.EasyMock;
 import org.joda.time.DateTime;
 import org.joda.time.Interval;
@@ -37,10 +39,8 @@ import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.ImmutableSet;
-import com.google.common.collect.Ordering;
+import java.util.List;
+import java.util.Map;
 
 public class ClientInfoResourceTest
 {
@@ -50,7 +50,6 @@ public class ClientInfoResourceTest
 
   private final String dataSource = "test-data-source";
-  private final String version = "v0";
 
   private InventoryView serverInventoryView;
   private TimelineServerView timelineServerView;
@@ -62,11 +61,55 @@ public class ClientInfoResourceTest
     VersionedIntervalTimeline<String, ServerSelector> timeline = new VersionedIntervalTimeline<>(Ordering.<String>natural());
     DruidServer server = new DruidServer("name", "host", 1234, "type", "tier", 0);
 
-    addSegment(timeline, server, "1960-02-13/1961-02-14", ImmutableList.of("d1"), ImmutableList.of("m1"));
-    addSegment(timeline, server, "2014-02-13/2014-02-14", ImmutableList.of("d1"), ImmutableList.of("m1"));
-    addSegment(timeline, server, "2014-02-14/2014-02-15", ImmutableList.of("d1"), ImmutableList.of("m1"));
-    addSegment(timeline, server, "2014-02-16/2014-02-17", ImmutableList.of("d1"), ImmutableList.of("m1"));
-    addSegment(timeline, server, "2014-02-17/2014-02-18", ImmutableList.of("d2"), ImmutableList.of("m2"));
+    addSegment(timeline, server, "1960-02-13/1961-02-14", ImmutableList.of("d5"), ImmutableList.of("m5"), "v0");
+
+    // segments within [2014-02-13, 2014-02-18]
+    addSegment(timeline, server, "2014-02-13/2014-02-14", ImmutableList.of("d1"), ImmutableList.of("m1"), "v0");
+    addSegment(timeline, server, "2014-02-14/2014-02-15", ImmutableList.of("d1"), ImmutableList.of("m1"), "v0");
+    addSegment(timeline, server, "2014-02-16/2014-02-17", ImmutableList.of("d1"), ImmutableList.of("m1"), "v0");
+    addSegment(timeline, server, "2014-02-17/2014-02-18", ImmutableList.of("d2"), ImmutableList.of("m2"), "v0");
+
+    // segments within [2015-02-01, 2015-02-13]
+    addSegment(timeline, server, "2015-02-01/2015-02-07", ImmutableList.of("d1"), ImmutableList.of("m1"), "v1");
+    addSegment(timeline, server, "2015-02-07/2015-02-13", ImmutableList.of("d1"), ImmutableList.of("m1"), "v1");
+    addSegmentWithShardSpec(
+        timeline, server, "2015-02-03/2015-02-05",
+        ImmutableList.of("d1", "d2"),
+        ImmutableList.of("m1", "m2"),
+        "v2",
+        new NumberedShardSpec(0, 2)
+    );
+    addSegmentWithShardSpec(
+        timeline, server, "2015-02-03/2015-02-05",
+        ImmutableList.of("d1", "d2", "d3"),
+        ImmutableList.of("m1", "m2", "m3"),
+        "v2",
+        new NumberedShardSpec(1, 2)
+    );
+    addSegment(
+        timeline,
+        server,
+        "2015-02-09/2015-02-10",
+        ImmutableList.of("d1", "d3"),
+        ImmutableList.of("m1", "m3"),
+        "v2"
+    );
+    addSegment(timeline, server, "2015-02-11/2015-02-12", ImmutableList.of("d3"), ImmutableList.of("m3"), "v2");
+
+    // segments within [2015-03-13, 2015-03-19]
+    addSegment(timeline, server, "2015-03-13/2015-03-19", ImmutableList.of("d1"), ImmutableList.of("m1"), "v3");
+    addSegment(timeline, server, "2015-03-13/2015-03-14", ImmutableList.of("d1"), ImmutableList.of("m1"), "v4");
+    addSegment(timeline, server, "2015-03-14/2015-03-15", ImmutableList.of("d1"), ImmutableList.of("m1"), "v5");
+    addSegment(timeline, server, "2015-03-15/2015-03-16", ImmutableList.of("d1"), ImmutableList.of("m1"), "v6");
+
+    // incomplete segment
+    addSegmentWithShardSpec(
+        timeline, server, "2015-04-03/2015-04-05",
+        ImmutableList.of("d4"),
+        ImmutableList.of("m4"),
+        "v7",
+        new NumberedShardSpec(0, 2)
+    );
 
     serverInventoryView = EasyMock.createMock(InventoryView.class);
     EasyMock.expect(serverInventoryView.getInventory()).andReturn(ImmutableList.of(server)).anyTimes();
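The last fixture above is deliberately incomplete: NumberedShardSpec(0, 2) declares chunk 0 of 2, and the new partitionHolder.isComplete() check in ClientInfoResource skips holders with missing chunks, which is what testGetDatasourceFullWithIncompleteSegment later asserts. A small illustrative sketch, assuming PartitionHolder's single-chunk constructor and add() behave as the timeline internals rely on:

import io.druid.timeline.partition.NumberedShardSpec;
import io.druid.timeline.partition.PartitionHolder;

public class CompletenessDemo
{
  public static void main(String[] args)
  {
    // Only chunk 0 of the declared 2 is present, mirroring the "v7" fixture.
    PartitionHolder<String> holder =
        new PartitionHolder<String>(new NumberedShardSpec(0, 2).createChunk("chunk-0"));
    System.out.println(holder.isComplete()); // false - such holders are skipped

    holder.add(new NumberedShardSpec(1, 2).createChunk("chunk-1"));
    System.out.println(holder.isComplete()); // true - all declared chunks present
  }
}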
@@ -83,10 +126,10 @@ public class ClientInfoResourceTest
   }
 
   @Test
-  public void testGetDatasourceNonFullWithLargeInterval()
+  public void testGetDatasourceNonFullWithInterval()
   {
-    Map<String, Object> actual = resource.getDatasource(dataSource, "1975/2050", null);
-    Map<String, ?> expected = ImmutableMap.of(
+    Map<String, Object> actual = resource.getDatasource(dataSource, "1975/2015", null);
+    Map<String, Object> expected = ImmutableMap.<String, Object>of(
         KEY_DIMENSIONS, ImmutableSet.of("d1", "d2"),
         KEY_METRICS, ImmutableSet.of("m1", "m2")
     );
@@ -95,11 +138,10 @@ public class ClientInfoResourceTest
   }
 
   @Test
-  public void testGetDatasourceFullWithLargeInterval()
+  public void testGetDatasourceFullWithInterval()
   {
-    Map<String, Object> actual = resource.getDatasource(dataSource, "1975/2050", "true");
-    Map<String, ?> expected = ImmutableMap.of(
+    Map<String, Object> actual = resource.getDatasource(dataSource, "1975/2015", "true");
+    Map<String, Object> expected = ImmutableMap.<String, Object>of(
         "2014-02-13T00:00:00.000Z/2014-02-15T00:00:00.000Z",
         ImmutableMap.of(KEY_DIMENSIONS, ImmutableSet.of("d1"), KEY_METRICS, ImmutableSet.of("m1")),
         "2014-02-16T00:00:00.000Z/2014-02-17T00:00:00.000Z",
@@ -120,7 +162,7 @@ public class ClientInfoResourceTest
         "2014-02-13T09:00:00.000Z/2014-02-17T23:00:00.000Z",
         "true"
     );
-    Map<String, ?> expected = ImmutableMap.of(
+    Map<String, Object> expected = ImmutableMap.<String, Object>of(
         "2014-02-13T09:00:00.000Z/2014-02-15T00:00:00.000Z",
         ImmutableMap.of(KEY_DIMENSIONS, ImmutableSet.of("d1"), KEY_METRICS, ImmutableSet.of("m1")),
         "2014-02-16T00:00:00.000Z/2014-02-17T00:00:00.000Z",
@@ -136,8 +178,10 @@ public class ClientInfoResourceTest
   @Test
   public void testGetDatasourceWithDefaultInterval()
   {
-    Map<String, Object> actual = resource.getDatasource(dataSource, null, "false");
-    Assert.assertEquals(actual.size(), 0);
+    Map<String, Object> actual = resource.getDatasource(dataSource, null, null);
+    Map<String, Object> expected = ImmutableMap.<String, Object>of(KEY_DIMENSIONS, ImmutableSet.of(), KEY_METRICS, ImmutableSet.of());
+
+    Assert.assertEquals(expected, actual);
   }
 
   @Test
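Both versions of testGetDatasourceWithDefaultInterval rely on the resource deriving a query interval from a configured lookback period when none is passed; the next hunk builds a defaultResource with SegmentMetadataQueryConfig("P100Y") so that every fixture segment falls inside it. A rough sketch of that derivation (the resource's actual computation may differ):

import org.joda.time.DateTime;
import org.joda.time.Interval;
import org.joda.time.Period;

public class DefaultIntervalDemo
{
  public static void main(String[] args)
  {
    DateTime now = new DateTime();
    Period lookback = Period.parse("P100Y");
    // With no explicit interval, query everything served in the lookback window.
    Interval theInterval = new Interval(now.minus(lookback), now);
    System.out.println(theInterval);
  }
}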
@@ -148,28 +192,160 @@ public class ClientInfoResourceTest
         new SegmentMetadataQueryConfig("P100Y")
     );
 
-    Map<String, ?> expected = ImmutableMap.of(
-        "1960-02-13T00:00:00.000Z/1961-02-14T00:00:00.000Z",
-        ImmutableMap.of(KEY_DIMENSIONS, ImmutableSet.of("d1"), KEY_METRICS, ImmutableSet.of("m1")),
-        "2014-02-13T00:00:00.000Z/2014-02-15T00:00:00.000Z",
-        ImmutableMap.of(KEY_DIMENSIONS, ImmutableSet.of("d1"), KEY_METRICS, ImmutableSet.of("m1")),
-        "2014-02-16T00:00:00.000Z/2014-02-17T00:00:00.000Z",
-        ImmutableMap.of(KEY_DIMENSIONS, ImmutableSet.of("d1"), KEY_METRICS, ImmutableSet.of("m1")),
-        "2014-02-17T00:00:00.000Z/2014-02-18T00:00:00.000Z",
-        ImmutableMap.of(KEY_DIMENSIONS, ImmutableSet.of("d2"), KEY_METRICS, ImmutableSet.of("m2"))
+    Map<String, Object> expected = ImmutableMap.<String, Object>of(
+        KEY_DIMENSIONS,
+        ImmutableSet.of("d1", "d2", "d3", "d4", "d5"),
+        KEY_METRICS,
+        ImmutableSet.of("m1", "m2", "m3", "m4", "m5")
     );
 
-    Map<String, Object> actual = defaultResource.getDatasource(dataSource, null, "false");
+    Map<String, Object> actual = defaultResource.getDatasource(dataSource, null, null);
 
     Assert.assertEquals(expected, actual);
   }
 
+  @Test
+  public void testGetDatasourceFullWithOvershadowedSegments1()
+  {
+    Map<String, Object> actual = resource.getDatasource(
+        dataSource,
+        "2015-02-02T09:00:00.000Z/2015-02-06T23:00:00.000Z",
+        "true"
+    );
+
+    Map<String, Object> expected = ImmutableMap.<String, Object>of(
+        "2015-02-02T09:00:00.000Z/2015-02-03T00:00:00.000Z",
+        ImmutableMap.of(KEY_DIMENSIONS, ImmutableSet.of("d1"), KEY_METRICS, ImmutableSet.of("m1")),
+        "2015-02-03T00:00:00.000Z/2015-02-05T00:00:00.000Z",
+        ImmutableMap.of(
+            KEY_DIMENSIONS,
+            ImmutableSet.of("d1", "d2", "d3"),
+            KEY_METRICS,
+            ImmutableSet.of("m1", "m2", "m3")
+        ),
+        "2015-02-05T00:00:00.000Z/2015-02-06T23:00:00.000Z",
+        ImmutableMap.of(KEY_DIMENSIONS, ImmutableSet.of("d1"), KEY_METRICS, ImmutableSet.of("m1"))
+    );
+
+    EasyMock.verify(serverInventoryView, timelineServerView);
+    Assert.assertEquals(expected, actual);
+  }
+
+  @Test
+  public void testGetDatasourceFullWithOvershadowedSegments2()
+  {
+    Map<String, Object> actual = resource.getDatasource(
+        dataSource,
+        "2015-02-09T09:00:00.000Z/2015-02-13T23:00:00.000Z",
+        "true"
+    );
+
+    Map<String, Object> expected = ImmutableMap.<String, Object>of(
+        "2015-02-09T09:00:00.000Z/2015-02-10T00:00:00.000Z",
+        ImmutableMap.of(KEY_DIMENSIONS, ImmutableSet.of("d1", "d3"), KEY_METRICS, ImmutableSet.of("m1", "m3")),
+        "2015-02-10T00:00:00.000Z/2015-02-11T00:00:00.000Z",
+        ImmutableMap.of(KEY_DIMENSIONS, ImmutableSet.of("d1"), KEY_METRICS, ImmutableSet.of("m1")),
+        "2015-02-11T00:00:00.000Z/2015-02-12T00:00:00.000Z",
+        ImmutableMap.of(KEY_DIMENSIONS, ImmutableSet.of("d3"), KEY_METRICS, ImmutableSet.of("m3")),
+        "2015-02-12T00:00:00.000Z/2015-02-13T00:00:00.000Z",
+        ImmutableMap.of(KEY_DIMENSIONS, ImmutableSet.of("d1"), KEY_METRICS, ImmutableSet.of("m1"))
+    );
+
+    EasyMock.verify(serverInventoryView, timelineServerView);
+    Assert.assertEquals(expected, actual);
+  }
+
+  /**
+   * Though segments within [2015-03-13, 2015-03-19] have different versions, they all abut with each other and have
+   * same dimensions/metrics, so they all should be merged together.
+   */
+  @Test
+  public void testGetDatasourceFullWithOvershadowedSegmentsMerged()
+  {
+    Map<String, Object> actual = resource.getDatasource(
+        dataSource,
+        "2015-03-13T02:00:00.000Z/2015-03-19T15:00:00.000Z",
+        "true"
+    );
+
+    Map<String, Object> expected = ImmutableMap.<String, Object>of(
+        "2015-03-13T02:00:00.000Z/2015-03-19T00:00:00.000Z",
+        ImmutableMap.of(KEY_DIMENSIONS, ImmutableSet.of("d1"), KEY_METRICS, ImmutableSet.of("m1"))
+    );
+
+    EasyMock.verify(serverInventoryView, timelineServerView);
+    Assert.assertEquals(expected, actual);
+  }
+
+  /**
+   * If "full" is specified, then dimensions/metrics that exist in an incomplete segment should be ignored
+   */
+  @Test
+  public void testGetDatasourceFullWithIncompleteSegment()
+  {
+    Map<String, Object> actual = resource.getDatasource(dataSource, "2015-04-03/2015-04-05", "true");
+    Map<String, Object> expected = ImmutableMap.of();
+
+    EasyMock.verify(serverInventoryView, timelineServerView);
+    Assert.assertEquals(expected, actual);
+  }
+
+  @Test
+  public void testGetDatasourceFullWithLargeInterval()
+  {
+    Map<String, Object> actual = resource.getDatasource(dataSource, "1975/2050", "true");
+    Map<String, Object> expected = ImmutableMap.<String, Object>builder().put(
+        "2014-02-13T00:00:00.000Z/2014-02-15T00:00:00.000Z",
+        ImmutableMap.of(KEY_DIMENSIONS, ImmutableSet.of("d1"), KEY_METRICS, ImmutableSet.of("m1"))
+    ).put(
+        "2014-02-16T00:00:00.000Z/2014-02-17T00:00:00.000Z",
+        ImmutableMap.of(KEY_DIMENSIONS, ImmutableSet.of("d1"), KEY_METRICS, ImmutableSet.of("m1"))
+    ).put(
+        "2014-02-17T00:00:00.000Z/2014-02-18T00:00:00.000Z",
+        ImmutableMap.of(KEY_DIMENSIONS, ImmutableSet.of("d2"), KEY_METRICS, ImmutableSet.of("m2"))
+    ).put(
+        "2015-02-01T00:00:00.000Z/2015-02-03T00:00:00.000Z",
+        ImmutableMap.of(KEY_DIMENSIONS, ImmutableSet.of("d1"), KEY_METRICS, ImmutableSet.of("m1"))
+    ).put(
+        "2015-02-03T00:00:00.000Z/2015-02-05T00:00:00.000Z",
+        ImmutableMap.of(
+            KEY_DIMENSIONS,
+            ImmutableSet.of("d1", "d2", "d3"),
+            KEY_METRICS,
+            ImmutableSet.of("m1", "m2", "m3")
+        )
+    ).put(
+        "2015-02-05T00:00:00.000Z/2015-02-09T00:00:00.000Z",
+        ImmutableMap.of(KEY_DIMENSIONS, ImmutableSet.of("d1"), KEY_METRICS, ImmutableSet.of("m1"))
+    ).put(
+        "2015-02-09T00:00:00.000Z/2015-02-10T00:00:00.000Z",
+        ImmutableMap.of(KEY_DIMENSIONS, ImmutableSet.of("d1", "d3"), KEY_METRICS, ImmutableSet.of("m1", "m3"))
+    ).put(
+        "2015-02-10T00:00:00.000Z/2015-02-11T00:00:00.000Z",
+        ImmutableMap.of(KEY_DIMENSIONS, ImmutableSet.of("d1"), KEY_METRICS, ImmutableSet.of("m1"))
+    ).put(
+        "2015-02-11T00:00:00.000Z/2015-02-12T00:00:00.000Z",
+        ImmutableMap.of(KEY_DIMENSIONS, ImmutableSet.of("d3"), KEY_METRICS, ImmutableSet.of("m3"))
+    ).put(
+        "2015-02-12T00:00:00.000Z/2015-02-13T00:00:00.000Z",
+        ImmutableMap.of(KEY_DIMENSIONS, ImmutableSet.of("d1"), KEY_METRICS, ImmutableSet.of("m1"))
+    ).put(
+        "2015-03-13T00:00:00.000Z/2015-03-19T00:00:00.000Z",
+        ImmutableMap.of(KEY_DIMENSIONS, ImmutableSet.of("d1"), KEY_METRICS, ImmutableSet.of("m1"))
+    ).build();
+
+    EasyMock.verify(serverInventoryView, timelineServerView);
+    Assert.assertEquals(expected, actual);
+  }
+
   private void addSegment(
       VersionedIntervalTimeline<String, ServerSelector> timeline,
       DruidServer server,
       String interval,
       List<String> dims,
-      List<String> metrics
+      List<String> metrics,
+      String version
   )
   {
     DataSegment segment = DataSegment.builder()
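testGetDatasourceFullWithOvershadowedSegmentsMerged above exercises the resource's "collapse intervals if they abut and have same set of columns" step. A simplified, illustrative version of that rule follows; collapse and the String column signature are stand-ins for exposition, not the resource's actual helpers:

import java.util.LinkedHashMap;
import java.util.Map;
import org.joda.time.Interval;

public class CollapseDemo
{
  // Merge runs of abutting intervals that carry an identical column signature.
  static Map<Interval, String> collapse(Map<Interval, String> columnsByInterval)
  {
    Map<Interval, String> result = new LinkedHashMap<>();
    Interval curr = null;
    String currColumns = null;
    for (Map.Entry<Interval, String> e : columnsByInterval.entrySet()) {
      if (curr != null && curr.abuts(e.getKey()) && currColumns.equals(e.getValue())) {
        // Same columns and no gap: extend the current run.
        curr = new Interval(curr.getStart(), e.getKey().getEnd());
      } else {
        if (curr != null) {
          result.put(curr, currColumns);
        }
        curr = e.getKey();
        currColumns = e.getValue();
      }
    }
    if (curr != null) {
      result.put(curr, currColumns);
    }
    return result;
  }
}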
@@ -185,6 +361,30 @@ public class ClientInfoResourceTest
     timeline.add(new Interval(interval), version, new SingleElementPartitionChunk<ServerSelector>(ss));
   }
 
+  private void addSegmentWithShardSpec(
+      VersionedIntervalTimeline<String, ServerSelector> timeline,
+      DruidServer server,
+      String interval,
+      List<String> dims,
+      List<String> metrics,
+      String version,
+      ShardSpec shardSpec
+  )
+  {
+    DataSegment segment = DataSegment.builder()
+                                     .dataSource(dataSource)
+                                     .interval(new Interval(interval))
+                                     .version(version)
+                                     .dimensions(dims)
+                                     .metrics(metrics)
+                                     .shardSpec(shardSpec)
+                                     .size(1)
+                                     .build();
+    server.addDataSegment(segment.getIdentifier(), segment);
+    ServerSelector ss = new ServerSelector(segment, null);
+    timeline.add(new Interval(interval), version, shardSpec.createChunk(ss));
+  }
+
   private ClientInfoResource getResourceTestHelper(
       InventoryView serverInventoryView,
       TimelineServerView timelineServerView,