diff --git a/server/src/main/java/io/druid/server/ClientInfoResource.java b/server/src/main/java/io/druid/server/ClientInfoResource.java index d214a226dc7..620f081af24 100644 --- a/server/src/main/java/io/druid/server/ClientInfoResource.java +++ b/server/src/main/java/io/druid/server/ClientInfoResource.java @@ -42,6 +42,7 @@ import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import javax.ws.rs.core.MediaType; +import io.druid.timeline.partition.PartitionHolder; import org.joda.time.DateTime; import org.joda.time.Interval; @@ -149,30 +150,23 @@ public class ClientInfoResource ); for (TimelineObjectHolder holder : serversLookup) { + final Set dimensions = Sets.newHashSet(); + final Set metrics = Sets.newHashSet(); + final PartitionHolder partitionHolder = holder.getObject(); + if (partitionHolder.isComplete()) { + for (ServerSelector server : partitionHolder.payloads()) { + final DataSegment segment = server.getSegment(); + dimensions.addAll(segment.getDimensions()); + metrics.addAll(segment.getMetrics()); + } + } + servedIntervals.put( holder.getInterval(), - ImmutableMap.of(KEY_DIMENSIONS, Sets.newHashSet(), KEY_METRICS, Sets.newHashSet()) + ImmutableMap.of(KEY_DIMENSIONS, dimensions, KEY_METRICS, metrics) ); } - List segments = getSegmentsForDatasources().get(dataSourceName); - if (segments == null || segments.isEmpty()) { - log.error( - "Found no DataSegments but TimelineServerView has served intervals. 
Datasource = %s , Interval = %s", - dataSourceName, - theInterval - ); - throw new RuntimeException("Internal Error"); - } - - for (DataSegment segment : segments) { - if (servedIntervals.containsKey(segment.getInterval())) { - Map> columns = (Map>) servedIntervals.get(segment.getInterval()); - columns.get(KEY_DIMENSIONS).addAll(segment.getDimensions()); - columns.get(KEY_METRICS).addAll(segment.getMetrics()); - } - } - //collapse intervals if they abut and have same set of columns Map result = Maps.newLinkedHashMap(); Interval curr = null; diff --git a/server/src/test/java/io/druid/server/ClientInfoResourceTest.java b/server/src/test/java/io/druid/server/ClientInfoResourceTest.java index 8a8e50fb7d5..7a45d651012 100644 --- a/server/src/test/java/io/druid/server/ClientInfoResourceTest.java +++ b/server/src/test/java/io/druid/server/ClientInfoResourceTest.java @@ -17,9 +17,10 @@ package io.druid.server; -import java.util.List; -import java.util.Map; - +import com.google.common.collect.ImmutableList; +import com.google.common.collect.ImmutableMap; +import com.google.common.collect.ImmutableSet; +import com.google.common.collect.Ordering; import io.druid.client.DruidServer; import io.druid.client.InventoryView; import io.druid.client.TimelineServerView; @@ -28,8 +29,9 @@ import io.druid.query.TableDataSource; import io.druid.query.metadata.SegmentMetadataQueryConfig; import io.druid.timeline.DataSegment; import io.druid.timeline.VersionedIntervalTimeline; +import io.druid.timeline.partition.NumberedShardSpec; +import io.druid.timeline.partition.ShardSpec; import io.druid.timeline.partition.SingleElementPartitionChunk; - import org.easymock.EasyMock; import org.joda.time.DateTime; import org.joda.time.Interval; @@ -37,10 +39,8 @@ import org.junit.Assert; import org.junit.Before; import org.junit.Test; -import com.google.common.collect.ImmutableList; -import com.google.common.collect.ImmutableMap; -import com.google.common.collect.ImmutableSet; -import 
com.google.common.collect.Ordering; +import java.util.List; +import java.util.Map; public class ClientInfoResourceTest { @@ -50,7 +50,6 @@ public class ClientInfoResourceTest private final String dataSource = "test-data-source"; - private final String version = "v0"; private InventoryView serverInventoryView; private TimelineServerView timelineServerView; @@ -62,11 +61,55 @@ public class ClientInfoResourceTest VersionedIntervalTimeline timeline = new VersionedIntervalTimeline<>(Ordering.natural()); DruidServer server = new DruidServer("name", "host", 1234, "type", "tier", 0); - addSegment(timeline, server, "1960-02-13/1961-02-14", ImmutableList.of("d1"), ImmutableList.of("m1")); - addSegment(timeline, server, "2014-02-13/2014-02-14", ImmutableList.of("d1"), ImmutableList.of("m1")); - addSegment(timeline, server, "2014-02-14/2014-02-15", ImmutableList.of("d1"), ImmutableList.of("m1")); - addSegment(timeline, server, "2014-02-16/2014-02-17", ImmutableList.of("d1"), ImmutableList.of("m1")); - addSegment(timeline, server, "2014-02-17/2014-02-18", ImmutableList.of("d2"), ImmutableList.of("m2")); + addSegment(timeline, server, "1960-02-13/1961-02-14", ImmutableList.of("d5"), ImmutableList.of("m5"), "v0"); + + // segments within [2014-02-13, 2014-02-18] + addSegment(timeline, server, "2014-02-13/2014-02-14", ImmutableList.of("d1"), ImmutableList.of("m1"), "v0"); + addSegment(timeline, server, "2014-02-14/2014-02-15", ImmutableList.of("d1"), ImmutableList.of("m1"), "v0"); + addSegment(timeline, server, "2014-02-16/2014-02-17", ImmutableList.of("d1"), ImmutableList.of("m1"), "v0"); + addSegment(timeline, server, "2014-02-17/2014-02-18", ImmutableList.of("d2"), ImmutableList.of("m2"), "v0"); + + // segments within [2015-02-01, 2015-02-13] + addSegment(timeline, server, "2015-02-01/2015-02-07", ImmutableList.of("d1"), ImmutableList.of("m1"), "v1"); + addSegment(timeline, server, "2015-02-07/2015-02-13", ImmutableList.of("d1"), ImmutableList.of("m1"), "v1"); + 
addSegmentWithShardSpec( + timeline, server, "2015-02-03/2015-02-05", + ImmutableList.of("d1", "d2"), + ImmutableList.of("m1", "m2"), + "v2", + new NumberedShardSpec(0, 2) + ); + addSegmentWithShardSpec( + timeline, server, "2015-02-03/2015-02-05", + ImmutableList.of("d1", "d2", "d3"), + ImmutableList.of("m1", "m2", "m3"), + "v2", + new NumberedShardSpec(1, 2) + ); + addSegment( + timeline, + server, + "2015-02-09/2015-02-10", + ImmutableList.of("d1", "d3"), + ImmutableList.of("m1", "m3"), + "v2" + ); + addSegment(timeline, server, "2015-02-11/2015-02-12", ImmutableList.of("d3"), ImmutableList.of("m3"), "v2"); + + // segments within [2015-03-13, 2015-03-19] + addSegment(timeline, server, "2015-03-13/2015-03-19", ImmutableList.of("d1"), ImmutableList.of("m1"), "v3"); + addSegment(timeline, server, "2015-03-13/2015-03-14", ImmutableList.of("d1"), ImmutableList.of("m1"), "v4"); + addSegment(timeline, server, "2015-03-14/2015-03-15", ImmutableList.of("d1"), ImmutableList.of("m1"), "v5"); + addSegment(timeline, server, "2015-03-15/2015-03-16", ImmutableList.of("d1"), ImmutableList.of("m1"), "v6"); + + // incomplete segment + addSegmentWithShardSpec( + timeline, server, "2015-04-03/2015-04-05", + ImmutableList.of("d4"), + ImmutableList.of("m4"), + "v7", + new NumberedShardSpec(0, 2) + ); serverInventoryView = EasyMock.createMock(InventoryView.class); EasyMock.expect(serverInventoryView.getInventory()).andReturn(ImmutableList.of(server)).anyTimes(); @@ -83,10 +126,10 @@ public class ClientInfoResourceTest } @Test - public void testGetDatasourceNonFullWithLargeInterval() + public void testGetDatasourceNonFullWithInterval() { - Map actual = resource.getDatasource(dataSource, "1975/2050", null); - Map expected = ImmutableMap.of( + Map actual = resource.getDatasource(dataSource, "1975/2015", null); + Map expected = ImmutableMap.of( KEY_DIMENSIONS, ImmutableSet.of("d1", "d2"), KEY_METRICS, ImmutableSet.of("m1", "m2") ); @@ -95,11 +138,10 @@ public class ClientInfoResourceTest 
} @Test - public void testGetDatasourceFullWithLargeInterval() + public void testGetDatasourceFullWithInterval() { - - Map actual = resource.getDatasource(dataSource, "1975/2050", "true"); - Map expected = ImmutableMap.of( + Map actual = resource.getDatasource(dataSource, "1975/2015", "true"); + Map expected = ImmutableMap.of( "2014-02-13T00:00:00.000Z/2014-02-15T00:00:00.000Z", ImmutableMap.of(KEY_DIMENSIONS, ImmutableSet.of("d1"), KEY_METRICS, ImmutableSet.of("m1")), "2014-02-16T00:00:00.000Z/2014-02-17T00:00:00.000Z", @@ -120,7 +162,7 @@ public class ClientInfoResourceTest "2014-02-13T09:00:00.000Z/2014-02-17T23:00:00.000Z", "true" ); - Map expected = ImmutableMap.of( + Map expected = ImmutableMap.of( "2014-02-13T09:00:00.000Z/2014-02-15T00:00:00.000Z", ImmutableMap.of(KEY_DIMENSIONS, ImmutableSet.of("d1"), KEY_METRICS, ImmutableSet.of("m1")), "2014-02-16T00:00:00.000Z/2014-02-17T00:00:00.000Z", @@ -136,8 +178,10 @@ public class ClientInfoResourceTest @Test public void testGetDatasourceWithDefaultInterval() { - Map actual = resource.getDatasource(dataSource, null, "false"); - Assert.assertEquals(actual.size(), 0); + Map actual = resource.getDatasource(dataSource, null, null); + Map expected = ImmutableMap.of(KEY_DIMENSIONS, ImmutableSet.of(), KEY_METRICS, ImmutableSet.of()); + + Assert.assertEquals(expected, actual); } @Test @@ -148,28 +192,160 @@ public class ClientInfoResourceTest new SegmentMetadataQueryConfig("P100Y") ); - Map expected = ImmutableMap.of( - "1960-02-13T00:00:00.000Z/1961-02-14T00:00:00.000Z", - ImmutableMap.of(KEY_DIMENSIONS, ImmutableSet.of("d1"), KEY_METRICS, ImmutableSet.of("m1")), - "2014-02-13T00:00:00.000Z/2014-02-15T00:00:00.000Z", - ImmutableMap.of(KEY_DIMENSIONS, ImmutableSet.of("d1"), KEY_METRICS, ImmutableSet.of("m1")), - "2014-02-16T00:00:00.000Z/2014-02-17T00:00:00.000Z", - ImmutableMap.of(KEY_DIMENSIONS, ImmutableSet.of("d1"), KEY_METRICS, ImmutableSet.of("m1")), - "2014-02-17T00:00:00.000Z/2014-02-18T00:00:00.000Z", - 
ImmutableMap.of(KEY_DIMENSIONS, ImmutableSet.of("d2"), KEY_METRICS, ImmutableSet.of("m2")) + Map expected = ImmutableMap.of( + KEY_DIMENSIONS, + ImmutableSet.of("d1", "d2", "d3", "d4", "d5"), + KEY_METRICS, + ImmutableSet.of("m1", "m2", "m3", "m4", "m5") ); - Map actual = defaultResource.getDatasource(dataSource, null, "false"); + Map actual = defaultResource.getDatasource(dataSource, null, null); + Assert.assertEquals(expected, actual); } + @Test + public void testGetDatasourceFullWithOvershadowedSegments1() + { + Map actual = resource.getDatasource( + dataSource, + "2015-02-02T09:00:00.000Z/2015-02-06T23:00:00.000Z", + "true" + ); + + Map expected = ImmutableMap.of( + "2015-02-02T09:00:00.000Z/2015-02-03T00:00:00.000Z", + ImmutableMap.of(KEY_DIMENSIONS, ImmutableSet.of("d1"), KEY_METRICS, ImmutableSet.of("m1")), + "2015-02-03T00:00:00.000Z/2015-02-05T00:00:00.000Z", + ImmutableMap.of( + KEY_DIMENSIONS, + ImmutableSet.of("d1", "d2", "d3"), + KEY_METRICS, + ImmutableSet.of("m1", "m2", "m3") + ), + "2015-02-05T00:00:00.000Z/2015-02-06T23:00:00.000Z", + ImmutableMap.of(KEY_DIMENSIONS, ImmutableSet.of("d1"), KEY_METRICS, ImmutableSet.of("m1")) + ); + + EasyMock.verify(serverInventoryView, timelineServerView); + Assert.assertEquals(expected, actual); + + } + + @Test + public void testGetDatasourceFullWithOvershadowedSegments2() + { + Map actual= resource.getDatasource( + dataSource, + "2015-02-09T09:00:00.000Z/2015-02-13T23:00:00.000Z", + "true" + ); + + Map expected = ImmutableMap.of( + "2015-02-09T09:00:00.000Z/2015-02-10T00:00:00.000Z", + ImmutableMap.of(KEY_DIMENSIONS, ImmutableSet.of("d1", "d3"), KEY_METRICS, ImmutableSet.of("m1", "m3")), + "2015-02-10T00:00:00.000Z/2015-02-11T00:00:00.000Z", + ImmutableMap.of(KEY_DIMENSIONS, ImmutableSet.of("d1"), KEY_METRICS, ImmutableSet.of("m1")), + "2015-02-11T00:00:00.000Z/2015-02-12T00:00:00.000Z", + ImmutableMap.of(KEY_DIMENSIONS, ImmutableSet.of("d3"), KEY_METRICS, ImmutableSet.of("m3")), + 
"2015-02-12T00:00:00.000Z/2015-02-13T00:00:00.000Z", + ImmutableMap.of(KEY_DIMENSIONS, ImmutableSet.of("d1"), KEY_METRICS, ImmutableSet.of("m1")) + ); + + EasyMock.verify(serverInventoryView, timelineServerView); + Assert.assertEquals(expected, actual); + } + + /** + * Though segments within [2015-03-13, 2015-03-19] have different versions, they all abut with each other and have + * same dimensions/metrics, so they all should be merged together. + */ + @Test + public void testGetDatasourceFullWithOvershadowedSegmentsMerged() + { + Map actual = resource.getDatasource( + dataSource, + "2015-03-13T02:00:00.000Z/2015-03-19T15:00:00.000Z", + "true" + ); + + Map expected = ImmutableMap.of( + "2015-03-13T02:00:00.000Z/2015-03-19T00:00:00.000Z", + ImmutableMap.of(KEY_DIMENSIONS, ImmutableSet.of("d1"), KEY_METRICS, ImmutableSet.of("m1")) + ); + + EasyMock.verify(serverInventoryView, timelineServerView); + Assert.assertEquals(expected, actual); + } + + /** + * If "full" is specified, then dimensions/metrics that exist in an incomplete segment should be ignored + */ + @Test + public void testGetDatasourceFullWithIncompleteSegment() + { + Map actual = resource.getDatasource(dataSource, "2015-04-03/2015-04-05", "true"); + Map expected = ImmutableMap.of(); + + EasyMock.verify(serverInventoryView, timelineServerView); + Assert.assertEquals(expected, actual); + } + + @Test + public void testGetDatasourceFullWithLargeInterval() + { + Map actual = resource.getDatasource(dataSource, "1975/2050", "true"); + Map expected = ImmutableMap.builder().put( + "2014-02-13T00:00:00.000Z/2014-02-15T00:00:00.000Z", + ImmutableMap.of(KEY_DIMENSIONS, ImmutableSet.of("d1"), KEY_METRICS, ImmutableSet.of("m1")) + ).put( + "2014-02-16T00:00:00.000Z/2014-02-17T00:00:00.000Z", + ImmutableMap.of(KEY_DIMENSIONS, ImmutableSet.of("d1"), KEY_METRICS, ImmutableSet.of("m1")) + ).put( + "2014-02-17T00:00:00.000Z/2014-02-18T00:00:00.000Z", + ImmutableMap.of(KEY_DIMENSIONS, ImmutableSet.of("d2"), KEY_METRICS, 
ImmutableSet.of("m2")) + ).put( + "2015-02-01T00:00:00.000Z/2015-02-03T00:00:00.000Z", + ImmutableMap.of(KEY_DIMENSIONS, ImmutableSet.of("d1"), KEY_METRICS, ImmutableSet.of("m1")) + ).put( + "2015-02-03T00:00:00.000Z/2015-02-05T00:00:00.000Z", + ImmutableMap.of( + KEY_DIMENSIONS, + ImmutableSet.of("d1", "d2", "d3"), + KEY_METRICS, + ImmutableSet.of("m1", "m2", "m3") + ) + ).put( + "2015-02-05T00:00:00.000Z/2015-02-09T00:00:00.000Z", + ImmutableMap.of(KEY_DIMENSIONS, ImmutableSet.of("d1"), KEY_METRICS, ImmutableSet.of("m1")) + ).put( + "2015-02-09T00:00:00.000Z/2015-02-10T00:00:00.000Z", + ImmutableMap.of(KEY_DIMENSIONS, ImmutableSet.of("d1", "d3"), KEY_METRICS, ImmutableSet.of("m1", "m3")) + ).put( + "2015-02-10T00:00:00.000Z/2015-02-11T00:00:00.000Z", + ImmutableMap.of(KEY_DIMENSIONS, ImmutableSet.of("d1"), KEY_METRICS, ImmutableSet.of("m1")) + ).put( + "2015-02-11T00:00:00.000Z/2015-02-12T00:00:00.000Z", + ImmutableMap.of(KEY_DIMENSIONS, ImmutableSet.of("d3"), KEY_METRICS, ImmutableSet.of("m3")) + ).put( + "2015-02-12T00:00:00.000Z/2015-02-13T00:00:00.000Z", + ImmutableMap.of(KEY_DIMENSIONS, ImmutableSet.of("d1"), KEY_METRICS, ImmutableSet.of("m1")) + ).put( + "2015-03-13T00:00:00.000Z/2015-03-19T00:00:00.000Z", + ImmutableMap.of(KEY_DIMENSIONS, ImmutableSet.of("d1"), KEY_METRICS, ImmutableSet.of("m1")) + ).build(); + + EasyMock.verify(serverInventoryView, timelineServerView); + Assert.assertEquals(expected, actual); + } private void addSegment( VersionedIntervalTimeline timeline, DruidServer server, String interval, List dims, - List metrics + List metrics, + String version ) { DataSegment segment = DataSegment.builder() @@ -185,6 +361,30 @@ public class ClientInfoResourceTest timeline.add(new Interval(interval), version, new SingleElementPartitionChunk(ss)); } + private void addSegmentWithShardSpec( + VersionedIntervalTimeline timeline, + DruidServer server, + String interval, + List dims, + List metrics, + String version, + ShardSpec shardSpec + ) + { + 
DataSegment segment = DataSegment.builder() + .dataSource(dataSource) + .interval(new Interval(interval)) + .version(version) + .dimensions(dims) + .metrics(metrics) + .shardSpec(shardSpec) + .size(1) + .build(); + server.addDataSegment(segment.getIdentifier(), segment); + ServerSelector ss = new ServerSelector(segment, null); + timeline.add(new Interval(interval), version, shardSpec.createChunk(ss)); + } + private ClientInfoResource getResourceTestHelper( InventoryView serverInventoryView, TimelineServerView timelineServerView,