mirror of https://github.com/apache/druid.git
Merge pull request #1865 from noddi/bugfix/issue-560
Fix #560 - datasource segments summary shouldn't include replication.
commit dc4ae59aaf
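Context for the change: the datasource summary was built by iterating over every server in the inventory, so a segment loaded on N servers (i.e. replicated N times) was counted N times in segmentCount and size. The patch deduplicates by segment identifier, per tier and overall. A minimal sketch of that idea, using illustrative names rather than the patch's actual classes:

import java.util.HashSet;
import java.util.Set;

// Sketch of the dedup idea only; the class and method names here are
// illustrative, not part of the patch or of the Druid API.
class SegmentSummary
{
  private final Set<String> seen = new HashSet<>();
  private int count = 0;
  private long size = 0;

  // Called once per (server, segment) pair, i.e. once per replica.
  void accumulate(String segmentId, long segmentSize)
  {
    if (seen.add(segmentId)) {  // true only for the first replica seen
      count += 1;
      size += segmentSize;
    }
  }

  int getCount()
  {
    return count;
  }

  long getSize()
  {
    return size;
  }
}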
@@ -51,6 +51,7 @@ import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.Response;
 
 import java.util.Comparator;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
 import java.util.Set;
@@ -498,47 +499,59 @@ public class DatasourcesResource
         "tiers", tiers,
         "segments", segments
     );
 
-    int totalSegmentCount = 0;
+    Set<String> totalDistinctSegments = Sets.newHashSet();
+    Map<String, HashSet<Object>> tierDistinctSegments = Maps.newHashMap();
+
     long totalSegmentSize = 0;
     long minTime = Long.MAX_VALUE;
     long maxTime = Long.MIN_VALUE;
+    String tier;
     for (DruidServer druidServer : serverInventoryView.getInventory()) {
       DruidDataSource druidDataSource = druidServer.getDataSource(dataSourceName);
+      tier = druidServer.getTier();
 
       if (druidDataSource == null) {
         continue;
       }
 
+      if (!tierDistinctSegments.containsKey(tier)) {
+        tierDistinctSegments.put(tier, Sets.newHashSet());
+      }
+
       long dataSourceSegmentSize = 0;
       for (DataSegment dataSegment : druidDataSource.getSegments()) {
-        dataSourceSegmentSize += dataSegment.getSize();
-        if (dataSegment.getInterval().getStartMillis() < minTime) {
-          minTime = dataSegment.getInterval().getStartMillis();
+        // tier segments stats
+        if (!tierDistinctSegments.get(tier).contains(dataSegment.getIdentifier())) {
+          dataSourceSegmentSize += dataSegment.getSize();
+          tierDistinctSegments.get(tier).add(dataSegment.getIdentifier());
         }
-        if (dataSegment.getInterval().getEndMillis() > maxTime) {
-          maxTime = dataSegment.getInterval().getEndMillis();
+        // total segments stats
+        if (!totalDistinctSegments.contains(dataSegment.getIdentifier())) {
+          totalSegmentSize += dataSegment.getSize();
+          totalDistinctSegments.add(dataSegment.getIdentifier());
+
+          if (dataSegment.getInterval().getStartMillis() < minTime) {
+            minTime = dataSegment.getInterval().getStartMillis();
+          }
+          if (dataSegment.getInterval().getEndMillis() > maxTime) {
+            maxTime = dataSegment.getInterval().getEndMillis();
+          }
         }
       }
 
-      // segment stats
-      totalSegmentCount += druidDataSource.getSegments().size();
-      totalSegmentSize += dataSourceSegmentSize;
-
       // tier stats
-      Map<String, Object> tierStats = (Map) tiers.get(druidServer.getTier());
+      Map<String, Object> tierStats = (Map) tiers.get(tier);
       if (tierStats == null) {
         tierStats = Maps.newHashMap();
         tiers.put(druidServer.getTier(), tierStats);
       }
-      int segmentCount = MapUtils.getInt(tierStats, "segmentCount", 0);
-      tierStats.put("segmentCount", segmentCount + druidDataSource.getSegments().size());
+      tierStats.put("segmentCount", tierDistinctSegments.get(tier).size());
 
       long segmentSize = MapUtils.getLong(tierStats, "size", 0L);
       tierStats.put("size", segmentSize + dataSourceSegmentSize);
     }
 
-    segments.put("count", totalSegmentCount);
+    segments.put("count", totalDistinctSegments.size());
     segments.put("size", totalSegmentSize);
     segments.put("minTime", new DateTime(minTime));
     segments.put("maxTime", new DateTime(maxTime));
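To see the arithmetic this hunk fixes: with one 10-byte segment replicated on two servers of the same tier, the old loop added the size once per server, while the new loop adds it once per distinct identifier. A self-contained sketch (not the Druid code) that reproduces both results:

import java.util.Arrays;
import java.util.HashSet;
import java.util.List;
import java.util.Set;

public class ReplicationCountDemo
{
  public static void main(String[] args)
  {
    // Two servers in one tier, each reporting a replica of the same 10-byte segment.
    List<String> reportedSegmentIds = Arrays.asList("seg_2010-01-01", "seg_2010-01-01");
    long segmentSize = 10L;

    // Old behaviour: one increment per replica reported.
    int oldCount = reportedSegmentIds.size();                // 2
    long oldSize = reportedSegmentIds.size() * segmentSize;  // 20

    // New behaviour: one increment per distinct segment identifier.
    Set<String> distinct = new HashSet<>(reportedSegmentIds);
    int newCount = distinct.size();                          // 1
    long newSize = distinct.size() * segmentSize;            // 10

    System.out.printf("old: count=%d size=%d%n", oldCount, oldSize);
    System.out.printf("new: count=%d size=%d%n", newCount, newSize);
  }
}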
@@ -63,7 +63,7 @@ public class DatasourcesResourceTest
         null,
         null,
         0x9,
-        0
+        10
       )
     );
     dataSegmentList.add(
@@ -76,7 +76,7 @@ public class DatasourcesResourceTest
         null,
         null,
         0x9,
-        0
+        20
       )
     );
     dataSegmentList.add(
@@ -89,7 +89,7 @@ public class DatasourcesResourceTest
         null,
         null,
         0x9,
-        0
+        30
       )
     );
     listDataSources = new ArrayList<>();
@@ -199,7 +199,7 @@ public class DatasourcesResourceTest
     DruidDataSource dataSource1 = new DruidDataSource("datasource1", new HashMap());
     dataSource1.addSegment(
         "partition",
-        new DataSegment("datasegment1", new Interval("2010-01-01/P1D"), null, null, null, null, null, 0x9, 0)
+        new DataSegment("datasegment1", new Interval("2010-01-01/P1D"), null, null, null, null, null, 0x9, 10)
     );
     EasyMock.expect(server.getDataSource("datasource1")).andReturn(
         dataSource1
@@ -215,12 +215,56 @@ public class DatasourcesResourceTest
     Assert.assertEquals(200, response.getStatus());
     Map<String, Map<String, Object>> result = (Map<String, Map<String, Object>>) response.getEntity();
     Assert.assertEquals(1, ((Map) (result.get("tiers").get(null))).get("segmentCount"));
     Assert.assertEquals(10L, ((Map) (result.get("tiers").get(null))).get("size"));
     Assert.assertNotNull(result.get("segments"));
-    Assert.assertNotNull(result.get("segments").get("minTime").toString(), "2010-01-01T00:00:00.000Z");
-    Assert.assertNotNull(result.get("segments").get("maxTime").toString(), "2010-01-02T00:00:00.000Z");
+    Assert.assertEquals("2010-01-01T00:00:00.000Z", result.get("segments").get("minTime").toString());
+    Assert.assertEquals("2010-01-02T00:00:00.000Z", result.get("segments").get("maxTime").toString());
+    Assert.assertEquals(1, result.get("segments").get("count"));
+    Assert.assertEquals(10L, result.get("segments").get("size"));
     EasyMock.verify(inventoryView, server);
   }
 
+  @Test
+  public void testSimpleGetTheDataSourceManyTiers() throws Exception
+  {
+    EasyMock.expect(server.getDataSource("datasource1")).andReturn(
+        listDataSources.get(0)
+    ).atLeastOnce();
+    EasyMock.expect(server.getTier()).andReturn("cold").atLeastOnce();
+
+    DruidServer server2 = EasyMock.createStrictMock(DruidServer.class);
+    EasyMock.expect(server2.getDataSource("datasource1")).andReturn(
+        listDataSources.get(1)
+    ).atLeastOnce();
+    EasyMock.expect(server2.getTier()).andReturn("hot").atLeastOnce();
+
+    DruidServer server3 = EasyMock.createStrictMock(DruidServer.class);
+    EasyMock.expect(server3.getDataSource("datasource1")).andReturn(
+        listDataSources.get(1)
+    ).atLeastOnce();
+    EasyMock.expect(server3.getTier()).andReturn("cold").atLeastOnce();
+
+    EasyMock.expect(inventoryView.getInventory()).andReturn(
+        ImmutableList.of(server, server2, server3)
+    ).atLeastOnce();
+
+    EasyMock.replay(inventoryView, server, server2, server3);
+    DatasourcesResource datasourcesResource = new DatasourcesResource(inventoryView, null, null);
+    Response response = datasourcesResource.getTheDataSource("datasource1", null);
+    Assert.assertEquals(200, response.getStatus());
+    Map<String, Map<String, Object>> result = (Map<String, Map<String, Object>>) response.getEntity();
+    Assert.assertEquals(2, ((Map) (result.get("tiers").get("cold"))).get("segmentCount"));
+    Assert.assertEquals(30L, ((Map) (result.get("tiers").get("cold"))).get("size"));
+    Assert.assertEquals(1, ((Map) (result.get("tiers").get("hot"))).get("segmentCount"));
+    Assert.assertEquals(20L, ((Map) (result.get("tiers").get("hot"))).get("size"));
+    Assert.assertNotNull(result.get("segments"));
+    Assert.assertEquals("2010-01-01T00:00:00.000Z", result.get("segments").get("minTime").toString());
+    Assert.assertEquals("2010-01-23T00:00:00.000Z", result.get("segments").get("maxTime").toString());
+    Assert.assertEquals(2, result.get("segments").get("count"));
+    Assert.assertEquals(30L, result.get("segments").get("size"));
+    EasyMock.verify(inventoryView, server, server2, server3);
+  }
+
   @Test
   public void testGetSegmentDataSourceIntervals()
   {
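The many-tiers test encodes the subtlety that tier stats and the global summary deduplicate over different scopes: the 20-byte segment served by both server2 ("hot") and server3 ("cold") counts once in each tier but only once in the total, giving count 2 and size 30 rather than 3 and 50. A sketch recomputing those expectations (the segment ids and the (tier, id, size) triples are illustrative stand-ins for the mocked fixtures):

import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

public class ManyTiersExpectation
{
  public static void main(String[] args)
  {
    // One report per (server, segment): server and server3 are "cold",
    // server2 is "hot"; segment2 is replicated across the two tiers.
    List<String[]> reports = Arrays.asList(
        new String[]{"cold", "segment1", "10"},
        new String[]{"hot", "segment2", "20"},
        new String[]{"cold", "segment2", "20"}
    );

    Map<String, Set<String>> perTier = new HashMap<>();
    Map<String, Long> tierSize = new HashMap<>();
    Set<String> total = new HashSet<>();
    long totalSize = 0;

    for (String[] r : reports) {
      String tier = r[0];
      String id = r[1];
      long size = Long.parseLong(r[2]);
      if (perTier.computeIfAbsent(tier, t -> new HashSet<>()).add(id)) {
        tierSize.merge(tier, size, Long::sum);  // counted once per tier
      }
      if (total.add(id)) {
        totalSize += size;                      // counted once overall
      }
    }

    // cold: 2 segments, 30 bytes; hot: 1 segment, 20 bytes; total: 2 segments, 30 bytes
    System.out.println("cold: " + perTier.get("cold").size() + " segments, " + tierSize.get("cold") + " bytes");
    System.out.println("hot: " + perTier.get("hot").size() + " segments, " + tierSize.get("hot") + " bytes");
    System.out.println("total: " + total.size() + " segments, " + totalSize + " bytes");
  }
}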