mirror of https://github.com/apache/druid.git
cleaner code for dimension/metric exploration on broker
parent e0bf3187b2
commit fb87458056
@@ -24,11 +24,11 @@ import com.google.common.collect.Lists;
 import com.google.common.collect.Maps;
 import com.google.common.collect.Sets;
 import com.google.inject.Inject;
-import com.metamx.common.IAE;
 import com.metamx.druid.client.ClientInventoryManager;
 import com.metamx.druid.client.DataSegment;
 import com.metamx.druid.client.DruidDataSource;
 import com.metamx.druid.client.DruidServer;
+import org.joda.time.DateTime;
 import org.joda.time.Interval;

 import javax.ws.rs.GET;
@@ -36,7 +36,7 @@ import javax.ws.rs.Path;
 import javax.ws.rs.PathParam;
 import javax.ws.rs.Produces;
 import javax.ws.rs.QueryParam;
-import java.util.Iterator;
+import java.util.List;
 import java.util.Map;
 import java.util.Set;

@@ -57,22 +57,26 @@ public class ClientInfoResource
     this.clientInventoryManager = clientInventoryManager;
   }

-  private Map<String, DruidDataSource> updateDataSources()
+  private Map<String, List<DataSegment>> getSegmentsForDatasources()
   {
-    final Map<String, DruidDataSource> dataSources = Maps.newHashMap();
+    final Map<String, List<DataSegment>> dataSourceMap = Maps.newHashMap();
     for (DruidServer server : clientInventoryManager.getInventory()) {
       for (DruidDataSource dataSource : server.getDataSources()) {
-        dataSources.put(dataSource.getName(), dataSource);
+        if (!dataSourceMap.containsKey(dataSource.getName())) {
+          dataSourceMap.put(dataSource.getName(), Lists.<DataSegment>newArrayList());
+        }
+        List<DataSegment> segments = dataSourceMap.get(dataSource.getName());
+        segments.addAll(dataSource.getSegments());
       }
     }
-    return dataSources;
+    return dataSourceMap;
   }

   @GET
   @Produces("application/json")
   public Iterable<String> getDataSources()
   {
-    return updateDataSources().keySet();
+    return getSegmentsForDatasources().keySet();
   }

   @GET
@@ -80,7 +84,7 @@ public class ClientInfoResource
   @Produces("application/json")
   public Map<String, Object> getDatasource(
       @PathParam("dataSourceName") String dataSourceName,
-      @QueryParam("interval") String interval
+      @QueryParam("interval") Interval interval
   )
   {
     return ImmutableMap.<String, Object>of(
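A note on the hunk above: the interval query parameter is now typed as a Joda-Time Interval rather than a raw String, so the hand-rolled string parsing and the IAE thrown on malformed input (removed in the hunks below) no longer live in the resource methods. For reference, a Joda Interval accepts the same ISO-8601 start/end form the old code converted with new Interval(interval); a minimal standalone sketch, with illustrative dates only:

import org.joda.time.Interval;

public class IntervalParseSketch
{
  public static void main(String[] args)
  {
    // Same conversion the removed code performed via `new Interval(interval)` on the query string.
    Interval interval = new Interval("2013-01-01/2013-02-01");

    System.out.println(interval.getStart()); // 2013-01-01T00:00:00.000 in the default time zone
    System.out.println(interval.getEnd());   // 2013-02-01T00:00:00.000 in the default time zone
  }
}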
@@ -94,47 +98,24 @@ public class ClientInfoResource
   @Produces("application/json")
   public Iterable<String> getDatasourceDimensions(
       @PathParam("dataSourceName") String dataSourceName,
-      @QueryParam("interval") String interval
+      @QueryParam("interval") Interval interval
   )
   {
-    DruidDataSource dataSource = updateDataSources().get(dataSourceName);
-
-    Set<String> retVal = Sets.newHashSet();
-
-    Interval dimInterval;
-    if (interval == null || interval.isEmpty()) {
-      Iterator<DataSegment> iter = Lists.reverse(Lists.newArrayList(dataSource.getSegments())).iterator();
-      DataSegment segment = iter.next();
-      retVal.addAll(segment.getDimensions());
-
-      dimInterval = new Interval(
-          segment.getInterval().getEnd().minus(SEGMENT_HISTORY_MILLIS),
-          segment.getInterval().getEnd()
-      );
-
-      while (iter.hasNext() && dimInterval.contains(segment.getInterval())) {
-        retVal.addAll(segment.getDimensions());
-        segment = iter.next();
-      }
-    } else {
-      try {
-        dimInterval = new Interval(interval);
-      }
-      catch (Exception e) {
-        throw new IAE("Interval is not in a parseable format!");
-      }
-
-      Iterator<DataSegment> iter = dataSource.getSegments().iterator();
-
-      while (iter.hasNext()) {
-        DataSegment segment = iter.next();
-        if (dimInterval.contains(segment.getInterval())) {
-          retVal.addAll(segment.getDimensions());
-        }
-      }
-    }
-
-    return retVal;
+    List<DataSegment> segments = getSegmentsForDatasources().get(dataSourceName);
+
+    if (interval == null) {
+      DateTime now = new DateTime();
+      interval = new Interval(now.minusMillis(SEGMENT_HISTORY_MILLIS), now);
+    }
+
+    Set<String> dims = Sets.newHashSet();
+    for (DataSegment segment : segments) {
+      if (interval.overlaps(segment.getInterval())) {
+        dims.addAll(segment.getDimensions());
+      }
+    }
+
+    return dims;
   }

   @GET
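A note on the hunk above: segment filtering switches from dimInterval.contains(segment.getInterval()) to interval.overlaps(segment.getInterval()), so a segment that only partially falls inside the queried window now contributes its dimensions (and, in the next hunk, its metrics) as well. A small self-contained Joda-Time sketch of the difference, with made-up dates:

import org.joda.time.DateTime;
import org.joda.time.Interval;

public class ContainsVersusOverlaps
{
  public static void main(String[] args)
  {
    // Queried window: all of 2013-01-31.
    Interval window = new Interval(new DateTime("2013-01-31"), new DateTime("2013-02-01"));

    // A segment whose interval straddles the end of the window.
    Interval segment = new Interval(new DateTime("2013-01-31T12:00:00"), new DateTime("2013-02-01T12:00:00"));

    System.out.println(window.contains(segment)); // false: the segment is not fully inside the window
    System.out.println(window.overlaps(segment)); // true: the two intervals share some time
  }
}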
@@ -142,46 +123,23 @@ public class ClientInfoResource
   @Produces("application/json")
   public Iterable<String> getDatasourceMetrics(
       @PathParam("dataSourceName") String dataSourceName,
-      @QueryParam("interval") String interval
+      @QueryParam("interval") Interval interval
   )
   {
-    DruidDataSource dataSource = updateDataSources().get(dataSourceName);
-
-    Set<String> retVal = Sets.newHashSet();
-
-    Interval dimInterval;
-    if (interval == null || interval.isEmpty()) {
-      Iterator<DataSegment> iter = Lists.reverse(Lists.newArrayList(dataSource.getSegments())).iterator();
-      DataSegment segment = iter.next();
-      retVal.addAll(segment.getMetrics());
-
-      dimInterval = new Interval(
-          segment.getInterval().getEnd().minus(SEGMENT_HISTORY_MILLIS),
-          segment.getInterval().getEnd()
-      );
-
-      while (iter.hasNext() && dimInterval.contains(segment.getInterval())) {
-        retVal.addAll(segment.getMetrics());
-        segment = iter.next();
-      }
-    } else {
-      try {
-        dimInterval = new Interval(interval);
-      }
-      catch (Exception e) {
-        throw new IAE("Interval is not in a parseable format!");
-      }
-
-      Iterator<DataSegment> iter = dataSource.getSegments().iterator();
-
-      while (iter.hasNext()) {
-        DataSegment segment = iter.next();
-        if (dimInterval.contains(segment.getInterval())) {
-          retVal.addAll(segment.getMetrics());
-        }
-      }
-    }
-
-    return retVal;
+    List<DataSegment> segments = getSegmentsForDatasources().get(dataSourceName);
+
+    if (interval == null) {
+      DateTime now = new DateTime();
+      interval = new Interval(now.minusMillis(SEGMENT_HISTORY_MILLIS), now);
+    }
+
+    Set<String> metrics = Sets.newHashSet();
+    for (DataSegment segment : segments) {
+      if (interval.overlaps(segment.getInterval())) {
+        metrics.addAll(segment.getMetrics());
+      }
+    }
+
+    return metrics;
   }
 }
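One behavioral note on the rewritten methods: when no interval is supplied, the old code anchored its lookback window to the end of the most recent segment, whereas the new code anchors it to the current time. A minimal sketch of the new default window; SEGMENT_HISTORY_MILLIS is defined elsewhere in ClientInfoResource and its value is not visible in this diff, so the constant below is only a placeholder:

import org.joda.time.DateTime;
import org.joda.time.Interval;

public class DefaultWindowSketch
{
  // Placeholder for ClientInfoResource.SEGMENT_HISTORY_MILLIS; the real value is outside this diff.
  private static final int SEGMENT_HISTORY_MILLIS = 7 * 24 * 60 * 60 * 1000;

  public static void main(String[] args)
  {
    // Same construction the resource methods use when the interval query parameter is absent.
    DateTime now = new DateTime();
    Interval defaultWindow = new Interval(now.minusMillis(SEGMENT_HISTORY_MILLIS), now);

    System.out.println(defaultWindow);
  }
}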