Add new coordinator HTTP endpoints: datasource interval listings (plain/simple/full), per-interval segment detail, datasource tiers, tier-to-datasources lookup, and segment-with-servers metadata; also expose server type and priority in ServersResource.

This commit is contained in:
fjy 2014-03-28 13:55:44 -07:00
parent 652506d1bd
commit 676671e575
3 changed files with 239 additions and 31 deletions

View File

@ -26,6 +26,9 @@ import com.google.common.collect.Lists;
import com.google.common.collect.Maps; import com.google.common.collect.Maps;
import com.google.common.collect.Sets; import com.google.common.collect.Sets;
import com.google.inject.Inject; import com.google.inject.Inject;
import com.metamx.common.MapUtils;
import com.metamx.common.Pair;
import com.metamx.common.guava.Comparators;
import io.druid.client.DruidDataSource; import io.druid.client.DruidDataSource;
import io.druid.client.DruidServer; import io.druid.client.DruidServer;
import io.druid.client.InventoryView; import io.druid.client.InventoryView;
@ -86,7 +89,7 @@ public class DatasourcesResource
@QueryParam("simple") String simple @QueryParam("simple") String simple
) )
{ {
Response.ResponseBuilder builder = Response.status(Response.Status.OK); Response.ResponseBuilder builder = Response.ok();
if (full != null) { if (full != null) {
return builder.entity(getDataSources()).build(); return builder.entity(getDataSources()).build();
} else if (simple != null) { } else if (simple != null) {
@ -133,7 +136,7 @@ public class DatasourcesResource
{ {
DruidDataSource dataSource = getDataSource(dataSourceName.toLowerCase()); DruidDataSource dataSource = getDataSource(dataSourceName.toLowerCase());
if (dataSource == null) { if (dataSource == null) {
return Response.status(Response.Status.NOT_FOUND).build(); return Response.noContent().build();
} }
return Response.ok(dataSource).build(); return Response.ok(dataSource).build();
@ -147,10 +150,10 @@ public class DatasourcesResource
) )
{ {
if (!databaseSegmentManager.enableDatasource(dataSourceName)) { if (!databaseSegmentManager.enableDatasource(dataSourceName)) {
return Response.status(Response.Status.NOT_FOUND).build(); return Response.noContent().build();
} }
return Response.status(Response.Status.OK).build(); return Response.ok().build();
} }
@DELETE @DELETE
@ -163,15 +166,14 @@ public class DatasourcesResource
) )
{ {
if (indexingServiceClient == null) { if (indexingServiceClient == null) {
return Response.ok().entity(ImmutableMap.of("error", "no indexing service found")).build(); return Response.ok(ImmutableMap.of("error", "no indexing service found")).build();
} }
if (kill != null && Boolean.valueOf(kill)) { if (kill != null && Boolean.valueOf(kill)) {
try { try {
indexingServiceClient.killSegments(dataSourceName, new Interval(interval)); indexingServiceClient.killSegments(dataSourceName, new Interval(interval));
} }
catch (Exception e) { catch (Exception e) {
return Response.status(Response.Status.NOT_FOUND) return Response.serverError().entity(
.entity(
ImmutableMap.of( ImmutableMap.of(
"error", "error",
"Exception occurred. Are you sure you have an indexing service?" "Exception occurred. Are you sure you have an indexing service?"
@ -181,11 +183,144 @@ public class DatasourcesResource
} }
} else { } else {
if (!databaseSegmentManager.removeDatasource(dataSourceName)) { if (!databaseSegmentManager.removeDatasource(dataSourceName)) {
return Response.status(Response.Status.NOT_FOUND).build(); return Response.noContent().build();
} }
} }
return Response.status(Response.Status.OK).build(); return Response.ok().build();
}
@GET
@Path("/{dataSourceName}/intervals")
@Produces("application/json")
public Response getSegmentDataSourceIntervals(
    @PathParam("dataSourceName") String dataSourceName,
    @QueryParam("simple") String simple,
    @QueryParam("full") String full
)
{
  // Lists the intervals covered by a datasource's segments, newest interval first.
  //   ?full   -> per-interval map with {id, metadata, servers} of the segments
  //   ?simple -> per-interval aggregates {size, count}
  //   default -> a bare, sorted set of intervals
  // Returns 204 No Content when the datasource is unknown.
  final DruidDataSource dataSource = getDataSource(dataSourceName.toLowerCase());
  if (dataSource == null) {
    return Response.noContent().build();
  }

  // Descending by start (then end) so the most recent interval comes first.
  final Comparator<Interval> comparator = Comparators.inverse(Comparators.intervalsByStartThenEnd());

  if (full != null) {
    final Map<Interval, Map<String, Object>> retVal = Maps.newTreeMap(comparator);
    for (DataSegment dataSegment : dataSource.getSegments()) {
      Map<String, Object> segments = retVal.get(dataSegment.getInterval());
      if (segments == null) {
        segments = Maps.newHashMap();
        retVal.put(dataSegment.getInterval(), segments);
      }

      // Guard against a race where the segment disappears from the inventory
      // between enumerating the datasource and looking the segment up again.
      Pair<DataSegment, Set<String>> val = getSegment(dataSegment.getIdentifier());
      segments.put("id", dataSegment.getIdentifier());
      if (val != null) {
        // NOTE(review): when several segments share an interval these puts
        // overwrite each other, so only the last segment per interval is
        // reported — confirm this is intended.
        segments.put("metadata", val.lhs);
        segments.put("servers", val.rhs);
      }
    }

    return Response.ok(retVal).build();
  }

  if (simple != null) {
    // Was an unsorted HashMap; use the same descending-interval ordering as
    // the full and default branches so all three variants agree.
    final Map<Interval, Map<String, Object>> retVal = Maps.newTreeMap(comparator);
    for (DataSegment dataSegment : dataSource.getSegments()) {
      Map<String, Object> properties = retVal.get(dataSegment.getInterval());
      if (properties == null) {
        properties = Maps.newHashMap();
        properties.put("size", dataSegment.getSize());
        properties.put("count", 1);

        retVal.put(dataSegment.getInterval(), properties);
      } else {
        properties.put("size", MapUtils.getLong(properties, "size", 0L) + dataSegment.getSize());
        properties.put("count", MapUtils.getInt(properties, "count", 0) + 1);
      }
    }

    return Response.ok(retVal).build();
  }

  final Set<Interval> intervals = Sets.newTreeSet(comparator);
  for (DataSegment dataSegment : dataSource.getSegments()) {
    intervals.add(dataSegment.getInterval());
  }

  return Response.ok(intervals).build();
}
@GET
@Path("/{dataSourceName}/intervals/{interval}")
@Produces("application/json")
public Response getSegmentDataSourceSpecificInterval(
@PathParam("dataSourceName") String dataSourceName,
@PathParam("interval") String interval,
@QueryParam("simple") String simple,
@QueryParam("full") String full
)
{
final DruidDataSource dataSource = getDataSource(dataSourceName.toLowerCase());
final Interval theInterval = new Interval(interval.replace("_", "/"));
if (dataSource == null || interval == null) {
return Response.noContent().build();
}
final Comparator<Interval> comparator = Comparators.inverse(Comparators.intervalsByStartThenEnd());
if (full != null) {
final Map<Interval, Map<String, Object>> retVal = Maps.newTreeMap(comparator);
for (DataSegment dataSegment : dataSource.getSegments()) {
if (theInterval.contains(dataSegment.getInterval())) {
Map<String, Object> segments = retVal.get(dataSegment.getInterval());
if (segments == null) {
segments = Maps.newHashMap();
retVal.put(dataSegment.getInterval(), segments);
}
Pair<DataSegment, Set<String>> val = getSegment(dataSegment.getIdentifier());
segments.put("id", dataSegment.getIdentifier());
segments.put("metadata", val.lhs);
segments.put("servers", val.rhs);
}
}
return Response.ok(retVal).build();
}
if (simple != null) {
final Map<Interval, Map<String, Object>> retVal = Maps.newHashMap();
for (DataSegment dataSegment : dataSource.getSegments()) {
if (theInterval.contains(dataSegment.getInterval())) {
Map<String, Object> properties = retVal.get(dataSegment.getInterval());
if (properties == null) {
properties = Maps.newHashMap();
properties.put("size", dataSegment.getSize());
properties.put("count", 1);
retVal.put(dataSegment.getInterval(), properties);
} else {
properties.put("size", MapUtils.getLong(properties, "size", 0L) + dataSegment.getSize());
properties.put("count", MapUtils.getInt(properties, "count", 0) + 1);
}
}
}
return Response.ok(retVal).build();
}
final Set<Interval> intervals = Sets.newTreeSet(comparator);
for (DataSegment dataSegment : dataSource.getSegments()) {
if (theInterval.contains(dataSegment.getInterval())) {
intervals.add(dataSegment.getInterval());
}
}
return Response.ok(intervals).build();
} }
@GET @GET
@ -198,10 +333,10 @@ public class DatasourcesResource
{ {
DruidDataSource dataSource = getDataSource(dataSourceName.toLowerCase()); DruidDataSource dataSource = getDataSource(dataSourceName.toLowerCase());
if (dataSource == null) { if (dataSource == null) {
return Response.status(Response.Status.NOT_FOUND).build(); return Response.noContent().build();
} }
Response.ResponseBuilder builder = Response.status(Response.Status.OK); Response.ResponseBuilder builder = Response.ok();
if (full != null) { if (full != null) {
return builder.entity(dataSource.getSegments()).build(); return builder.entity(dataSource.getSegments()).build();
} }
@ -212,7 +347,7 @@ public class DatasourcesResource
new Function<DataSegment, Object>() new Function<DataSegment, Object>()
{ {
@Override @Override
public Object apply(@Nullable DataSegment segment) public Object apply(DataSegment segment)
{ {
return segment.getIdentifier(); return segment.getIdentifier();
} }
@ -231,15 +366,18 @@ public class DatasourcesResource
{ {
DruidDataSource dataSource = getDataSource(dataSourceName.toLowerCase()); DruidDataSource dataSource = getDataSource(dataSourceName.toLowerCase());
if (dataSource == null) { if (dataSource == null) {
return Response.status(Response.Status.NOT_FOUND).build(); return Response.noContent().build();
} }
for (DataSegment segment : dataSource.getSegments()) { Pair<DataSegment, Set<String>> retVal = getSegment(segmentId);
if (segment.getIdentifier().equalsIgnoreCase(segmentId)) {
return Response.status(Response.Status.OK).entity(segment).build(); if (retVal != null) {
return Response.ok(
ImmutableMap.of("metadata", retVal.lhs, "servers", retVal.rhs)
).build();
} }
}
return Response.status(Response.Status.NOT_FOUND).build(); return Response.noContent().build();
} }
@DELETE @DELETE
@ -250,10 +388,10 @@ public class DatasourcesResource
) )
{ {
if (!databaseSegmentManager.removeSegment(dataSourceName, segmentId)) { if (!databaseSegmentManager.removeSegment(dataSourceName, segmentId)) {
return Response.status(Response.Status.NOT_FOUND).build(); return Response.noContent().build();
} }
return Response.status(Response.Status.OK).build(); return Response.ok().build();
} }
@POST @POST
@ -265,10 +403,27 @@ public class DatasourcesResource
) )
{ {
if (!databaseSegmentManager.enableSegment(segmentId)) { if (!databaseSegmentManager.enableSegment(segmentId)) {
return Response.status(Response.Status.NOT_FOUND).build(); return Response.noContent().build();
} }
return Response.status(Response.Status.OK).build(); return Response.ok().build();
}
@GET
@Path("/{dataSourceName}/tiers")
@Produces("application/json")
public Response getSegmentDataSourceTiers(
@PathParam("dataSourceName") String dataSourceName
)
{
Set<String> retVal = Sets.newHashSet();
for (DruidServer druidServer : serverInventoryView.getInventory()) {
if(druidServer.getDataSource(dataSourceName) != null) {
retVal.add(druidServer.getTier());
}
}
return Response.ok(retVal).build();
} }
private DruidDataSource getDataSource(final String dataSourceName) private DruidDataSource getDataSource(final String dataSourceName)
@ -345,4 +500,23 @@ public class DatasourcesResource
); );
return dataSources; return dataSources;
} }
private Pair<DataSegment, Set<String>> getSegment(String segmentId)
{
  // Looks up a segment by id across the cluster inventory: returns the
  // segment metadata paired with the hosts currently serving it, or null
  // if no server holds the segment.
  final Set<String> hosts = Sets.newHashSet();
  DataSegment found = null;
  for (DruidServer server : serverInventoryView.getInventory()) {
    DataSegment candidate = server.getSegments().get(segmentId);
    if (candidate != null) {
      found = candidate;
      hosts.add(server.getHost());
    }
  }
  return (found == null) ? null : new Pair<>(found, hosts);
}
} }

View File

@ -48,6 +48,8 @@ public class ServersResource
return new ImmutableMap.Builder<String, Object>() return new ImmutableMap.Builder<String, Object>()
.put("host", input.getHost()) .put("host", input.getHost())
.put("tier", input.getTier()) .put("tier", input.getTier())
.put("type", input.getType())
.put("priority", input.getPriority())
.put("currSize", input.getCurrSize()) .put("currSize", input.getCurrSize())
.put("maxSize", input.getMaxSize()) .put("maxSize", input.getMaxSize())
.build(); .build();

View File

@ -19,18 +19,19 @@
package io.druid.server.http; package io.druid.server.http;
import com.google.api.client.util.Lists;
import com.google.api.client.util.Maps; import com.google.api.client.util.Maps;
import com.google.common.collect.HashBasedTable; import com.google.common.base.Function;
import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables;
import com.google.common.collect.Sets; import com.google.common.collect.Sets;
import com.google.common.collect.Table;
import com.google.inject.Inject; import com.google.inject.Inject;
import com.metamx.common.MapUtils; import io.druid.client.DruidDataSource;
import io.druid.client.DruidServer; import io.druid.client.DruidServer;
import io.druid.client.InventoryView; import io.druid.client.InventoryView;
import javax.ws.rs.GET; import javax.ws.rs.GET;
import javax.ws.rs.Path; import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces; import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam; import javax.ws.rs.QueryParam;
import javax.ws.rs.core.Response; import javax.ws.rs.core.Response;
@ -86,4 +87,35 @@ public class TiersResource
return builder.entity(tiers).build(); return builder.entity(tiers).build();
} }
@GET
@Path("/{tierName}")
@Produces("application/json")
public Response getTierDatasources(
    @PathParam("tierName") String tierName
)
{
  // Names of every datasource hosted by any server in the given tier
  // (tier names compared case-insensitively). A plain nested loop replaces
  // the transform/newArrayList pipeline; the resulting set is identical.
  Set<String> retVal = Sets.newHashSet();
  for (DruidServer druidServer : serverInventoryView.getInventory()) {
    if (druidServer.getTier().equalsIgnoreCase(tierName)) {
      for (DruidDataSource dataSource : druidServer.getDataSources()) {
        retVal.add(dataSource.getName());
      }
    }
  }
  return Response.ok(retVal).build();
}
} }