diff --git a/docs/content/design/coordinator.md b/docs/content/design/coordinator.md
index e959d73d33c..56b3e9cfd10 100644
--- a/docs/content/design/coordinator.md
+++ b/docs/content/design/coordinator.md
@@ -105,6 +105,14 @@
 Returns a list of all segments for a datasource as stored in the metadata store.
 
 Returns a list of all segments for a datasource with the full segment metadata as stored in the metadata store.
 
+* POST `/druid/coordinator/v1/metadata/datasources/{dataSourceName}/segments`
+
+Returns a list of all segments for a datasource that overlap any of the given intervals, as stored in the metadata store. The request body is a JSON array of ISO-8601 interval strings, e.g. ["2012-01-01T00:00:00.000/2012-01-03T00:00:00.000", "2012-01-05T00:00:00.000/2012-01-07T00:00:00.000"].
+
+* POST `/druid/coordinator/v1/metadata/datasources/{dataSourceName}/segments?full`
+
+Returns a list of all segments for a datasource that overlap any of the given intervals, with the full segment metadata as stored in the metadata store. The request body is the same JSON array of ISO-8601 interval strings as above.
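+
+For instance, a sketch of the exchange (the datasource name and the returned identifier here are hypothetical):
+
+```
+POST /druid/coordinator/v1/metadata/datasources/wikipedia/segments
+["2012-01-01T00:00:00.000/2012-01-03T00:00:00.000"]
+
+=> ["wikipedia_2012-01-01T00:00:00.000Z_2012-01-02T00:00:00.000Z_v1"]
+```
+
+Without `?full` the response is an array of segment identifiers, as shown; with `?full` it is an array of complete segment metadata objects.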
+
 * `/druid/coordinator/v1/metadata/datasources/{dataSourceName}/segments/{segmentId}`
 
 Returns full segment metadata for a specific segment as stored in the metadata store.
diff --git a/docs/content/ingestion/update-existing-data.md b/docs/content/ingestion/update-existing-data.md
index c8284e8d0ca..26bdc005aaf 100644
--- a/docs/content/ingestion/update-existing-data.md
+++ b/docs/content/ingestion/update-existing-data.md
@@ -51,6 +51,7 @@ Here is what goes inside `ingestionSpec`:
 |Field|Type|Description|Required|
 |-----|----|-----------|--------|
 |dataSource|String|Druid dataSource name from which you are loading the data.|yes|
 |intervals|List|A list of strings representing ISO-8601 Intervals.|yes|
+|segments|List|The list of segments to read data from; by default it is obtained automatically. You can obtain the list of segments to put here by making a POST call to the coordinator at `/druid/coordinator/v1/metadata/datasources/{dataSourceName}/segments?full`, with the list of intervals in the request payload, e.g. ["2012-01-01T00:00:00.000/2012-01-03T00:00:00.000", "2012-01-05T00:00:00.000/2012-01-07T00:00:00.000"]. You may want to provide this list manually to ensure that the segments read are exactly the same as they were at the time of task submission; the task will fail if the list provided does not match the state of the database when the task actually runs.|no|
 |granularity|String|Defines the granularity of the query while loading data. Default value is "none". See [Granularities](../querying/granularities.html).|no|
 |filter|JSON|See [Filters](../querying/filters.html)|no|
 |dimensions|Array of String|Name of dimension columns to load. By default, the list will be constructed from parseSpec. If parseSpec does not have an explicit list of dimensions then all the dimension columns present in stored data will be read.|no|
@@ -75,8 +76,7 @@ For example
 
 #### `multi`
 
-This is a composing inputSpec to combine other inputSpecs. This inputSpec is used for delta ingestion.
-Please note that delta ingestion is not an idempotent operation. We may add change things in future to make it idempotent.
+This is a composing inputSpec used to combine other inputSpecs; it is used for delta ingestion. Please note that a `multi` inputSpec can have only one `dataSource` child.
 
 |Field|Type|Description|Required|
 |-----|----|-----------|--------|
@@ -94,7 +94,26 @@ For example:
       "type" : "dataSource",
       "ingestionSpec" : {
         "dataSource": "wikipedia",
-        "intervals": ["2014-10-20T00:00:00Z/P2W"]
+        "intervals": ["2012-01-01T00:00:00.000/2012-01-03T00:00:00.000", "2012-01-05T00:00:00.000/2012-01-07T00:00:00.000"],
+        "segments": [
+          {
+            "dataSource": "test1",
+            "interval": "2012-01-01T00:00:00.000/2012-01-03T00:00:00.000",
+            "version": "v2",
+            "loadSpec": {
+              "type": "local",
+              "path": "/tmp/index1.zip"
+            },
+            "dimensions": "host",
+            "metrics": "visited_sum,unique_hosts",
+            "shardSpec": {
+              "type": "none"
+            },
+            "binaryVersion": 9,
+            "size": 2,
+            "identifier": "test1_2012-01-01T00:00:00.000Z_2012-01-03T00:00:00.000Z_v2"
+          }
+        ]
       }
     },
     {
@@ -107,6 +126,11 @@ For example:
 }
 ```
 
+It is STRONGLY RECOMMENDED to provide the list of segments in the `dataSource` inputSpec explicitly, so that your delta ingestion task is idempotent. You can obtain that list of segments by making the following call to the coordinator:
+POST `/druid/coordinator/v1/metadata/datasources/{dataSourceName}/segments?full`
+Request body: [interval1, interval2,...], e.g. ["2012-01-01T00:00:00.000/2012-01-03T00:00:00.000", "2012-01-05T00:00:00.000/2012-01-07T00:00:00.000"]
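+
+For instance, for the two intervals used in the example above, the call would look like this (a sketch; the actual response depends on what is in your metadata store):
+
+```
+POST /druid/coordinator/v1/metadata/datasources/wikipedia/segments?full
+["2012-01-01T00:00:00.000/2012-01-03T00:00:00.000", "2012-01-05T00:00:00.000/2012-01-07T00:00:00.000"]
+```
+
+The response is a JSON array of full segment descriptors in the same form as the `segments` list in the example above, so it can be pasted into the inputSpec unchanged. If any of those segments is replaced or removed between task submission and task start, the task fails rather than silently reading a different set of segments.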
+
 ### Reindexing without Hadoop Batch Ingestion
 
 This section assumes the reader understands how to do batch ingestion without Hadoop using the [IndexTask](../ingestion/tasks.html#index-task),
diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/HadoopIngestionSpec.java b/indexing-hadoop/src/main/java/io/druid/indexer/HadoopIngestionSpec.java
index 829b980aabc..aea58796b65 100644
--- a/indexing-hadoop/src/main/java/io/druid/indexer/HadoopIngestionSpec.java
+++ b/indexing-hadoop/src/main/java/io/druid/indexer/HadoopIngestionSpec.java
@@ -37,8 +37,10 @@ import io.druid.timeline.partition.PartitionChunk;
 import org.joda.time.Interval;
 
 import java.io.IOException;
+import java.util.HashSet;
 import java.util.List;
 import java.util.Map;
+import java.util.Set;
 
 /**
  */
@@ -168,10 +170,32 @@ public class HadoopIngestionSpec extends IngestionSpec<HadoopIngestionSpec>
     final List<DataSegment> segmentsList = segmentLister.getUsedSegmentsForIntervals(
         ingestionSpecObj.getDataSource(),
         ingestionSpecObj.getIntervals()
     );
+
+    if (ingestionSpecObj.getSegments() != null) {
+      // Ensure that the user-supplied segment list matches the segmentsList obtained from the db.
+      // This safety check lets users do test-and-set style batch delta ingestion, where the delta
+      // ingestion task only runs if the current state of the system is the same as it was when
+      // they submitted the task.
+      List<DataSegment> userSuppliedSegmentsList = ingestionSpecObj.getSegments();
+
+      if (segmentsList.size() == userSuppliedSegmentsList.size()) {
+        Set<DataSegment> segmentsSet = new HashSet<>(segmentsList);
+
+        for (DataSegment userSegment : userSuppliedSegmentsList) {
+          if (!segmentsSet.contains(userSegment)) {
+            throw new IOException("user supplied segments list did not match with segments list obtained from db");
+          }
+        }
+      } else {
+        throw new IOException("user supplied segments list did not match with segments list obtained from db");
+      }
+    }
+
     VersionedIntervalTimeline<String, DataSegment> timeline = new VersionedIntervalTimeline<>(Ordering.natural());
     for (DataSegment segment : segmentsList) {
       timeline.add(segment.getInterval(), segment.getVersion(), segment.getShardSpec().createChunk(segment));
diff --git a/indexing-hadoop/src/main/java/io/druid/indexer/hadoop/DatasourceIngestionSpec.java b/indexing-hadoop/src/main/java/io/druid/indexer/hadoop/DatasourceIngestionSpec.java
index 497806a408e..e7e7bb8c1b9 100644
--- a/indexing-hadoop/src/main/java/io/druid/indexer/hadoop/DatasourceIngestionSpec.java
+++ b/indexing-hadoop/src/main/java/io/druid/indexer/hadoop/DatasourceIngestionSpec.java
@@ -26,6 +26,7 @@ import com.google.common.collect.ImmutableList;
 import io.druid.common.utils.JodaUtils;
 import io.druid.granularity.QueryGranularity;
 import io.druid.query.filter.DimFilter;
+import io.druid.timeline.DataSegment;
 import org.joda.time.Interval;
 
 import java.util.List;
@@ -34,6 +35,7 @@ public class DatasourceIngestionSpec
 {
   private final String dataSource;
   private final List<Interval> intervals;
+  private final List<DataSegment> segments;
   private final DimFilter filter;
   private final QueryGranularity granularity;
   private final List<String> dimensions;
@@ -45,6 +47,7 @@
       @JsonProperty("dataSource") String dataSource,
       @Deprecated @JsonProperty("interval") Interval interval,
       @JsonProperty("intervals") List<Interval> intervals,
+      @JsonProperty("segments") List<DataSegment> segments,
       @JsonProperty("filter") DimFilter filter,
       @JsonProperty("granularity") QueryGranularity granularity,
       @JsonProperty("dimensions") List<String> dimensions,
@@ -67,6 +70,11 @@
     }
     this.intervals = Preconditions.checkNotNull(theIntervals, "no intervals found");
 
+    // Note that it is important to have intervals even if the user explicitly specifies the list
+    // of segments, because the segment list's min/max boundaries might not align with the intended
+    // interval to read in all cases.
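+    // For example (hypothetical values): a supplied segment may cover 2012-01-01/2012-01-07 while
+    // the user only intends to read 2012-01-01/2012-01-03, so the read interval cannot simply be
+    // derived from the segment boundaries.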
+    this.segments = segments;
+
     this.filter = filter;
     this.granularity = granularity == null ? QueryGranularity.NONE : granularity;
@@ -88,6 +96,12 @@
     return intervals;
   }
 
+  @JsonProperty
+  public List<DataSegment> getSegments()
+  {
+    return segments;
+  }
+
   @JsonProperty
   public DimFilter getFilter()
   {
@@ -124,6 +138,7 @@
         dataSource,
         null,
         intervals,
+        segments,
         filter,
         granularity,
         dimensions,
@@ -138,6 +153,7 @@
         dataSource,
         null,
         intervals,
+        segments,
         filter,
         granularity,
         dimensions,
@@ -152,6 +168,7 @@
         dataSource,
         null,
         intervals,
+        segments,
         filter,
         granularity,
         dimensions,
@@ -166,6 +183,7 @@
         dataSource,
         null,
         intervals,
+        segments,
         filter,
         granularity,
         dimensions,
@@ -195,6 +213,9 @@
     if (!intervals.equals(that.intervals)) {
       return false;
     }
+    if (segments != null ? !segments.equals(that.segments) : that.segments != null) {
+      return false;
+    }
     if (filter != null ? !filter.equals(that.filter) : that.filter != null) {
       return false;
     }
@@ -213,6 +234,7 @@
   {
     int result = dataSource.hashCode();
     result = 31 * result + intervals.hashCode();
+    result = 31 * result + (segments != null ? segments.hashCode() : 0);
     result = 31 * result + (filter != null ? filter.hashCode() : 0);
     result = 31 * result + granularity.hashCode();
     result = 31 * result + (dimensions != null ? dimensions.hashCode() : 0);
@@ -227,6 +249,7 @@
     return "DatasourceIngestionSpec{" +
            "dataSource='" + dataSource + '\'' +
            ", intervals=" + intervals +
+           ", segments=" + segments +
            ", filter=" + filter +
            ", granularity=" + granularity +
            ", dimensions=" + dimensions +
diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest.java
index acedfe7c105..f081a4cfc42 100644
--- a/indexing-hadoop/src/test/java/io/druid/indexer/HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest.java
+++ b/indexing-hadoop/src/test/java/io/druid/indexer/HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest.java
@@ -43,6 +43,7 @@ import org.joda.time.Interval;
 import org.junit.Assert;
 import org.junit.Test;
 
+import java.io.IOException;
 import java.util.Map;
 
 /**
@@ -91,7 +92,7 @@ public class HadoopIngestionSpecUpdateDatasourcePathSpecSegmentsTest
     PathSpec pathSpec = new DatasourcePathSpec(
         jsonMapper,
         null,
-        new DatasourceIngestionSpec(testDatasource, testDatasourceInterval, null, null, null, null, null, false),
+        new DatasourceIngestionSpec(testDatasource, testDatasourceInterval, null, null, null, null, null, null, false),
         null
     );
     HadoopDruidIndexerConfig config = testRunUpdateSegmentListIfDatasourcePathSpecIsUsed(
@@ -104,6 +105,60 @@
     );
   }
 
+  @Test
+  public void testupdateSegmentListIfDatasourcePathSpecWithMatchingUserSegments() throws Exception
+  {
+    PathSpec pathSpec = new DatasourcePathSpec(
+        jsonMapper,
+        null,
+        new DatasourceIngestionSpec(
+            testDatasource,
+            testDatasourceInterval,
+            null,
+            ImmutableList.of(SEGMENT),
+            null,
+            null,
+            null,
+            null,
+            false
+        ),
+        null
+    );
+    HadoopDruidIndexerConfig config = testRunUpdateSegmentListIfDatasourcePathSpecIsUsed(
+        pathSpec,
+        testDatasourceInterval
+    );
+    Assert.assertEquals(
+        ImmutableList.of(WindowedDataSegment.of(SEGMENT)),
+        ((DatasourcePathSpec) config.getPathSpec()).getSegments()
+    );
+  }
+
+  @Test(expected = IOException.class)
+  public void testupdateSegmentListThrowsExceptionWithUserSegmentsMismatch() throws Exception
+  {
+    PathSpec pathSpec = new DatasourcePathSpec(
+        jsonMapper,
+        null,
+        new DatasourceIngestionSpec(
+            testDatasource,
+            testDatasourceInterval,
+            null,
+            ImmutableList.of(SEGMENT.withVersion("v2")),
+            null,
+            null,
+            null,
+            null,
+            false
+        ),
+        null
+    );
+    testRunUpdateSegmentListIfDatasourcePathSpecIsUsed(
+        pathSpec,
+        testDatasourceInterval
+    );
+  }
+
   @Test
   public void testupdateSegmentListIfDatasourcePathSpecIsUsedWithJustDatasourcePathSpecAndPartialInterval()
       throws Exception
@@ -111,7 +166,17 @@
     PathSpec pathSpec = new DatasourcePathSpec(
         jsonMapper,
         null,
-        new DatasourceIngestionSpec(testDatasource, testDatasourceIntervalPartial, null, null, null, null, null, false),
+        new DatasourceIngestionSpec(
+            testDatasource,
+            testDatasourceIntervalPartial,
+            null,
+            null,
+            null,
+            null,
+            null,
+            null,
+            false
+        ),
         null
     );
     HadoopDruidIndexerConfig config = testRunUpdateSegmentListIfDatasourcePathSpecIsUsed(
@@ -133,7 +198,17 @@
         new DatasourcePathSpec(
             jsonMapper,
             null,
-            new DatasourceIngestionSpec(testDatasource, testDatasourceInterval, null, null, null, null, null, false),
+            new DatasourceIngestionSpec(
+                testDatasource,
+                testDatasourceInterval,
+                null,
+                null,
+                null,
+                null,
+                null,
+                null,
+                false
+            ),
             null
         )
     )
diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/DatasourceIngestionSpecTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/DatasourceIngestionSpecTest.java
index 8b2b837bc12..e68999563db 100644
--- a/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/DatasourceIngestionSpecTest.java
+++ b/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/DatasourceIngestionSpecTest.java
@@ -25,6 +25,7 @@ import com.google.common.collect.Lists;
 import io.druid.granularity.QueryGranularity;
 import io.druid.query.filter.SelectorDimFilter;
 import io.druid.segment.TestHelper;
+import io.druid.timeline.DataSegment;
 import org.joda.time.Interval;
 import org.junit.Assert;
 import org.junit.Test;
@@ -46,6 +47,7 @@
         "test",
         interval,
         null,
+        null,
         new SelectorDimFilter("dim", "value"),
         QueryGranularity.DAY,
         Lists.newArrayList("d1", "d2"),
@@ -84,6 +86,7 @@
         null,
         null,
         null,
+        null,
         false
     );
 
@@ -93,6 +96,18 @@
     jsonStr = "{\n"
               + "  \"dataSource\": \"test\",\n"
               + "  \"intervals\": [\"2014/2015\", \"2016/2017\"],\n"
+              + "  \"segments\": [{\n"
+              + "    \"dataSource\":\"test\",\n"
+              + "    \"interval\":\"2014-01-01T00:00:00.000Z/2017-01-01T00:00:00.000Z\",\n"
+              + "    \"version\":\"v0\",\n"
+              + "    \"loadSpec\":null,\n"
+              + "    \"dimensions\":\"\",\n"
+              + "    \"metrics\":\"\",\n"
+              + "    \"shardSpec\":{\"type\":\"none\"},\n"
+              + "    \"binaryVersion\":9,\n"
+              + "    \"size\":128,\n"
+              + "    \"identifier\":\"test_2014-01-01T00:00:00.000Z_2017-01-01T00:00:00.000Z_v0\"\n"
+              + "  }],\n"
               + "  \"filter\": { \"type\": \"selector\", \"dimension\": \"dim\", \"value\": \"value\"},\n"
               + "  \"granularity\": \"day\",\n"
               + "  \"dimensions\": [\"d1\", \"d2\"],\n"
@@ -104,6 +119,19 @@
         "test",
         null,
         intervals,
+        ImmutableList.of(
+            new DataSegment(
+                "test",
+                Interval.parse("2014/2017"),
+                "v0",
+                null,
+                null,
+                null,
+                null,
+                9,
+                128
+            )
+        ),
         new SelectorDimFilter("dim", "value"),
         QueryGranularity.DAY,
         Lists.newArrayList("d1", "d2"),
@@ -128,7 +156,7 @@
     DatasourceIngestionSpec actual = MAPPER.readValue(jsonStr, DatasourceIngestionSpec.class);
 
     Assert.assertEquals(
-        new DatasourceIngestionSpec("test", Interval.parse("2014/2015"), null, null, null, null, null, false),
+        new DatasourceIngestionSpec("test", Interval.parse("2014/2015"), null, null, null, null, null, null, false),
         actual
     );
   }
diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/DatasourceRecordReaderTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/DatasourceRecordReaderTest.java
index b910e9b49b0..c0ae5ecf69a 100644
--- a/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/DatasourceRecordReaderTest.java
+++ b/indexing-hadoop/src/test/java/io/druid/indexer/hadoop/DatasourceRecordReaderTest.java
@@ -67,6 +67,7 @@
         null,
         null,
         null,
+        null,
         segment.getDimensions(),
         segment.getMetrics(),
         false
diff --git a/indexing-hadoop/src/test/java/io/druid/indexer/path/DatasourcePathSpecTest.java b/indexing-hadoop/src/test/java/io/druid/indexer/path/DatasourcePathSpecTest.java
index 6385c79544c..b74096a572f 100644
--- a/indexing-hadoop/src/test/java/io/druid/indexer/path/DatasourcePathSpecTest.java
+++ b/indexing-hadoop/src/test/java/io/druid/indexer/path/DatasourcePathSpecTest.java
@@ -80,6 +80,7 @@
         null,
         null,
         null,
+        null,
         false
     );
diff --git a/server/src/main/java/io/druid/server/http/MetadataResource.java b/server/src/main/java/io/druid/server/http/MetadataResource.java
index 2f96a708029..294165402f3 100644
--- a/server/src/main/java/io/druid/server/http/MetadataResource.java
+++ b/server/src/main/java/io/druid/server/http/MetadataResource.java
@@ -24,16 +24,20 @@ import com.google.common.collect.Iterables;
 import com.google.common.collect.Lists;
 import com.google.inject.Inject;
 import io.druid.client.DruidDataSource;
+import io.druid.indexing.overlord.IndexerMetadataStorageCoordinator;
 import io.druid.metadata.MetadataSegmentManager;
 import io.druid.timeline.DataSegment;
+import org.joda.time.Interval;
 
 import javax.ws.rs.GET;
+import javax.ws.rs.POST;
 import javax.ws.rs.Path;
 import javax.ws.rs.PathParam;
 import javax.ws.rs.Produces;
 import javax.ws.rs.QueryParam;
 import javax.ws.rs.core.MediaType;
 import javax.ws.rs.core.Response;
+import java.io.IOException;
 import java.util.Collections;
 import java.util.List;
 
@@ -43,13 +47,16 @@
 public class MetadataResource
 {
   private final MetadataSegmentManager metadataSegmentManager;
+  private final IndexerMetadataStorageCoordinator metadataStorageCoordinator;
 
   @Inject
   public MetadataResource(
-      MetadataSegmentManager metadataSegmentManager
+      MetadataSegmentManager metadataSegmentManager,
+      IndexerMetadataStorageCoordinator metadataStorageCoordinator
   )
   {
     this.metadataSegmentManager = metadataSegmentManager;
+    this.metadataStorageCoordinator = metadataStorageCoordinator;
   }
 
   @GET
@@ -123,10 +130,47 @@
     return builder.entity(
         Iterables.transform(
             dataSource.getSegments(),
-            new Function<DataSegment, Object>()
+            new Function<DataSegment, String>()
             {
               @Override
-              public Object apply(DataSegment segment)
+              public String apply(DataSegment segment)
+              {
+                return segment.getIdentifier();
+              }
+            }
+        )
+    ).build();
+  }
+
+  @POST
+  @Path("/datasources/{dataSourceName}/segments")
+  @Produces(MediaType.APPLICATION_JSON)
+  public Response getDatabaseSegmentDataSourceSegments(
+      @PathParam("dataSourceName") String dataSourceName,
+      @QueryParam("full") String full,
+      List<Interval> intervals
+  )
+  {
+    List<DataSegment> segments = null;
+    try {
+      segments = metadataStorageCoordinator.getUsedSegmentsForIntervals(dataSourceName, intervals);
+    }
+    catch (IOException ex) {
+      return Response.status(Response.Status.INTERNAL_SERVER_ERROR).entity(ex.getMessage()).build();
+    }
+
+    Response.ResponseBuilder builder = Response.status(Response.Status.OK);
+    if (full != null) {
+      return builder.entity(segments).build();
+    }
+
+    return builder.entity(
+        Iterables.transform(
+            segments,
+            new Function<DataSegment, String>()
+            {
+              @Override
+              public String apply(DataSegment segment)
               {
                 return segment.getIdentifier();
               }