mirror of https://github.com/apache/druid.git
fix-3010: look through all versions to find the set with complete partitions (#3013)
parent 245077b47f
commit 7e67397b5a
@@ -422,9 +422,16 @@ public class VersionedIntervalTimeline<VersionType, ObjectType> implements Timel
     for (Map.Entry<Interval, TreeMap<VersionType, TimelineEntry>> versionEntry : allTimelineEntries.entrySet()) {
       if (versionEntry.getKey().overlap(interval) != null) {
-        TimelineEntry timelineEntry = versionEntry.getValue().lastEntry().getValue();
-        if (timelineEntry.getPartitionHolder().isComplete() || incompleteOk) {
-          add(timeline, versionEntry.getKey(), timelineEntry);
+        if (incompleteOk) {
+          add(timeline, versionEntry.getKey(), versionEntry.getValue().lastEntry().getValue());
+        } else {
+          for (VersionType ver : versionEntry.getValue().descendingKeySet()) {
+            TimelineEntry timelineEntry = versionEntry.getValue().get(ver);
+            if (timelineEntry.getPartitionHolder().isComplete()) {
+              add(timeline, versionEntry.getKey(), timelineEntry);
+              break;
+            }
+          }
         }
       }
     }
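With this change, when incomplete results are not acceptable, the lookup no longer commits to the highest version of an overlapping interval: it walks the versions from newest to oldest and takes the first one whose partition holder is complete. Below is a minimal, self-contained sketch of that selection rule; the `NewestCompleteSketch` class, `VersionedEntry`, and `findNewestComplete` are illustrative stand-ins, not part of Druid's API.

```java
import java.util.Map;
import java.util.TreeMap;

public class NewestCompleteSketch
{
  // Illustrative stand-in for Druid's TimelineEntry and its PartitionHolder completeness check.
  static class VersionedEntry
  {
    final String payload;
    final boolean complete;

    VersionedEntry(String payload, boolean complete)
    {
      this.payload = payload;
      this.complete = complete;
    }
  }

  // Walk versions from newest to oldest and return the first complete entry,
  // mirroring the descendingKeySet() loop added by this commit.
  static <V> VersionedEntry findNewestComplete(TreeMap<V, VersionedEntry> versions)
  {
    for (Map.Entry<V, VersionedEntry> entry : versions.descendingMap().entrySet()) {
      if (entry.getValue().complete) {
        return entry.getValue();
      }
    }
    return null; // no version has a complete partition set; the interval is skipped
  }

  public static void main(String[] args)
  {
    TreeMap<String, VersionedEntry> versions = new TreeMap<>();
    versions.put("1", new VersionedEntry("version 1 segments", true));   // older, complete
    versions.put("2", new VersionedEntry("version 2 segments", false));  // newer, missing partitions
    // Prints "version 1 segments": the incomplete newer version is passed over.
    System.out.println(findNewestComplete(versions).payload);
  }
}
```

Under the old code, the newest version was always chosen and then dropped if its partition set was incomplete, so the interval could come back empty from lookup even though an older, complete version was still in the timeline, which is the behavior reported in issue 3010.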
@@ -1504,7 +1504,68 @@ public class VersionedIntervalTimelineTest
     Assert.assertTrue(timeline.lookup(Interval.parse("1970/1980")).isEmpty());
   }
 
+  // https://github.com/druid-io/druid/issues/3010
+  @Test
+  public void testRemoveIncompleteKeepsComplete() throws Exception
+  {
+    timeline = makeStringIntegerTimeline();
+
+    add("2011-04-01/2011-04-02", "1", IntegerPartitionChunk.make(null, 1, 0, 77));
+    add("2011-04-01/2011-04-02", "1", IntegerPartitionChunk.make(1, null, 1, 88));
+    add("2011-04-01/2011-04-02", "2", IntegerPartitionChunk.make(null, 1, 0, 99));
+
+    assertValues(
+        ImmutableList.of(
+            createExpected("2011-04-01/2011-04-02", "1",
+                Arrays.<PartitionChunk<Integer>>asList(
+                    IntegerPartitionChunk.make(null, 1, 0, 77),
+                    IntegerPartitionChunk.make(1, null, 1, 88)
+                )
+            )
+        ),
+        timeline.lookup(new Interval("2011-04-01/2011-04-02"))
+    );
+
+    add("2011-04-01/2011-04-02", "3", IntegerPartitionChunk.make(null, 1, 0, 110));
+
+    assertValues(
+        ImmutableList.of(
+            createExpected("2011-04-01/2011-04-02", "1",
+                Arrays.<PartitionChunk<Integer>>asList(
+                    IntegerPartitionChunk.make(null, 1, 0, 77),
+                    IntegerPartitionChunk.make(1, null, 1, 88)
+                )
+            )
+        ),
+        timeline.lookup(new Interval("2011-04-01/2011-04-02"))
+    );
+    assertValues(
+        Sets.newHashSet(
+            createExpected("2011-04-01/2011-04-02", "2",
+                Arrays.<PartitionChunk<Integer>>asList(
+                    IntegerPartitionChunk.make(null, 1, 0, 99)
+                )
+            )
+        ),
+        timeline.findOvershadowed()
+    );
+
+    testRemove();
+
+    assertValues(
+        ImmutableList.of(
+            createExpected("2011-04-01/2011-04-02", "1",
+                Arrays.<PartitionChunk<Integer>>asList(
+                    IntegerPartitionChunk.make(null, 1, 0, 77),
+                    IntegerPartitionChunk.make(1, null, 1, 88)
+                )
+            )
+        ),
+        timeline.lookup(new Interval("2011-04-01/2011-04-02"))
+    );
+  }
+
   private Pair<Interval, Pair<String, PartitionHolder<Integer>>> createExpected(
       String intervalString,
       String version,
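The new test builds the scenario from issue 3010 on a single day interval: version "1" contributes both chunks of a two-chunk set, while versions "2" and "3" each contribute only one chunk of an incomplete set. It checks that lookup keeps returning the complete version "1" chunks before and after the incomplete versions are added, that findOvershadowed() reports the incomplete version "2" chunk as overshadowed, and that after running the existing testRemove() case against the same timeline, the complete version "1" set is still the one returned for 2011-04-01/2011-04-02.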