mirror of https://github.com/apache/druid.git
Don't log the entire task spec (#10278)
* Don't log the entire task spec
* fix lgtm
* fix serde
* address comments and add tests
* fix tests
* remove unnecessary codes

This commit is contained in:
parent 0891b1f833
commit 9a81740281
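In short: instead of rendering whole task specs into log lines and error messages, the overlord client and the parallel-index runner now reference task IDs, which are minted up front by the new IdUtils.newTaskId and passed alongside the task object. A sketch of the resulting ID shape, assuming a hypothetical "wikipedia" datasource (the random suffix and timestamp shown are illustrative, not from the commit):

    // ID fields are joined with "_": [prefix_]type_datasource_suffix[_start_end]_now
    String taskId = IdUtils.newTaskId(
        "coordinator-issued",  // idPrefix, as used by the coordinator duties below
        "compact",             // typeName, i.e. ClientCompactionTaskQuery.TYPE
        "wikipedia",           // dataSource -- hypothetical example
        null                   // compaction tasks pass no interval
    );
    // e.g. "coordinator-issued_compact_wikipedia_gfdoznkc_2020-08-18T07:21:53.261Z"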
IdUtils.java
@@ -19,11 +19,18 @@
 package org.apache.druid.common.utils;
 
+import com.google.common.annotations.VisibleForTesting;
 import com.google.common.base.Joiner;
 import com.google.common.base.Preconditions;
 import com.google.common.base.Strings;
+import org.apache.druid.java.util.common.DateTimes;
 import org.apache.druid.java.util.common.StringUtils;
+import org.joda.time.DateTime;
+import org.joda.time.Interval;
+
+import javax.annotation.Nullable;
+import java.util.ArrayList;
+import java.util.List;
 import java.util.concurrent.ThreadLocalRandom;
 import java.util.regex.Matcher;
 import java.util.regex.Pattern;
@@ -68,4 +75,44 @@ public class IdUtils
   {
     return UNDERSCORE_JOINER.join(prefix, IdUtils.getRandomId());
   }
+
+  public static String newTaskId(String typeName, String dataSource, @Nullable Interval interval)
+  {
+    return newTaskId(null, typeName, dataSource, interval);
+  }
+
+  public static String newTaskId(@Nullable String idPrefix, String typeName, String dataSource, @Nullable Interval interval)
+  {
+    return newTaskId(idPrefix, getRandomId(), DateTimes.nowUtc(), typeName, dataSource, interval);
+  }
+
+  /**
+   * This method is only visible to outside only for testing.
+   * Use {@link #newTaskId(String, String, Interval)} or {@link #newTaskId(String, String, String, Interval)} instead.
+   */
+  @VisibleForTesting
+  static String newTaskId(
+      @Nullable String idPrefix,
+      String idSuffix,
+      DateTime now,
+      String typeName,
+      String dataSource,
+      @Nullable Interval interval
+  )
+  {
+    final List<String> objects = new ArrayList<>();
+    if (idPrefix != null) {
+      objects.add(idPrefix);
+    }
+    objects.add(typeName);
+    objects.add(dataSource);
+    objects.add(idSuffix);
+    if (interval != null) {
+      objects.add(interval.getStart().toString());
+      objects.add(interval.getEnd().toString());
+    }
+    objects.add(now.toString());
+
+    return String.join("_", objects);
+  }
 }
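The tests added below pin the format down exactly; with a fixed suffix and clock the ID is fully deterministic. For the interval variant, the call and its result look like this (values taken from testNewTaskIdWithInterval):

    String id = IdUtils.newTaskId(
        "prefix",
        "suffix",
        DateTimes.of("2020-01-01"),
        "type",
        "datasource",
        Intervals.of("2020-01-01/2020-06-01")
    );
    // id == "prefix_type_datasource_suffix_2020-01-01T00:00:00.000Z"
    //        + "_2020-06-01T00:00:00.000Z_2020-01-01T00:00:00.000Z"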
IdUtilsTest.java
@@ -19,6 +19,9 @@
 package org.apache.druid.common.utils;
 
+import org.apache.druid.java.util.common.DateTimes;
+import org.apache.druid.java.util.common.Intervals;
+import org.junit.Assert;
 import org.junit.Rule;
 import org.junit.Test;
 import org.junit.rules.ExpectedException;
@@ -108,4 +111,50 @@ public class IdUtilsTest
     expectedException.expectMessage("thingToValidate cannot contain whitespace character except space.");
     IdUtils.validateId(THINGO, "form\u000cfeed?");
   }
+
+  @Test
+  public void testNewTaskIdWithoutInterval()
+  {
+    final String id = IdUtils.newTaskId(
+        "prefix",
+        "suffix",
+        DateTimes.of("2020-01-01"),
+        "type",
+        "datasource",
+        null
+    );
+    final String expected = String.join(
+        "_",
+        "prefix",
+        "type",
+        "datasource",
+        "suffix",
+        DateTimes.of("2020-01-01").toString()
+    );
+    Assert.assertEquals(expected, id);
+  }
+
+  @Test
+  public void testNewTaskIdWithInterval()
+  {
+    final String id = IdUtils.newTaskId(
+        "prefix",
+        "suffix",
+        DateTimes.of("2020-01-01"),
+        "type",
+        "datasource",
+        Intervals.of("2020-01-01/2020-06-01")
+    );
+    final String expected = String.join(
+        "_",
+        "prefix",
+        "type",
+        "datasource",
+        "suffix",
+        DateTimes.of("2020-01-01").toString(),
+        DateTimes.of("2020-06-01").toString(),
+        DateTimes.of("2020-01-01").toString()
+    );
+    Assert.assertEquals(expected, id);
+  }
 }
AbstractTask.java
@@ -21,7 +21,6 @@ package org.apache.druid.indexing.common.task;
 
 import com.fasterxml.jackson.annotation.JsonIgnore;
 import com.fasterxml.jackson.annotation.JsonProperty;
-import com.google.common.base.Joiner;
 import com.google.common.base.Objects;
 import com.google.common.base.Preconditions;
 import org.apache.druid.common.utils.IdUtils;
@@ -29,22 +28,18 @@ import org.apache.druid.indexer.TaskStatus;
 import org.apache.druid.indexing.common.TaskLock;
 import org.apache.druid.indexing.common.actions.LockListAction;
 import org.apache.druid.indexing.common.actions.TaskActionClient;
-import org.apache.druid.java.util.common.DateTimes;
 import org.apache.druid.query.Query;
 import org.apache.druid.query.QueryRunner;
 import org.joda.time.Interval;
 
 import javax.annotation.Nullable;
 import java.io.IOException;
-import java.util.ArrayList;
 import java.util.HashMap;
 import java.util.List;
 import java.util.Map;
 
 public abstract class AbstractTask implements Task
 {
-  private static final Joiner ID_JOINER = Joiner.on("_");
-
   @JsonIgnore
   private final String id;
 
@@ -80,29 +75,18 @@ public abstract class AbstractTask implements Task
     this.context = context == null ? new HashMap<>() : new HashMap<>(context);
   }
 
-  public static String getOrMakeId(String id, final String typeName, String dataSource)
+  public static String getOrMakeId(@Nullable String id, final String typeName, String dataSource)
   {
     return getOrMakeId(id, typeName, dataSource, null);
   }
 
-  static String getOrMakeId(String id, final String typeName, String dataSource, @Nullable Interval interval)
+  static String getOrMakeId(@Nullable String id, final String typeName, String dataSource, @Nullable Interval interval)
   {
     if (id != null) {
       return id;
     }
 
-    final List<Object> objects = new ArrayList<>();
-    final String suffix = IdUtils.getRandomId();
-    objects.add(typeName);
-    objects.add(dataSource);
-    objects.add(suffix);
-    if (interval != null) {
-      objects.add(interval.getStart());
-      objects.add(interval.getEnd());
-    }
-    objects.add(DateTimes.nowUtc().toString());
-
-    return joinId(objects);
+    return IdUtils.newTaskId(typeName, dataSource, interval);
   }
 
   @JsonProperty
@@ -175,23 +159,6 @@ public abstract class AbstractTask implements Task
       '}';
   }
 
-  /**
-   * Start helper methods
-   *
-   * @param objects objects to join
-   *
-   * @return string of joined objects
-   */
-  static String joinId(List<Object> objects)
-  {
-    return ID_JOINER.join(objects);
-  }
-
-  static String joinId(Object... objects)
-  {
-    return ID_JOINER.join(objects);
-  }
-
   public TaskStatus success()
   {
     return TaskStatus.success(getId());
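With the joinId helpers gone, AbstractTask.getOrMakeId is now a thin wrapper: an explicit ID is returned unchanged, and a missing one is built by IdUtils so every task type shares one format. A usage sketch, assuming a hypothetical "wikipedia" datasource (the suffix and timestamp are illustrative):

    // Caller-supplied id wins:
    AbstractTask.getOrMakeId("my-task", "kill", "wikipedia");  // -> "my-task"
    // Null id falls through to IdUtils.newTaskId:
    AbstractTask.getOrMakeId(null, "kill", "wikipedia");
    // -> e.g. "kill_wikipedia_gfdoznkc_2020-08-18T07:21:53.261Z"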
CompactionTask.java
@@ -183,8 +183,8 @@ public class CompactionTask extends AbstractBatchIndexTask
 
   @JsonCreator
   public CompactionTask(
-      @JsonProperty("id") final String id,
-      @JsonProperty("resource") final TaskResource taskResource,
+      @JsonProperty("id") @Nullable final String id,
+      @JsonProperty("resource") @Nullable final TaskResource taskResource,
       @JsonProperty("dataSource") final String dataSource,
       @JsonProperty("interval") @Deprecated @Nullable final Interval interval,
       @JsonProperty("segments") @Deprecated @Nullable final List<DataSegment> segments,
ParallelIndexPhaseRunner.java
@@ -26,11 +26,6 @@ import com.google.common.util.concurrent.FutureCallback;
 import com.google.common.util.concurrent.Futures;
 import com.google.common.util.concurrent.ListenableFuture;
 import org.apache.druid.client.indexing.IndexingServiceClient;
-import org.apache.druid.data.input.InputFormat;
-import org.apache.druid.data.input.InputSource;
-import org.apache.druid.data.input.InputSplit;
-import org.apache.druid.data.input.SplitHintSpec;
-import org.apache.druid.data.input.impl.SplittableInputSource;
 import org.apache.druid.indexer.TaskState;
 import org.apache.druid.indexer.TaskStatusPlus;
 import org.apache.druid.indexing.common.TaskToolbox;
@@ -53,7 +48,6 @@ import java.util.concurrent.BlockingQueue;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.LinkedBlockingDeque;
 import java.util.concurrent.TimeUnit;
-import java.util.stream.Collectors;
 
 /**
  * Base class for different implementations of {@link ParallelIndexTaskRunner}.
@@ -195,8 +189,8 @@ public abstract class ParallelIndexPhaseRunner<SubTaskType extends Task, SubTask
           if (lastStatus != null) {
             LOG.error("Failed because of the failed sub task[%s]", lastStatus.getId());
           } else {
-            final SinglePhaseSubTaskSpec spec = (SinglePhaseSubTaskSpec) taskCompleteEvent.getSpec();
-            LOG.error("Failed to run sub tasks for inputSplits[%s]", spec.getInputSplit());
+            final SubTaskSpec<?> spec = taskCompleteEvent.getSpec();
+            LOG.error("Failed to process spec[%s] with an unknown last status", spec.getId());
           }
           break;
         default:
@@ -252,7 +246,7 @@ public abstract class ParallelIndexPhaseRunner<SubTaskType extends Task, SubTask
       SubTaskSpec<SubTaskType> spec
   )
   {
-    LOG.info("Submit a new task for spec[%s] and inputSplit[%s]", spec.getId(), spec.getInputSplit());
+    LOG.info("Submit a new task for spec[%s]", spec.getId());
     final ListenableFuture<SubTaskCompleteEvent<SubTaskType>> future = taskMonitor.submit(spec);
     Futures.addCallback(
         future,
@@ -269,27 +263,13 @@ public abstract class ParallelIndexPhaseRunner<SubTaskType extends Task, SubTask
           public void onFailure(Throwable t)
           {
             // this callback is called only when there were some problems in TaskMonitor.
-            LOG.error(t, "Error while running a task for subTaskSpec[%s]", spec);
+            LOG.error(t, "Error while running a task for spec[%s]", spec.getId());
             taskCompleteEvents.offer(SubTaskCompleteEvent.fail(spec, t));
           }
         }
     );
   }
 
-  private static List<InputSplit> getSplitsIfSplittable(
-      InputSource inputSource,
-      InputFormat inputFormat,
-      @Nullable SplitHintSpec splitHintSpec
-  ) throws IOException
-  {
-    if (inputSource instanceof SplittableInputSource) {
-      final SplittableInputSource<?> splittableInputSource = (SplittableInputSource) inputSource;
-      return splittableInputSource.createSplits(inputFormat, splitHintSpec).collect(Collectors.toList());
-    } else {
-      throw new ISE("inputSource[%s] is not splittable", inputSource.getClass().getSimpleName());
-    }
-  }
-
   @Override
   public void stopGracefully()
   {
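These runner hunks are the commit's namesake fix: the error and info lines used to interpolate the whole SubTaskSpec, which embeds the full (and potentially very large) ingestion spec; now they interpolate only the spec ID. The before/after pattern, for reference:

    // before: renders the entire spec object into the log line
    LOG.error(t, "Error while running a task for subTaskSpec[%s]", spec);
    // after: the id alone is enough to look the task up
    LOG.error(t, "Error while running a task for spec[%s]", spec.getId());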
ParallelIndexSupervisorTask.java
@@ -39,7 +39,6 @@ import org.apache.druid.indexer.TaskState;
 import org.apache.druid.indexer.TaskStatus;
 import org.apache.druid.indexer.partitions.PartitionsSpec;
 import org.apache.druid.indexer.partitions.SingleDimensionPartitionsSpec;
-import org.apache.druid.indexing.appenderator.ActionBasedUsedSegmentChecker;
 import org.apache.druid.indexing.common.Counters;
 import org.apache.druid.indexing.common.TaskLock;
 import org.apache.druid.indexing.common.TaskLockType;
@@ -73,7 +72,6 @@ import org.apache.druid.segment.indexing.granularity.GranularitySpec;
 import org.apache.druid.segment.realtime.appenderator.AppenderatorsManager;
 import org.apache.druid.segment.realtime.appenderator.SegmentIdWithShardSpec;
 import org.apache.druid.segment.realtime.appenderator.TransactionalSegmentPublisher;
-import org.apache.druid.segment.realtime.appenderator.UsedSegmentChecker;
 import org.apache.druid.segment.realtime.firehose.ChatHandler;
 import org.apache.druid.segment.realtime.firehose.ChatHandlerProvider;
 import org.apache.druid.segment.realtime.firehose.ChatHandlers;
@@ -769,7 +767,6 @@ public class ParallelIndexSupervisorTask extends AbstractBatchIndexTask implemen
   private static void publishSegments(TaskToolbox toolbox, Map<String, PushedSegmentsReport> reportsMap)
       throws IOException
   {
-    final UsedSegmentChecker usedSegmentChecker = new ActionBasedUsedSegmentChecker(toolbox.getTaskActionClient());
     final Set<DataSegment> oldSegments = new HashSet<>();
     final Set<DataSegment> newSegments = new HashSet<>();
     reportsMap
@@ -788,18 +785,7 @@ public class ParallelIndexSupervisorTask extends AbstractBatchIndexTask implemen
     if (published) {
       LOG.info("Published [%d] segments", newSegments.size());
     } else {
-      LOG.info("Transaction failure while publishing segments, checking if someone else beat us to it.");
-      final Set<SegmentIdWithShardSpec> segmentsIdentifiers = reportsMap
-          .values()
-          .stream()
-          .flatMap(report -> report.getNewSegments().stream())
-          .map(SegmentIdWithShardSpec::fromDataSegment)
-          .collect(Collectors.toSet());
-      if (usedSegmentChecker.findUsedSegments(segmentsIdentifiers).equals(newSegments)) {
-        LOG.info("Our segments really do exist, awaiting handoff.");
-      } else {
-        throw new ISE("Failed to publish segments[%s]", newSegments);
-      }
+      throw new ISE("Failed to publish segments");
     }
   }
TaskMonitor.java
@@ -277,13 +277,13 @@ public class TaskMonitor<T extends Task>
   {
     T task = spec.newSubTask(numAttempts);
     try {
-      indexingServiceClient.runTask(task);
+      indexingServiceClient.runTask(task.getId(), task);
     }
    catch (Exception e) {
      if (isUnknownTypeIdException(e)) {
        log.warn(e, "Got an unknown type id error. Retrying with a backward compatible type.");
        task = spec.newSubTaskWithBackwardCompatibleType(numAttempts);
-        indexingServiceClient.runTask(task);
+        indexingServiceClient.runTask(task.getId(), task);
      } else {
        throw e;
      }
ClientCompactionTaskQuerySerdeTest.java
@@ -23,17 +23,22 @@ import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.introspect.AnnotationIntrospectorPair;
 import com.fasterxml.jackson.databind.jsontype.NamedType;
 import com.google.common.collect.ImmutableList;
+import com.google.common.collect.ImmutableMap;
 import org.apache.druid.client.coordinator.CoordinatorClient;
 import org.apache.druid.client.indexing.ClientCompactionIOConfig;
 import org.apache.druid.client.indexing.ClientCompactionIntervalSpec;
 import org.apache.druid.client.indexing.ClientCompactionTaskQuery;
 import org.apache.druid.client.indexing.ClientCompactionTaskQueryTuningConfig;
+import org.apache.druid.client.indexing.ClientTaskQuery;
 import org.apache.druid.client.indexing.IndexingServiceClient;
 import org.apache.druid.client.indexing.NoopIndexingServiceClient;
 import org.apache.druid.data.input.SegmentsSplitHintSpec;
 import org.apache.druid.guice.GuiceAnnotationIntrospector;
 import org.apache.druid.guice.GuiceInjectableValues;
 import org.apache.druid.guice.GuiceInjectors;
+import org.apache.druid.indexer.partitions.DynamicPartitionsSpec;
+import org.apache.druid.indexing.common.RetryPolicyConfig;
+import org.apache.druid.indexing.common.RetryPolicyFactory;
 import org.apache.druid.indexing.common.SegmentLoaderFactory;
 import org.apache.druid.indexing.common.TestUtils;
 import org.apache.druid.indexing.common.stats.RowIngestionMetersFactory;
@@ -63,10 +68,11 @@ public class ClientCompactionTaskQuerySerdeTest
   private static final AppenderatorsManager APPENDERATORS_MANAGER = new TestAppenderatorsManager();
 
   @Test
-  public void testSerde() throws IOException
+  public void testClientCompactionTaskQueryToCompactionTask() throws IOException
   {
     final ObjectMapper mapper = setupInjectablesInObjectMapper(new DefaultObjectMapper());
     final ClientCompactionTaskQuery query = new ClientCompactionTaskQuery(
+        "id",
         "datasource",
         new ClientCompactionIOConfig(
             new ClientCompactionIntervalSpec(
@@ -90,12 +96,13 @@ public class ClientCompactionTaskQuerySerdeTest
             1000L,
             100
         ),
-        new HashMap<>()
+        ImmutableMap.of("key", "value")
     );
 
     final byte[] json = mapper.writeValueAsBytes(query);
     final CompactionTask task = (CompactionTask) mapper.readValue(json, Task.class);
 
+    Assert.assertEquals(query.getId(), task.getId());
     Assert.assertEquals(query.getDataSource(), task.getDataSource());
     Assert.assertTrue(task.getIoConfig().getInputSpec() instanceof CompactionIntervalSpec);
     Assert.assertEquals(
@@ -141,6 +148,95 @@ public class ClientCompactionTaskQuerySerdeTest
     Assert.assertEquals(query.getContext(), task.getContext());
   }
 
+  @Test
+  public void testCompactionTaskToClientCompactionTaskQuery() throws IOException
+  {
+    final ObjectMapper mapper = setupInjectablesInObjectMapper(new DefaultObjectMapper());
+    final CompactionTask.Builder builder = new CompactionTask.Builder(
+        "datasource",
+        mapper,
+        AuthTestUtils.TEST_AUTHORIZER_MAPPER,
+        new NoopChatHandlerProvider(),
+        ROW_INGESTION_METERS_FACTORY,
+        new NoopIndexingServiceClient(),
+        COORDINATOR_CLIENT,
+        new SegmentLoaderFactory(null, mapper),
+        new RetryPolicyFactory(new RetryPolicyConfig()),
+        APPENDERATORS_MANAGER
+    );
+    final CompactionTask task = builder
+        .inputSpec(new CompactionIntervalSpec(Intervals.of("2019/2020"), "testSha256OfSortedSegmentIds"))
+        .tuningConfig(
+            new ParallelIndexTuningConfig(
+                null,
+                null,
+                40000,
+                2000L,
+                null,
+                null,
+                new SegmentsSplitHintSpec(100000L),
+                new DynamicPartitionsSpec(100, 30000L),
+                new IndexSpec(
+                    new DefaultBitmapSerdeFactory(),
+                    CompressionStrategy.LZ4,
+                    CompressionStrategy.LZF,
+                    LongEncodingStrategy.LONGS
+                ),
+                null,
+                null,
+                null,
+                null,
+                1000L,
+                null,
+                null,
+                100,
+                null,
+                null,
+                null,
+                null,
+                null,
+                null,
+                null,
+                null,
+                null
+            )
+        )
+        .build();
+
+    final ClientCompactionTaskQuery expected = new ClientCompactionTaskQuery(
+        task.getId(),
+        "datasource",
+        new ClientCompactionIOConfig(
+            new ClientCompactionIntervalSpec(
+                Intervals.of("2019/2020"),
+                "testSha256OfSortedSegmentIds"
+            )
+        ),
+        new ClientCompactionTaskQueryTuningConfig(
+            100,
+            40000,
+            2000L,
+            30000L,
+            new SegmentsSplitHintSpec(100000L),
+            new IndexSpec(
+                new DefaultBitmapSerdeFactory(),
+                CompressionStrategy.LZ4,
+                CompressionStrategy.LZF,
+                LongEncodingStrategy.LONGS
+            ),
+            0,
+            1000L,
+            100
+        ),
+        new HashMap<>()
+    );
+
+    final byte[] json = mapper.writeValueAsBytes(task);
+    final ClientCompactionTaskQuery actual = (ClientCompactionTaskQuery) mapper.readValue(json, ClientTaskQuery.class);
+
+    Assert.assertEquals(expected, actual);
+  }
+
   private static ObjectMapper setupInjectablesInObjectMapper(ObjectMapper objectMapper)
   {
     final GuiceAnnotationIntrospector guiceIntrospector = new GuiceAnnotationIntrospector();
ClientKillUnusedSegmentsTaskQuerySerdeTest.java (new file)
@@ -0,0 +1,80 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one
+ * or more contributor license agreements.  See the NOTICE file
+ * distributed with this work for additional information
+ * regarding copyright ownership.  The ASF licenses this file
+ * to you under the Apache License, Version 2.0 (the
+ * "License"); you may not use this file except in compliance
+ * with the License.  You may obtain a copy of the License at
+ *
+ *   http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing,
+ * software distributed under the License is distributed on an
+ * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
+ * KIND, either express or implied.  See the License for the
+ * specific language governing permissions and limitations
+ * under the License.
+ */
+
+package org.apache.druid.indexing.common.task;
+
+import com.fasterxml.jackson.databind.ObjectMapper;
+import com.fasterxml.jackson.databind.jsontype.NamedType;
+import org.apache.druid.client.indexing.ClientKillUnusedSegmentsTaskQuery;
+import org.apache.druid.client.indexing.ClientTaskQuery;
+import org.apache.druid.jackson.DefaultObjectMapper;
+import org.apache.druid.java.util.common.Intervals;
+import org.junit.Assert;
+import org.junit.Before;
+import org.junit.Test;
+
+import java.io.IOException;
+
+public class ClientKillUnusedSegmentsTaskQuerySerdeTest
+{
+  private ObjectMapper objectMapper;
+
+  @Before
+  public void setup()
+  {
+    objectMapper = new DefaultObjectMapper();
+    objectMapper.registerSubtypes(
+        new NamedType(ClientKillUnusedSegmentsTaskQuery.class, ClientKillUnusedSegmentsTaskQuery.TYPE)
+    );
+  }
+
+  @Test
+  public void testClientKillUnusedSegmentsTaskQueryToKillUnusedSegmentsTask() throws IOException
+  {
+    final ClientKillUnusedSegmentsTaskQuery taskQuery = new ClientKillUnusedSegmentsTaskQuery(
+        "killTaskId",
+        "datasource",
+        Intervals.of("2020-01-01/P1D")
+    );
+    final byte[] json = objectMapper.writeValueAsBytes(taskQuery);
+    final KillUnusedSegmentsTask fromJson = (KillUnusedSegmentsTask) objectMapper.readValue(json, Task.class);
+    Assert.assertEquals(taskQuery.getId(), fromJson.getId());
+    Assert.assertEquals(taskQuery.getDataSource(), fromJson.getDataSource());
+    Assert.assertEquals(taskQuery.getInterval(), fromJson.getInterval());
+  }
+
+  @Test
+  public void testKillUnusedSegmentsTaskToClientKillUnusedSegmentsTaskQuery() throws IOException
+  {
+    final KillUnusedSegmentsTask task = new KillUnusedSegmentsTask(
+        null,
+        "datasource",
+        Intervals.of("2020-01-01/P1D"),
+        null
+    );
+    final byte[] json = objectMapper.writeValueAsBytes(task);
+    final ClientKillUnusedSegmentsTaskQuery taskQuery = (ClientKillUnusedSegmentsTaskQuery) objectMapper.readValue(
+        json,
+        ClientTaskQuery.class
+    );
+    Assert.assertEquals(task.getId(), taskQuery.getId());
+    Assert.assertEquals(task.getDataSource(), taskQuery.getDataSource());
+    Assert.assertEquals(task.getInterval(), taskQuery.getInterval());
+  }
+}
TaskSerdeTest.java
@@ -24,7 +24,6 @@ import com.fasterxml.jackson.databind.ObjectMapper;
 import com.fasterxml.jackson.databind.jsontype.NamedType;
 import com.google.common.collect.ImmutableList;
 import com.google.common.collect.ImmutableMap;
-import org.apache.druid.client.indexing.ClientKillUnusedSegmentsTaskQuery;
 import org.apache.druid.data.input.impl.DimensionsSpec;
 import org.apache.druid.data.input.impl.LocalInputSource;
 import org.apache.druid.data.input.impl.NoopInputFormat;
@@ -383,42 +382,6 @@ public class TaskSerdeTest
     Assert.assertTrue(task2.getIngestionSchema().getIOConfig().getInputSource() instanceof LocalInputSource);
   }
 
-  @Test
-  public void testKillTaskSerde() throws Exception
-  {
-    final KillUnusedSegmentsTask task = new KillUnusedSegmentsTask(
-        null,
-        "foo",
-        Intervals.of("2010-01-01/P1D"),
-        null
-    );
-
-    final String json = jsonMapper.writeValueAsString(task);
-
-    Thread.sleep(100); // Just want to run the clock a bit to make sure the task id doesn't change
-    final KillUnusedSegmentsTask task2 = (KillUnusedSegmentsTask) jsonMapper.readValue(json, Task.class);
-
-    Assert.assertEquals("foo", task.getDataSource());
-    Assert.assertEquals(Intervals.of("2010-01-01/P1D"), task.getInterval());
-
-    Assert.assertEquals(task.getId(), task2.getId());
-    Assert.assertEquals(task.getGroupId(), task2.getGroupId());
-    Assert.assertEquals(task.getDataSource(), task2.getDataSource());
-    Assert.assertEquals(task.getInterval(), task2.getInterval());
-
-    final KillUnusedSegmentsTask task3 = (KillUnusedSegmentsTask) jsonMapper.readValue(
-        jsonMapper.writeValueAsString(
-            new ClientKillUnusedSegmentsTaskQuery(
-                "foo",
-                Intervals.of("2010-01-01/P1D")
-            )
-        ), Task.class
-    );
-
-    Assert.assertEquals("foo", task3.getDataSource());
-    Assert.assertEquals(Intervals.of("2010-01-01/P1D"), task3.getInterval());
-  }
-
   @Test
   public void testRealtimeIndexTaskSerde() throws Exception
   {
AbstractParallelIndexSupervisorTaskTest.java
@@ -454,7 +454,7 @@ public class AbstractParallelIndexSupervisorTaskTest extends IngestionTestBase
     }
 
     @Override
-    public String runTask(Object taskObject)
+    public String runTask(String taskId, Object taskObject)
     {
       final Task task = (Task) taskObject;
       return taskRunner.run(injectIfNeeded(task));
ParallelIndexSupervisorTaskKillTest.java
@@ -83,7 +83,7 @@ public class ParallelIndexSupervisorTaskKillTest extends AbstractParallelIndexSupervisorTaskTest
             false
         )
     );
-    getIndexingServiceClient().runTask(task);
+    getIndexingServiceClient().runTask(task.getId(), task);
     while (task.getCurrentRunner() == null) {
       Thread.sleep(100);
     }
ParallelIndexSupervisorTaskResourceTest.java
@@ -129,7 +129,7 @@ public class ParallelIndexSupervisorTaskResourceTest extends AbstractParallelIndexSupervisorTaskTest
             false
         )
     );
-    getIndexingServiceClient().runTask(task);
+    getIndexingServiceClient().runTask(task.getId(), task);
     Thread.sleep(1000);
 
     final SinglePhaseParallelIndexTaskRunner runner = (SinglePhaseParallelIndexTaskRunner) task.getCurrentRunner();
TaskMonitorTest.java
@@ -245,7 +245,7 @@ public class TaskMonitorTest
   private class TestIndexingServiceClient extends NoopIndexingServiceClient
   {
     @Override
-    public String runTask(Object taskObject)
+    public String runTask(String taskId, Object taskObject)
     {
       final TestTask task = (TestTask) taskObject;
       tasks.put(task.getId(), TaskState.RUNNING);
ClientCompactionTaskQuery.java
@@ -21,6 +21,7 @@ package org.apache.druid.client.indexing;
 
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.common.base.Preconditions;
 
 import java.util.Map;
 import java.util.Objects;
@@ -31,6 +32,9 @@ import java.util.Objects;
  */
 public class ClientCompactionTaskQuery implements ClientTaskQuery
 {
+  static final String TYPE = "compact";
+
+  private final String id;
   private final String dataSource;
   private final ClientCompactionIOConfig ioConfig;
   private final ClientCompactionTaskQueryTuningConfig tuningConfig;
@@ -38,23 +42,32 @@ public class ClientCompactionTaskQuery implements ClientTaskQuery
 
   @JsonCreator
   public ClientCompactionTaskQuery(
+      @JsonProperty("id") String id,
       @JsonProperty("dataSource") String dataSource,
       @JsonProperty("ioConfig") ClientCompactionIOConfig ioConfig,
       @JsonProperty("tuningConfig") ClientCompactionTaskQueryTuningConfig tuningConfig,
       @JsonProperty("context") Map<String, Object> context
   )
   {
+    this.id = Preconditions.checkNotNull(id, "id");
     this.dataSource = dataSource;
     this.ioConfig = ioConfig;
     this.tuningConfig = tuningConfig;
     this.context = context;
   }
 
+  @JsonProperty
+  @Override
+  public String getId()
+  {
+    return id;
+  }
+
   @JsonProperty
   @Override
   public String getType()
   {
-    return "compact";
+    return TYPE;
   }
 
   @JsonProperty
@@ -92,7 +105,8 @@ public class ClientCompactionTaskQuery implements ClientTaskQuery
       return false;
     }
     ClientCompactionTaskQuery that = (ClientCompactionTaskQuery) o;
-    return Objects.equals(dataSource, that.dataSource) &&
+    return Objects.equals(id, that.id) &&
+           Objects.equals(dataSource, that.dataSource) &&
           Objects.equals(ioConfig, that.ioConfig) &&
           Objects.equals(tuningConfig, that.tuningConfig) &&
           Objects.equals(context, that.context);
@@ -101,14 +115,15 @@ public class ClientCompactionTaskQuery implements ClientTaskQuery
   @Override
   public int hashCode()
   {
-    return Objects.hash(dataSource, ioConfig, tuningConfig, context);
+    return Objects.hash(id, dataSource, ioConfig, tuningConfig, context);
   }
 
   @Override
   public String toString()
   {
-    return "ClientCompactQuery{" +
-           "dataSource='" + dataSource + '\'' +
+    return "ClientCompactionTaskQuery{" +
+           "id='" + id + '\'' +
+           ", dataSource='" + dataSource + '\'' +
           ", ioConfig=" + ioConfig +
           ", tuningConfig=" + tuningConfig +
           ", context=" + context +
ClientKillUnusedSegmentsTaskQuery.java
@@ -21,8 +21,11 @@ package org.apache.druid.client.indexing;
 
 import com.fasterxml.jackson.annotation.JsonCreator;
 import com.fasterxml.jackson.annotation.JsonProperty;
+import com.google.common.base.Preconditions;
 import org.joda.time.Interval;
 
+import java.util.Objects;
+
 /**
  * Client representation of org.apache.druid.indexing.common.task.KillUnusedSegmentsTask. JSON searialization
  * fields of this class must correspond to those of org.apache.druid.indexing.common.task.KillUnusedSegmentsTask, except
@@ -30,24 +33,36 @@ import org.joda.time.Interval;
  */
 public class ClientKillUnusedSegmentsTaskQuery implements ClientTaskQuery
 {
+  public static final String TYPE = "kill";
+
+  private final String id;
   private final String dataSource;
   private final Interval interval;
 
   @JsonCreator
   public ClientKillUnusedSegmentsTaskQuery(
+      @JsonProperty("id") String id,
       @JsonProperty("dataSource") String dataSource,
       @JsonProperty("interval") Interval interval
   )
   {
+    this.id = Preconditions.checkNotNull(id, "id");
     this.dataSource = dataSource;
     this.interval = interval;
   }
 
+  @JsonProperty
+  @Override
+  public String getId()
+  {
+    return id;
+  }
+
   @JsonProperty
   @Override
   public String getType()
   {
-    return "kill";
+    return TYPE;
   }
 
   @JsonProperty
@@ -62,4 +77,25 @@ public class ClientKillUnusedSegmentsTaskQuery implements ClientTaskQuery
   {
     return interval;
   }
+
+  @Override
+  public boolean equals(Object o)
+  {
+    if (this == o) {
+      return true;
+    }
+    if (o == null || getClass() != o.getClass()) {
+      return false;
+    }
+    ClientKillUnusedSegmentsTaskQuery that = (ClientKillUnusedSegmentsTaskQuery) o;
+    return Objects.equals(id, that.id) &&
+           Objects.equals(dataSource, that.dataSource) &&
+           Objects.equals(interval, that.interval);
+  }
+
+  @Override
+  public int hashCode()
+  {
+    return Objects.hash(id, dataSource, interval);
+  }
 }
ClientTaskQuery.java
@@ -33,11 +33,13 @@ import com.fasterxml.jackson.annotation.JsonTypeInfo;
  */
 @JsonTypeInfo(use = JsonTypeInfo.Id.NAME, property = "type")
 @JsonSubTypes(value = {
-    @Type(name = "kill", value = ClientKillUnusedSegmentsTaskQuery.class),
-    @Type(name = "compact", value = ClientCompactionTaskQuery.class)
+    @Type(name = ClientKillUnusedSegmentsTaskQuery.TYPE, value = ClientKillUnusedSegmentsTaskQuery.class),
+    @Type(name = ClientCompactionTaskQuery.TYPE, value = ClientCompactionTaskQuery.class)
 })
 public interface ClientTaskQuery
 {
+  String getId();
+
   String getType();
 
   String getDataSource();
HttpIndexingServiceClient.java
@@ -25,6 +25,7 @@ import com.google.common.base.Preconditions;
 import com.google.common.base.Strings;
 import com.google.common.collect.Iterables;
 import com.google.inject.Inject;
+import org.apache.druid.common.utils.IdUtils;
 import org.apache.druid.discovery.DruidLeaderClient;
 import org.apache.druid.indexer.TaskStatusPlus;
 import org.apache.druid.java.util.common.DateTimes;
@@ -65,13 +66,16 @@ public class HttpIndexingServiceClient implements IndexingServiceClient
   }
 
   @Override
-  public void killUnusedSegments(String dataSource, Interval interval)
+  public void killUnusedSegments(String idPrefix, String dataSource, Interval interval)
   {
-    runTask(new ClientKillUnusedSegmentsTaskQuery(dataSource, interval));
+    final String taskId = IdUtils.newTaskId(idPrefix, ClientKillUnusedSegmentsTaskQuery.TYPE, dataSource, interval);
+    final ClientTaskQuery taskQuery = new ClientKillUnusedSegmentsTaskQuery(taskId, dataSource, interval);
+    runTask(taskId, taskQuery);
   }
 
   @Override
   public String compactSegments(
+      String idPrefix,
       List<DataSegment> segments,
       int compactionTaskPriority,
       ClientCompactionTaskQueryTuningConfig tuningConfig,
@@ -89,18 +93,19 @@ public class HttpIndexingServiceClient implements IndexingServiceClient
     context = context == null ? new HashMap<>() : context;
     context.put("priority", compactionTaskPriority);
 
-    return runTask(
-        new ClientCompactionTaskQuery(
-            dataSource,
-            new ClientCompactionIOConfig(ClientCompactionIntervalSpec.fromSegments(segments)),
-            tuningConfig,
-            context
-        )
-    );
+    final String taskId = IdUtils.newTaskId(idPrefix, ClientCompactionTaskQuery.TYPE, dataSource, null);
+    final ClientTaskQuery taskQuery = new ClientCompactionTaskQuery(
+        taskId,
+        dataSource,
+        new ClientCompactionIOConfig(ClientCompactionIntervalSpec.fromSegments(segments)),
+        tuningConfig,
+        context
+    );
+    return runTask(taskId, taskQuery);
   }
 
   @Override
-  public String runTask(Object taskObject)
+  public String runTask(String taskId, Object taskObject)
   {
     try {
       // Warning, magic: here we may serialize ClientTaskQuery objects, but OverlordResource.taskPost() deserializes
@@ -114,11 +119,11 @@ public class HttpIndexingServiceClient implements IndexingServiceClient
       if (!Strings.isNullOrEmpty(response.getContent())) {
         throw new ISE(
             "Failed to post task[%s] with error[%s].",
-            taskObject,
+            taskId,
             response.getContent()
         );
       } else {
-        throw new ISE("Failed to post task[%s]. Please check overlord log", taskObject);
+        throw new ISE("Failed to post task[%s]. Please check overlord log", taskId);
       }
     }
 
@@ -126,8 +131,14 @@ public class HttpIndexingServiceClient implements IndexingServiceClient
           response.getContent(),
          JacksonUtils.TYPE_REFERENCE_MAP_STRING_OBJECT
       );
-      final String taskId = (String) resultMap.get("task");
-      return Preconditions.checkNotNull(taskId, "Null task id for task[%s]", taskObject);
+      final String returnedTaskId = (String) resultMap.get("task");
+      Preconditions.checkState(
+          taskId.equals(returnedTaskId),
+          "Got a different taskId[%s]. Expected taskId[%s]",
+          returnedTaskId,
+          taskId
+      );
+      return taskId;
     }
     catch (Exception e) {
       throw new RuntimeException(e);
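The client API change above inverts responsibility for IDs: the caller mints the task ID before POSTing, and the overlord is expected to create the task under that exact ID. A hedged sketch of the new calling convention (the variables are illustrative, mirroring killUnusedSegments above):

    // Caller picks the task id up front...
    String taskId = IdUtils.newTaskId(idPrefix, ClientKillUnusedSegmentsTaskQuery.TYPE, dataSource, interval);
    ClientTaskQuery taskQuery = new ClientKillUnusedSegmentsTaskQuery(taskId, dataSource, interval);
    // ...so errors are reported by id alone, and runTask() fails fast, via the
    // Preconditions.checkState() above, if the overlord reports a different id.
    String submittedId = indexingServiceClient.runTask(taskId, taskQuery);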
IndexingServiceClient.java
@@ -31,11 +31,12 @@ import java.util.Set;
 
 public interface IndexingServiceClient
 {
-  void killUnusedSegments(String dataSource, Interval interval);
+  void killUnusedSegments(String idPrefix, String dataSource, Interval interval);
 
   int killPendingSegments(String dataSource, DateTime end);
 
   String compactSegments(
+      String idPrefix,
       List<DataSegment> segments,
       int compactionTaskPriority,
       @Nullable ClientCompactionTaskQueryTuningConfig tuningConfig,
@@ -44,7 +45,7 @@ public interface IndexingServiceClient
 
   int getTotalWorkerCapacity();
 
-  String runTask(Object taskObject);
+  String runTask(String taskId, Object taskObject);
 
   String cancelTask(String taskId);
CompactSegments.java
@@ -203,6 +203,7 @@ public class CompactSegments implements CoordinatorDuty
       final DataSourceCompactionConfig config = compactionConfigs.get(dataSourceName);
       // make tuningConfig
       final String taskId = indexingServiceClient.compactSegments(
+          "coordinator-issued",
          segmentsToCompact,
          config.getTaskPriority(),
          ClientCompactionTaskQueryTuningConfig.from(config.getTuningConfig(), config.getMaxRowsPerSegment()),
KillUnusedSegments.java
@@ -111,7 +111,7 @@ public class KillUnusedSegments implements CoordinatorDuty
       final Interval intervalToKill = findIntervalForKill(dataSource, maxSegmentsToKill);
       if (intervalToKill != null) {
         try {
-          indexingServiceClient.killUnusedSegments(dataSource, intervalToKill);
+          indexingServiceClient.killUnusedSegments("coordinator-issued", dataSource, intervalToKill);
         }
         catch (Exception ex) {
           log.error(ex, "Failed to submit kill task for dataSource [%s]", dataSource);
DataSourcesResource.java
@@ -330,7 +330,7 @@ public class DataSourcesResource
     }
     final Interval theInterval = Intervals.of(interval.replace('_', '/'));
     try {
-      indexingServiceClient.killUnusedSegments(dataSourceName, theInterval);
+      indexingServiceClient.killUnusedSegments("api-issued", dataSourceName, theInterval);
       return Response.ok().build();
     }
     catch (Exception e) {
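The fixed prefixes make a task's origin readable from its ID alone: "coordinator-issued" for tasks spawned automatically by coordinator duties, "api-issued" for kills triggered through the datasources endpoint. For a hypothetical "wikipedia" datasource the two paths would yield IDs like these (placeholders in angle brackets):

    // coordinator duty (KillUnusedSegments):
    indexingServiceClient.killUnusedSegments("coordinator-issued", "wikipedia", interval);
    // -> "coordinator-issued_kill_wikipedia_<randomSuffix>_<start>_<end>_<now>"
    // HTTP API (DataSourcesResource):
    indexingServiceClient.killUnusedSegments("api-issued", "wikipedia", interval);
    // -> "api-issued_kill_wikipedia_<randomSuffix>_<start>_<end>_<now>"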
ClientKillUnusedSegmentsQueryTest.java (class renamed to ClientKillUnusedSegmentsTaskQueryTest)
@@ -19,6 +19,7 @@
 
 package org.apache.druid.client.indexing;
 
+import nl.jqno.equalsverifier.EqualsVerifier;
 import org.apache.druid.java.util.common.DateTimes;
 import org.joda.time.DateTime;
 import org.joda.time.Interval;
@@ -27,7 +28,7 @@ import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Test;
 
-public class ClientKillUnusedSegmentsQueryTest
+public class ClientKillUnusedSegmentsTaskQueryTest
 {
   private static final String DATA_SOURCE = "data_source";
   public static final DateTime START = DateTimes.nowUtc();
@@ -38,7 +39,7 @@ public class ClientKillUnusedSegmentsQueryTest
   @Before
   public void setUp()
   {
-    clientKillUnusedSegmentsQuery = new ClientKillUnusedSegmentsTaskQuery(DATA_SOURCE, INTERVAL);
+    clientKillUnusedSegmentsQuery = new ClientKillUnusedSegmentsTaskQuery("killTaskId", DATA_SOURCE, INTERVAL);
   }
 
   @After
@@ -64,4 +65,13 @@ public class ClientKillUnusedSegmentsQueryTest
   {
     Assert.assertEquals(INTERVAL, clientKillUnusedSegmentsQuery.getInterval());
   }
+
+  @Test
+  public void testEquals()
+  {
+    EqualsVerifier.forClass(ClientKillUnusedSegmentsTaskQuery.class)
+                  .usingGetClass()
+                  .withNonnullFields("id", "dataSource", "interval")
+                  .verify();
+  }
 }
NoopIndexingServiceClient.java
@@ -33,7 +33,7 @@ import java.util.Set;
 public class NoopIndexingServiceClient implements IndexingServiceClient
 {
   @Override
-  public void killUnusedSegments(String dataSource, Interval interval)
+  public void killUnusedSegments(String idPrefix, String dataSource, Interval interval)
   {
 
   }
@@ -46,6 +46,7 @@ public class NoopIndexingServiceClient implements IndexingServiceClient
 
   @Override
   public String compactSegments(
+      String idPrefix,
       List<DataSegment> segments,
       int compactionTaskPriority,
       @Nullable ClientCompactionTaskQueryTuningConfig tuningConfig,
@@ -62,7 +63,7 @@ public class NoopIndexingServiceClient implements IndexingServiceClient
   }
 
   @Override
-  public String runTask(Object taskObject)
+  public String runTask(String taskId, Object taskObject)
   {
     return null;
   }
CompactSegmentsTest.java
@@ -356,7 +356,6 @@ public class CompactSegmentsTest
     private final ObjectMapper jsonMapper;
 
     private int compactVersionSuffix = 0;
-    private int idSuffix = 0;
 
     private TestDruidLeaderClient(ObjectMapper jsonMapper)
     {
@@ -434,15 +433,15 @@ public class CompactSegmentsTest
           .flatMap(holder -> Streams.sequentialStreamFrom(holder.getObject()))
          .map(PartitionChunk::getObject)
          .collect(Collectors.toList());
-      final String taskId = compactSegments(
+      compactSegments(
          timeline,
          segments,
          compactionTaskQuery.getTuningConfig()
      );
-      return createStringFullResponseHolder(jsonMapper.writeValueAsString(ImmutableMap.of("task", taskId)));
+      return createStringFullResponseHolder(jsonMapper.writeValueAsString(ImmutableMap.of("task", taskQuery.getId())));
     }
 
-    private String compactSegments(
+    private void compactSegments(
        VersionedIntervalTimeline<String, DataSegment> timeline,
        List<DataSegment> segments,
        ClientCompactionTaskQueryTuningConfig tuningConfig
@@ -503,8 +502,6 @@ public class CompactSegmentsTest
            compactSegment.getShardSpec().createChunk(compactSegment)
        );
      }
-
-      return "task_" + idSuffix++;
    }
  }
DataSourcesResourceTest.java
@@ -592,7 +592,7 @@ public class DataSourcesResourceTest
     Interval theInterval = Intervals.of(interval.replace('_', '/'));
 
     IndexingServiceClient indexingServiceClient = EasyMock.createStrictMock(IndexingServiceClient.class);
-    indexingServiceClient.killUnusedSegments("datasource1", theInterval);
+    indexingServiceClient.killUnusedSegments("api-issued", "datasource1", theInterval);
     EasyMock.expectLastCall().once();
     EasyMock.replay(indexingServiceClient, server);