Made client calls non-blocking for JobProvider#getDataCounts(..)
Original commit: elastic/x-pack-elasticsearch@4d6d6360f6
Parent: 468402426e
Commit: 092d2e2bdc
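The change converts JobProvider#dataCounts from a blocking return value into a callback pair and threads that shape through every caller. A minimal sketch of the before/after API shapes (the interfaces and the DataCounts stub below are illustrative stand-ins, not types from the source tree):

import java.util.function.Consumer;

class DataCounts { }   // stand-in for org.elasticsearch.xpack.prelert.job.DataCounts

// Before: a blocking GET on the client; the calling thread waits for the result.
interface BlockingJobProvider {
    DataCounts dataCounts(String jobId);
}

// After: the method returns immediately; the result or the failure is
// delivered later through one of the two callbacks.
interface AsyncJobProvider {
    void dataCounts(String jobId, Consumer<DataCounts> handler, Consumer<Exception> errorHandler);
}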
GetJobsStatsAction.java:

@@ -22,6 +22,7 @@ import org.elasticsearch.common.io.stream.StreamInput;
 import org.elasticsearch.common.io.stream.StreamOutput;
 import org.elasticsearch.common.io.stream.Writeable;
 import org.elasticsearch.common.settings.Settings;
+import org.elasticsearch.common.util.concurrent.AtomicArray;
 import org.elasticsearch.common.xcontent.StatusToXContent;
 import org.elasticsearch.common.xcontent.ToXContent;
 import org.elasticsearch.common.xcontent.XContentBuilder;

@@ -41,10 +42,12 @@ import org.elasticsearch.xpack.prelert.job.persistence.QueryPage;
 import org.elasticsearch.xpack.prelert.utils.ExceptionsHelper;
 
 import java.io.IOException;
-import java.util.ArrayList;
 import java.util.List;
 import java.util.Objects;
 import java.util.Optional;
+import java.util.concurrent.atomic.AtomicInteger;
+import java.util.function.Consumer;
+import java.util.stream.Collectors;
 
 public class GetJobsStatsAction extends Action<GetJobsStatsAction.Request, GetJobsStatsAction.Response, GetJobsStatsAction.RequestBuilder> {
 
@@ -297,24 +300,36 @@ public class GetJobsStatsAction extends Action<GetJobsStatsAction.Request, GetJo
         @Override
         protected void doExecute(Request request, ActionListener<Response> listener) {
             logger.debug("Get stats for job '{}'", request.getJobId());
 
-            List<Response.JobStats> jobsStats = new ArrayList<>();
             QueryPage<Job> jobs = jobManager.getJob(request.getJobId(), clusterService.state());
             PrelertMetadata prelertMetadata = clusterService.state().metaData().custom(PrelertMetadata.TYPE);
-            for (Job job : jobs.results()) {
-                DataCounts dataCounts = readDataCounts(job.getId());
-                ModelSizeStats modelSizeStats = readModelSizeStats(job.getId());
-                JobStatus status = prelertMetadata.getAllocations().get(job.getId()).getStatus();
-                jobsStats.add(new Response.JobStats(job.getId(), dataCounts, modelSizeStats, status));
-            }
-
-            QueryPage<Response.JobStats> jobsStatsPage = new QueryPage<>(jobsStats, jobsStats.size(), Job.RESULTS_FIELD);
-            listener.onResponse(new GetJobsStatsAction.Response(jobsStatsPage));
+            AtomicInteger counter = new AtomicInteger(0);
+            AtomicArray<Response.JobStats> jobsStats = new AtomicArray<>(jobs.results().size());
+            for (int i = 0; i < jobs.results().size(); i++) {
+                int slot = i;
+                Job job = jobs.results().get(slot);
+                readDataCounts(job.getId(), dataCounts -> {
+                    ModelSizeStats modelSizeStats = readModelSizeStats(job.getId());
+                    JobStatus status = prelertMetadata.getAllocations().get(job.getId()).getStatus();
+                    jobsStats.setOnce(slot, new Response.JobStats(job.getId(), dataCounts, modelSizeStats, status));
+
+                    if (counter.incrementAndGet() == jobsStats.length()) {
+                        List<Response.JobStats> results =
+                                jobsStats.asList().stream().map(entry -> entry.value).collect(Collectors.toList());
+                        QueryPage<Response.JobStats> jobsStatsPage = new QueryPage<>(results, results.size(), Job.RESULTS_FIELD);
+                        listener.onResponse(new GetJobsStatsAction.Response(jobsStatsPage));
+                    }
+                }, listener::onFailure);
+            }
         }
 
-        private DataCounts readDataCounts(String jobId) {
+        private void readDataCounts(String jobId, Consumer<DataCounts> handler, Consumer<Exception> errorHandler) {
             Optional<DataCounts> counts = processManager.getDataCounts(jobId);
-            return counts.orElseGet(() -> jobProvider.dataCounts(jobId));
+            if (counts.isPresent()) {
+                handler.accept(counts.get());
+            } else {
+                jobProvider.dataCounts(jobId, handler, errorHandler);
+            }
         }
 
         private ModelSizeStats readModelSizeStats(String jobId) {
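The doExecute() rewrite above is a fan-out/fan-in: one asynchronous readDataCounts call per job, an AtomicInteger counting completions, and the response sent exactly once, by whichever callback finishes last. AtomicArray is an Elasticsearch-internal helper that keeps results in request order; the sketch below is a self-contained version of the same pattern using only JDK types (names are illustrative):

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.atomic.AtomicReferenceArray;
import java.util.function.BiConsumer;
import java.util.function.Consumer;

public class FanIn {
    // Issues one async call per id and invokes onDone exactly once, after the
    // last callback has delivered. Slots keep results in input order even
    // though callbacks may complete in any order.
    static <T> void gather(List<String> ids,
                           BiConsumer<String, Consumer<T>> asyncCall,
                           Consumer<List<T>> onDone) {
        AtomicReferenceArray<T> results = new AtomicReferenceArray<>(ids.size());
        AtomicInteger counter = new AtomicInteger(0);
        for (int i = 0; i < ids.size(); i++) {
            int slot = i;   // effectively final copy for capture by the lambda
            asyncCall.accept(ids.get(slot), result -> {
                results.set(slot, result);
                if (counter.incrementAndGet() == ids.size()) {
                    List<T> all = new ArrayList<>(ids.size());
                    for (int j = 0; j < ids.size(); j++) {
                        all.add(results.get(j));
                    }
                    onDone.accept(all);
                }
            });
        }
    }

    public static void main(String[] args) {
        gather(List.of("job-a", "job-b"),
                (String id, Consumer<String> cb) -> new Thread(() -> cb.accept(id + "-counts")).start(),
                (List<String> all) -> System.out.println(all));
    }
}

One property worth noting: with an empty input list no callback ever fires and onDone is never invoked; the rewritten doExecute has the same shape, since the listener only responds from inside a callback.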
RevertModelSnapshotAction.java:

@@ -36,16 +36,15 @@ import org.elasticsearch.common.xcontent.XContentParser;
 import org.elasticsearch.rest.RestStatus;
 import org.elasticsearch.threadpool.ThreadPool;
 import org.elasticsearch.transport.TransportService;
-import org.elasticsearch.xpack.prelert.job.DataCounts;
 import org.elasticsearch.xpack.prelert.job.Job;
 import org.elasticsearch.xpack.prelert.job.JobStatus;
 import org.elasticsearch.xpack.prelert.job.ModelSnapshot;
 import org.elasticsearch.xpack.prelert.job.manager.JobManager;
 import org.elasticsearch.xpack.prelert.job.messages.Messages;
 import org.elasticsearch.xpack.prelert.job.metadata.Allocation;
+import org.elasticsearch.xpack.prelert.job.persistence.JobDataCountsPersister;
 import org.elasticsearch.xpack.prelert.job.persistence.JobDataDeleterFactory;
 import org.elasticsearch.xpack.prelert.job.persistence.JobProvider;
-import org.elasticsearch.xpack.prelert.job.persistence.JobDataCountsPersister;
 import org.elasticsearch.xpack.prelert.job.persistence.OldDataRemover;
 import org.elasticsearch.xpack.prelert.job.persistence.QueryPage;
 import org.elasticsearch.xpack.prelert.utils.ExceptionsHelper;
@@ -419,19 +418,20 @@ extends Action<RevertModelSnapshotAction.Request, RevertModelSnapshotAction.Resp
 
         return ActionListener.wrap(response -> {
             if (response.isAcknowledged()) {
-                DataCounts counts = jobProvider.dataCounts(jobId);
+                jobProvider.dataCounts(jobId, counts -> {
                 counts.setLatestRecordTimeStamp(modelSnapshot.getLatestRecordTimeStamp());
                 jobDataCountsPersister.persistDataCounts(jobId, counts, new ActionListener<Boolean>() {
                     @Override
                     public void onResponse(Boolean aBoolean) {
                         listener.onResponse(response);
                     }
 
                     @Override
                     public void onFailure(Exception e) {
                         listener.onFailure(e);
                     }
                 });
+                }, listener::onFailure);
             }
         }, listener::onFailure);
     }
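A detail of the hunk above that recurs throughout this commit: each new level of callback nesting reuses the outer listener's failure path as a method reference, listener::onFailure, so a single failure handler serves the whole chain. A minimal stand-in (this ActionListener is a local sketch, not org.elasticsearch.action.ActionListener, and the method names are illustrative):

import java.util.function.Consumer;

public class ErrorPropagation {
    // Local stand-in for org.elasticsearch.action.ActionListener.
    interface ActionListener<T> {
        void onResponse(T result);
        void onFailure(Exception e);
    }

    // An async step in the style of the new dataCounts(): success and failure
    // are delivered through callbacks rather than return/throw.
    static void dataCounts(String jobId, Consumer<String> handler, Consumer<Exception> errorHandler) {
        handler.accept(jobId + ":counts");
    }

    // The caller threads listener::onFailure through as the error callback,
    // so a failure at any depth surfaces through the same listener.
    static void revert(String jobId, ActionListener<String> listener) {
        dataCounts(jobId, counts -> {
            // ... persist the updated counts here, then acknowledge ...
            listener.onResponse(counts);
        }, listener::onFailure);
    }

    public static void main(String[] args) {
        revert("job-1", new ActionListener<String>() {
            @Override public void onResponse(String r) { System.out.println("acknowledged: " + r); }
            @Override public void onFailure(Exception e) { e.printStackTrace(); }
        });
    }
}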
AutodetectProcessManager.java:

@@ -10,7 +10,6 @@ import org.elasticsearch.ElasticsearchStatusException;
 import org.elasticsearch.action.ActionListener;
 import org.elasticsearch.client.Client;
 import org.elasticsearch.common.component.AbstractComponent;
-import org.elasticsearch.common.settings.ClusterSettings;
 import org.elasticsearch.common.settings.Setting;
 import org.elasticsearch.common.settings.Settings;
 import org.elasticsearch.rest.RestStatus;

@@ -24,10 +23,10 @@ import org.elasticsearch.xpack.prelert.job.ModelSizeStats;
 import org.elasticsearch.xpack.prelert.job.data.DataProcessor;
 import org.elasticsearch.xpack.prelert.job.metadata.Allocation;
 import org.elasticsearch.xpack.prelert.job.persistence.JobDataCountsPersister;
-import org.elasticsearch.xpack.prelert.job.persistence.JobRenormalizedResultsPersister;
-import org.elasticsearch.xpack.prelert.job.persistence.UsagePersister;
 import org.elasticsearch.xpack.prelert.job.persistence.JobProvider;
+import org.elasticsearch.xpack.prelert.job.persistence.JobRenormalizedResultsPersister;
 import org.elasticsearch.xpack.prelert.job.persistence.JobResultsPersister;
+import org.elasticsearch.xpack.prelert.job.persistence.UsagePersister;
 import org.elasticsearch.xpack.prelert.job.process.autodetect.AutodetectCommunicator;
 import org.elasticsearch.xpack.prelert.job.process.autodetect.AutodetectProcess;
 import org.elasticsearch.xpack.prelert.job.process.autodetect.AutodetectProcessFactory;

@@ -52,8 +51,10 @@ import java.util.Locale;
 import java.util.Optional;
 import java.util.concurrent.ConcurrentHashMap;
 import java.util.concurrent.ConcurrentMap;
+import java.util.concurrent.CountDownLatch;
 import java.util.concurrent.ExecutorService;
 import java.util.concurrent.TimeoutException;
+import java.util.concurrent.atomic.AtomicReference;
 import java.util.function.Supplier;
 
 public class AutodetectProcessManager extends AbstractComponent implements DataProcessor {

@@ -190,7 +191,7 @@ public class AutodetectProcessManager extends AbstractComponent implements DataP
         ExecutorService executorService = threadPool.executor(PrelertPlugin.AUTODETECT_PROCESS_THREAD_POOL_NAME);
 
         UsageReporter usageReporter = new UsageReporter(settings, job.getId(), usagePersister);
-        try (StatusReporter statusReporter = new StatusReporter(threadPool, settings, job.getId(), jobProvider.dataCounts(jobId),
+        try (StatusReporter statusReporter = new StatusReporter(threadPool, settings, job.getId(), fetchDataCounts(jobId),
                 usageReporter, jobDataCountsPersister)) {
             ScoresUpdater scoresUpdator = new ScoresUpdater(job, jobProvider, jobRenormalizedResultsPersister, normalizerFactory);
             Renormalizer renormalizer = new ShortCircuitingRenormalizer(jobId, scoresUpdator,
@@ -212,6 +213,28 @@ public class AutodetectProcessManager extends AbstractComponent implements DataP
         }
     }
 
+    private DataCounts fetchDataCounts(String jobId) {
+        CountDownLatch latch = new CountDownLatch(1);
+        AtomicReference<DataCounts> holder = new AtomicReference<>();
+        AtomicReference<Exception> errorHolder = new AtomicReference<>();
+        jobProvider.dataCounts(jobId, dataCounts -> {
+            holder.set(dataCounts);
+            latch.countDown();
+        }, e -> {
+            errorHolder.set(e);
+            latch.countDown();
+        });
+        try {
+            latch.await();
+        } catch (InterruptedException e) {
+            Thread.currentThread().interrupt();
+        }
+        if (errorHolder.get() != null) {
+            throw org.elasticsearch.ExceptionsHelper.convertToElastic(errorHolder.get());
+        }
+        return holder.get();
+    }
+
     @Override
     public void closeJob(String jobId) {
         logger.debug("Closing job {}", jobId);
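fetchDataCounts() is the adapter in the opposite direction: openJob() still needs a plain value for the StatusReporter constructor, so the async call is parked on a CountDownLatch with two AtomicReference holders. A self-contained sketch of the same shape, with the async source simulated by a thread (names illustrative):

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.atomic.AtomicReference;
import java.util.function.Consumer;

public class AsyncToSync {
    // Simulated async API in the style of the new JobProvider#dataCounts.
    static void dataCountsAsync(String jobId, Consumer<String> handler, Consumer<Exception> errorHandler) {
        new Thread(() -> handler.accept(jobId + ":counts")).start();
    }

    // Blocks the calling thread until either callback fires, then returns
    // the result or rethrows the captured failure.
    static String dataCountsBlocking(String jobId) throws Exception {
        CountDownLatch latch = new CountDownLatch(1);
        AtomicReference<String> result = new AtomicReference<>();
        AtomicReference<Exception> error = new AtomicReference<>();
        dataCountsAsync(jobId, r -> {
            result.set(r);
            latch.countDown();
        }, e -> {
            error.set(e);
            latch.countDown();
        });
        latch.await();
        if (error.get() != null) {
            throw error.get();
        }
        return result.get();
    }

    public static void main(String[] args) throws Exception {
        System.out.println(dataCountsBlocking("job-1"));
    }
}

Note a caveat visible in the diff itself: on InterruptedException the method restores the interrupt flag but still falls through, so it can return null if the thread is interrupted before either callback fires.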
JobProvider.java:

@@ -338,30 +338,32 @@ public class JobProvider {
      * Get the job's data counts
      *
      * @param jobId The job id
-     * @return The dataCounts or default constructed object if not found
      */
-    public DataCounts dataCounts(String jobId) {
+    public void dataCounts(String jobId, Consumer<DataCounts> handler, Consumer<Exception> errorHandler) {
         String indexName = AnomalyDetectorsIndex.jobResultsIndexName(jobId);
-        try {
-            GetRequest getRequest = new GetRequest(indexName, DataCounts.TYPE.getPreferredName(), jobId + DataCounts.DOCUMENT_SUFFIX);
-            GetResponse response = FixBlockingClientOperations.executeBlocking(client, GetAction.INSTANCE, getRequest);
-            if (response.isExists() == false) {
-                return new DataCounts(jobId);
-            } else {
-                BytesReference source = response.getSourceAsBytesRef();
-                XContentParser parser;
-                try {
-                    parser = XContentFactory.xContent(source).createParser(NamedXContentRegistry.EMPTY, source);
-                    return DataCounts.PARSER.apply(parser, () -> parseFieldMatcher);
-                } catch (IOException e) {
-                    throw new ElasticsearchParseException("failed to parse bucket", e);
-                }
-            }
-        } catch (IndexNotFoundException e) {
-            throw ExceptionsHelper.missingJobException(jobId);
-        }
+        GetRequest getRequest = new GetRequest(indexName, DataCounts.TYPE.getPreferredName(), jobId + DataCounts.DOCUMENT_SUFFIX);
+        client.get(getRequest, ActionListener.wrap(
+                response -> {
+                    if (response.isExists() == false) {
+                        handler.accept(new DataCounts(jobId));
+                    } else {
+                        BytesReference source = response.getSourceAsBytesRef();
+                        XContentParser parser;
+                        try {
+                            parser = XContentFactory.xContent(source).createParser(NamedXContentRegistry.EMPTY, source);
+                            handler.accept(DataCounts.PARSER.apply(parser, () -> parseFieldMatcher));
+                        } catch (IOException e) {
+                            throw new ElasticsearchParseException("failed to parse bucket", e);
+                        }
+                    }
+                },
+                e -> {
+                    if (e instanceof IndexNotFoundException) {
+                        errorHandler.accept(ExceptionsHelper.missingJobException(jobId));
+                    } else {
+                        errorHandler.accept(e);
+                    }
+                }));
     }
 
     /**
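The provider itself swaps FixBlockingClientOperations.executeBlocking for the client's async overload plus ActionListener.wrap(responseHandler, failureHandler). A detail this relies on: wrap() catches an exception thrown by the response handler and routes it to the failure handler, which is how the ElasticsearchParseException above still reaches errorHandler. A stripped-down model of that control flow (Listener is a local stand-in, not the real org.elasticsearch.action.ActionListener):

import java.util.function.Consumer;

public class WrapPattern {
    // Local model of ActionListener.wrap(): note the try/catch in onResponse,
    // which fails a throwing response handler over to the error path instead
    // of letting the exception escape up the transport stack.
    interface Listener<T> {
        void onResponse(T response);
        void onFailure(Exception e);

        static <T> Listener<T> wrap(Consumer<T> handler, Consumer<Exception> errorHandler) {
            return new Listener<T>() {
                @Override
                public void onResponse(T response) {
                    try {
                        handler.accept(response);
                    } catch (Exception e) {
                        errorHandler.accept(e);   // e.g. the parse failure in the hunk above
                    }
                }
                @Override
                public void onFailure(Exception e) {
                    errorHandler.accept(e);       // transport errors, index-not-found, ...
                }
            };
        }
    }

    public static void main(String[] args) {
        Listener<String> listener = Listener.wrap(
                source -> { throw new IllegalStateException("failed to parse"); },
                e -> System.out.println("error handler got: " + e.getMessage()));
        listener.onResponse("{\"processed_record_count\":42}");
    }
}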
ScheduledJobRunner.java:

@@ -30,12 +30,15 @@ import org.elasticsearch.xpack.prelert.job.metadata.Allocation;
 import org.elasticsearch.xpack.prelert.job.metadata.PrelertMetadata;
 import org.elasticsearch.xpack.prelert.job.persistence.BucketsQueryBuilder;
 import org.elasticsearch.xpack.prelert.job.persistence.JobProvider;
+import org.elasticsearch.xpack.prelert.job.persistence.QueryPage;
 import org.elasticsearch.xpack.prelert.job.results.Bucket;
 import org.elasticsearch.xpack.prelert.utils.ExceptionsHelper;
 
 import java.time.Duration;
+import java.util.Collections;
 import java.util.Objects;
 import java.util.concurrent.Future;
+import java.util.function.BiConsumer;
 import java.util.function.Consumer;
 import java.util.function.Supplier;
 
@@ -66,27 +69,19 @@ public class ScheduledJobRunner extends AbstractComponent {
 
         Scheduler scheduler = prelertMetadata.getScheduler(schedulerId);
         Job job = prelertMetadata.getJobs().get(scheduler.getJobId());
-        BucketsQueryBuilder.BucketsQuery latestBucketQuery = new BucketsQueryBuilder()
-                .sortField(Bucket.TIMESTAMP.getPreferredName())
-                .sortDescending(true).size(1)
-                .includeInterim(false)
-                .build();
-        jobProvider.buckets(job.getId(), latestBucketQuery, buckets -> {
+        gatherInformation(job.getId(), (buckets, dataCounts) -> {
             long latestFinalBucketEndMs = -1L;
             Duration bucketSpan = Duration.ofSeconds(job.getAnalysisConfig().getBucketSpan());
             if (buckets.results().size() == 1) {
                 latestFinalBucketEndMs = buckets.results().get(0).getTimestamp().getTime() + bucketSpan.toMillis() - 1;
             }
-            Holder holder = createJobScheduler(scheduler, job, latestFinalBucketEndMs, handler, task);
-            innerRun(holder, startTime, endTime);
-        }, e -> {
-            if (e instanceof ResourceNotFoundException) {
-                Holder holder = createJobScheduler(scheduler, job, -1L, handler, task);
-                innerRun(holder, startTime, endTime);
-            } else {
-                handler.accept(e);
-            }
-        });
+            long latestRecordTimeMs = -1L;
+            if (dataCounts.getLatestRecordTimeStamp() != null) {
+                latestRecordTimeMs = dataCounts.getLatestRecordTimeStamp().getTime();
+            }
+            Holder holder = createJobScheduler(scheduler, job, latestFinalBucketEndMs, latestRecordTimeMs, handler, task);
+            innerRun(holder, startTime, endTime);
+        }, handler);
     }
 
     // Important: Holder must be created and assigned to SchedulerTask before setting status to started,
@@ -192,26 +187,35 @@ public class ScheduledJobRunner extends AbstractComponent {
             ScheduledJobValidator.validate(scheduler.getConfig(), job);
     }
 
-    private Holder createJobScheduler(Scheduler scheduler, Job job, long latestFinalBucketEndMs, Consumer<Exception> handler,
-                                      StartSchedulerAction.SchedulerTask task) {
+    private Holder createJobScheduler(Scheduler scheduler, Job job, long finalBucketEndMs, long latestRecordTimeMs,
+                                      Consumer<Exception> handler, StartSchedulerAction.SchedulerTask task) {
         Auditor auditor = jobProvider.audit(job.getId());
         Duration frequency = getFrequencyOrDefault(scheduler, job);
         Duration queryDelay = Duration.ofSeconds(scheduler.getConfig().getQueryDelay());
         DataExtractor dataExtractor = dataExtractorFactory.newExtractor(scheduler.getConfig(), job);
         ScheduledJob scheduledJob = new ScheduledJob(job.getId(), frequency.toMillis(), queryDelay.toMillis(),
-                dataExtractor, client, auditor, currentTimeSupplier, latestFinalBucketEndMs, getLatestRecordTimestamp(job.getId()));
+                dataExtractor, client, auditor, currentTimeSupplier, finalBucketEndMs, latestRecordTimeMs);
         Holder holder = new Holder(scheduler, scheduledJob, new ProblemTracker(() -> auditor), handler);
         task.setHolder(holder);
         return holder;
     }
 
-    private long getLatestRecordTimestamp(String jobId) {
-        long latestRecordTimeMs = -1L;
-        DataCounts dataCounts = jobProvider.dataCounts(jobId);
-        if (dataCounts.getLatestRecordTimeStamp() != null) {
-            latestRecordTimeMs = dataCounts.getLatestRecordTimeStamp().getTime();
-        }
-        return latestRecordTimeMs;
-    }
+    private void gatherInformation(String jobId, BiConsumer<QueryPage<Bucket>, DataCounts> handler, Consumer<Exception> errorHandler) {
+        BucketsQueryBuilder.BucketsQuery latestBucketQuery = new BucketsQueryBuilder()
+                .sortField(Bucket.TIMESTAMP.getPreferredName())
+                .sortDescending(true).size(1)
+                .includeInterim(false)
+                .build();
+        jobProvider.buckets(jobId, latestBucketQuery, buckets -> {
+            jobProvider.dataCounts(jobId, dataCounts -> handler.accept(buckets, dataCounts), errorHandler);
+        }, e -> {
+            if (e instanceof ResourceNotFoundException) {
+                QueryPage<Bucket> empty = new QueryPage<>(Collections.emptyList(), 0, Bucket.RESULT_TYPE_FIELD);
+                jobProvider.dataCounts(jobId, dataCounts -> handler.accept(empty, dataCounts), errorHandler);
+            } else {
+                errorHandler.accept(e);
+            }
+        });
+    }
 
     private static Duration getFrequencyOrDefault(Scheduler scheduler, Job job) {
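gatherInformation() turns two dependent blocking reads into sequential callback chaining: the dataCounts lookup is issued from inside the buckets callback, and a single BiConsumer receives the combined result; a ResourceNotFoundException from the buckets query is absorbed by substituting an empty page. A minimal sketch of that chaining with stubbed data sources (all names illustrative):

import java.util.function.BiConsumer;
import java.util.function.Consumer;

public class Gather {
    // Two async sources in the style of the diff (results faked inline).
    static void latestBucket(String jobId, Consumer<String> handler, Consumer<Exception> errorHandler) {
        handler.accept("bucket@1000");
    }
    static void dataCounts(String jobId, Consumer<String> handler, Consumer<Exception> errorHandler) {
        handler.accept("counts:42");
    }

    // Sequential chaining: the second call is issued from inside the first
    // call's success callback, and the combined result is handed to one
    // BiConsumer, mirroring gatherInformation() in the hunk above.
    static void gatherInformation(String jobId, BiConsumer<String, String> handler, Consumer<Exception> errorHandler) {
        latestBucket(jobId, bucket ->
                dataCounts(jobId, counts -> handler.accept(bucket, counts), errorHandler),
                errorHandler);
    }

    public static void main(String[] args) {
        gatherInformation("job-1", (bucket, counts) ->
                System.out.println(bucket + " / " + counts), Throwable::printStackTrace);
    }
}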
AutodetectProcessManagerTests.java:

@@ -43,10 +43,9 @@ import java.io.IOException;
 import java.io.InputStream;
 import java.nio.charset.StandardCharsets;
 import java.util.Collections;
-import java.util.HashSet;
 import java.util.Iterator;
-import java.util.Set;
 import java.util.concurrent.ExecutorService;
+import java.util.function.Consumer;
 import java.util.function.Supplier;
 import java.util.stream.Stream;
 

@@ -104,13 +103,17 @@ public class AutodetectProcessManagerTests extends ESTestCase {
 
     public void testOpenJob_exceedMaxNumJobs() {
         when(jobManager.getJobOrThrowIfUnknown("foo")).thenReturn(createJobDetails("foo"));
-        when(jobProvider.dataCounts("foo")).thenReturn(new DataCounts("foo"));
+        doAnswer(invocationOnMock -> {
+            String jobId = (String) invocationOnMock.getArguments()[0];
+            @SuppressWarnings("unchecked")
+            Consumer<DataCounts> handler = (Consumer<DataCounts>) invocationOnMock.getArguments()[1];
+            handler.accept(new DataCounts(jobId));
+            return null;
+        }).when(jobProvider).dataCounts(any(), any(), any());
+
         when(jobManager.getJobOrThrowIfUnknown("bar")).thenReturn(createJobDetails("bar"));
-        when(jobProvider.dataCounts("bar")).thenReturn(new DataCounts("bar"));
         when(jobManager.getJobOrThrowIfUnknown("baz")).thenReturn(createJobDetails("baz"));
-        when(jobProvider.dataCounts("baz")).thenReturn(new DataCounts("baz"));
         when(jobManager.getJobOrThrowIfUnknown("foobar")).thenReturn(createJobDetails("foobar"));
-        when(jobProvider.dataCounts("foobar")).thenReturn(new DataCounts("foobar"));
 
         Client client = mock(Client.class);
         ThreadPool threadPool = mock(ThreadPool.class);

@@ -275,7 +278,13 @@ public class AutodetectProcessManagerTests extends ESTestCase {
         doThrow(new EsRejectedExecutionException("")).when(executorService).execute(any());
         when(threadPool.executor(anyString())).thenReturn(executorService);
         when(jobManager.getJobOrThrowIfUnknown("my_id")).thenReturn(createJobDetails("my_id"));
-        when(jobProvider.dataCounts("my_id")).thenReturn(new DataCounts("my_id"));
+        doAnswer(invocationOnMock -> {
+            String jobId = (String) invocationOnMock.getArguments()[0];
+            @SuppressWarnings("unchecked")
+            Consumer<DataCounts> handler = (Consumer<DataCounts>) invocationOnMock.getArguments()[1];
+            handler.accept(new DataCounts(jobId));
+            return null;
+        }).when(jobProvider).dataCounts(eq("my_id"), any(), any());
 
         AutodetectResultsParser parser = mock(AutodetectResultsParser.class);
         AutodetectProcess autodetectProcess = mock(AutodetectProcess.class);
ScheduledJobRunnerTests.java:

@@ -15,6 +15,7 @@ import org.elasticsearch.cluster.ClusterState;
 import org.elasticsearch.cluster.metadata.MetaData;
 import org.elasticsearch.cluster.service.ClusterService;
 import org.elasticsearch.common.unit.TimeValue;
+import org.elasticsearch.mock.orig.Mockito;
 import org.elasticsearch.search.SearchRequestParsers;
 import org.elasticsearch.test.ESTestCase;
 import org.elasticsearch.threadpool.ThreadPool;

@@ -85,7 +86,13 @@ public class ScheduledJobRunnerTests extends ESTestCase {
         }).when(client).execute(same(UpdateSchedulerStatusAction.INSTANCE), any(), any());
 
         JobProvider jobProvider = mock(JobProvider.class);
-        when(jobProvider.dataCounts(anyString())).thenReturn(new DataCounts("foo"));
+        Mockito.doAnswer(invocationOnMock -> {
+            String jobId = (String) invocationOnMock.getArguments()[0];
+            @SuppressWarnings("unchecked")
+            Consumer<DataCounts> handler = (Consumer<DataCounts>) invocationOnMock.getArguments()[1];
+            handler.accept(new DataCounts(jobId));
+            return null;
+        }).when(jobProvider).dataCounts(any(), any(), any());
         dataExtractorFactory = mock(DataExtractorFactory.class);
         Auditor auditor = mock(Auditor.class);
         threadPool = mock(ThreadPool.class);
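All of the test updates follow one recipe: when(...).thenReturn(...) cannot stub the new void, callback-taking signature, so doAnswer(...) extracts the handler from the invocation and calls it inline, keeping the tests synchronous. A self-contained version of that stubbing (assumes Mockito 2+ for getArgument(); Provider is an illustrative stand-in for JobProvider):

import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;

import java.util.function.Consumer;

public class CallbackStubbing {
    // Stand-in for the provider interface under test.
    interface Provider {
        void dataCounts(String jobId, Consumer<String> handler, Consumer<Exception> errorHandler);
    }

    public static void main(String[] args) {
        Provider provider = mock(Provider.class);
        // There is nothing to return from a void method; instead, pull the
        // success handler out of the invocation and invoke it immediately.
        doAnswer(invocation -> {
            String jobId = invocation.getArgument(0);
            Consumer<String> handler = invocation.getArgument(1);
            handler.accept(jobId + ":counts");
            return null;   // void method
        }).when(provider).dataCounts(any(), any(), any());

        provider.dataCounts("foo", System.out::println, Throwable::printStackTrace);
    }
}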