Original commit: elastic/x-pack-elasticsearch@c83c3ebcc1
This commit is contained in:
David Kyle 2017-12-04 13:48:24 +00:00
parent cb9314ba78
commit d39c8b76db
1 changed file with 16 additions and 13 deletions

View File

@@ -50,7 +50,7 @@ class DatafeedJob {
private final Supplier<Long> currentTimeSupplier;
private volatile long lookbackStartTimeMs;
private volatile long lastEndTimeMs;
private volatile Long lastEndTimeMs;
private AtomicBoolean running = new AtomicBoolean(true);
private volatile boolean isIsolated;
@@ -122,6 +122,7 @@ class DatafeedJob {
}
private long skipToStartTime(long startTime) {
if (lastEndTimeMs != null) {
if (lastEndTimeMs + 1 > startTime) {
// start time is before last checkpoint, thus continue from checkpoint
return lastEndTimeMs + 1;
@@ -133,9 +134,11 @@ class DatafeedJob {
LOGGER.info("Skipped to time [" + flushResponse.getLastFinalizedBucketEnd().getTime() + "]");
return flushResponse.getLastFinalizedBucketEnd().getTime();
}
return startTime;
}
long runRealtime() throws Exception {
long start = Math.max(lookbackStartTimeMs, lastEndTimeMs + 1);
long start = lastEndTimeMs == null ? lookbackStartTimeMs : Math.max(lookbackStartTimeMs, lastEndTimeMs + 1);
long nowMinusQueryDelay = currentTimeSupplier.get() - queryDelayMs;
long end = toIntervalStartEpochMs(nowMinusQueryDelay);
FlushJobAction.Request request = new FlushJobAction.Request(jobId);
@@ -238,7 +241,7 @@ class DatafeedJob {
}
}
lastEndTimeMs = Math.max(lastEndTimeMs, end - 1);
lastEndTimeMs = Math.max(lastEndTimeMs == null ? 0 : lastEndTimeMs, end - 1);
LOGGER.debug("[{}] Complete iterating data extractor [{}], [{}], [{}], [{}], [{}]", jobId, error, recordCount,
lastEndTimeMs, isRunning(), dataExtractor.isCancelled());
@@ -310,7 +313,7 @@ class DatafeedJob {
/**
* Visible for testing
*/
long lastEndTimeMs() {
Long lastEndTimeMs() {
return lastEndTimeMs;
}