[TEST] Increased datafeed logging
Original commit: elastic/x-pack-elasticsearch@403bc28dea
parent: d3a2e34f9d
commit: 5dc8c71e65
Changes to `DatafeedJob`:

```diff
@@ -165,6 +165,7 @@ class DatafeedJob {
         DataCounts counts;
         try (InputStream in = extractedData.get()) {
             counts = postData(in, XContentType.JSON);
+            LOGGER.trace("[{}] Processed another {} records", jobId, counts.getProcessedRecordCount());
         } catch (Exception e) {
             if (e instanceof InterruptedException) {
                 Thread.currentThread().interrupt();
```
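The added trace line uses Log4j-style `{}` placeholders: the message string is only assembled when TRACE is enabled for the logger, though the arguments (here `counts.getProcessedRecordCount()`) are still evaluated at the call site. A minimal sketch of the pattern, assuming Log4j 2 (which Elasticsearch uses) and hypothetical names:

```java
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

class ParameterizedLoggingSketch {
    private static final Logger LOGGER = LogManager.getLogger(ParameterizedLoggingSketch.class);

    void logProcessed(String jobId, long processedRecordCount) {
        // Formatting only happens if TRACE is enabled; no string is built otherwise.
        LOGGER.trace("[{}] Processed another {} records", jobId, processedRecordCount);

        // For genuinely expensive arguments, an explicit level check (or a
        // Supplier overload) skips the argument evaluation entirely:
        if (LOGGER.isTraceEnabled()) {
            LOGGER.trace("[{}] detail: {}", jobId, buildExpensiveDetail());
        }
    }

    private String buildExpensiveDetail() {
        return "stand-in for costly work"; // hypothetical helper
    }
}
```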
```diff
@@ -190,6 +191,8 @@ class DatafeedJob {
         }
 
         lastEndTimeMs = Math.max(lastEndTimeMs == null ? 0 : lastEndTimeMs, end - 1);
+        LOGGER.debug("[{}] Complete iterating data extractor [{}], [{}], [{}], [{}], [{}]", jobId, error, recordCount,
+                lastEndTimeMs, isRunning(), dataExtractor.isCancelled());
 
         // We can now throw any stored error as we have updated time.
         if (error != null) {
```
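The two new debug lines report the loop's exit state (stored error, record count, checkpoint, run and cancel flags) in a single message. The context line above them also shows the checkpoint idiom the log reports on: a nullable `Long` that is advanced but never moved backwards. An illustrative sketch, with names borrowed from the diff rather than the actual `DatafeedJob` code:

```java
class CheckpointSketch {
    // Nullable until the first batch has been processed, as in the diff.
    private Long lastEndTimeMs;

    void advanceTo(long end) {
        // `end - 1` suggests `end` is an exclusive bound on the extracted range;
        // Math.max keeps the checkpoint monotonic even if a later call passes
        // an earlier `end`.
        lastEndTimeMs = Math.max(lastEndTimeMs == null ? 0 : lastEndTimeMs, end - 1);
    }

    Long lastEndTimeMs() {
        return lastEndTimeMs;
    }
}
```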
```diff
@@ -235,6 +238,7 @@ class DatafeedJob {
 
     private void flushJob(FlushJobAction.Request flushRequest) {
         try {
+            LOGGER.trace("[" + jobId + "] Sending flush request");
            client.execute(FlushJobAction.INSTANCE, flushRequest).actionGet();
         } catch (Exception e) {
             LOGGER.debug("[" + jobId + "] error while flushing job", e);
```
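Unlike the placeholder-based lines in the earlier hunks, both `flushJob` messages build their strings with `+` concatenation, which runs even when TRACE and DEBUG are disabled. For two short operands the cost is negligible, but the parameterized form is the cheaper and more consistent idiom; a sketch of the equivalent calls (illustrative only, the commit keeps the concatenation):

```java
import org.apache.logging.log4j.LogManager;
import org.apache.logging.log4j.Logger;

class FlushLoggingSketch {
    private static final Logger LOGGER = LogManager.getLogger(FlushLoggingSketch.class);

    void logFlush(String jobId, Exception e) {
        // Parameterized equivalent of the concatenated trace message.
        LOGGER.trace("[{}] Sending flush request", jobId);

        // Log4j 2 treats a trailing Throwable without a matching placeholder
        // as the logged exception, so the stack trace is still emitted.
        LOGGER.debug("[{}] error while flushing job", jobId, e);
    }
}
```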
Changes to `MlDistributedFailureIT`:

```diff
@@ -53,7 +53,7 @@ public class MlDistributedFailureIT extends BaseMlIntegTestCase {
     }
 
     @TestLogging("org.elasticsearch.xpack.ml.action:DEBUG,org.elasticsearch.xpack.persistent:TRACE," +
-            "org.elasticsearch.cluster.service:TRACE")
+            "org.elasticsearch.xpack.ml.datafeed:TRACE")
     public void testLoseDedicatedMasterNode() throws Exception {
         internalCluster().ensureAtMostNumDataNodes(0);
         logger.info("Starting dedicated master node...");
```
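`@TestLogging` is the Elasticsearch test-framework annotation (`org.elasticsearch.test.junit.annotations.TestLogging`): it raises the named loggers to the given levels for the duration of the annotated test and restores the previous levels afterwards, taking a comma-separated list of `logger:LEVEL` pairs. The change here swaps `org.elasticsearch.cluster.service:TRACE` for `org.elasticsearch.xpack.ml.datafeed:TRACE`, matching the new trace and debug statements in `DatafeedJob`. A usage sketch, assuming the annotation's signature from this era of the codebase (later versions also require a `reason` attribute) and a hypothetical test class:

```java
import org.elasticsearch.test.ESIntegTestCase;
import org.elasticsearch.test.junit.annotations.TestLogging;

public class DatafeedLoggingSketchIT extends ESIntegTestCase {

    // Hypothetical test: each entry is "<logger>:<LEVEL>". The framework
    // applies the levels before the test body runs and resets them after.
    @TestLogging("org.elasticsearch.xpack.ml.datafeed:TRACE," +
            "org.elasticsearch.xpack.ml.action:DEBUG")
    public void testSomethingWithVerboseDatafeedLogs() throws Exception {
        // test body elided
    }
}
```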