Merge branch 'master' of github.com:elastic/prelert-legacy
Original commit: elastic/x-pack-elasticsearch@c198cef9d3
commit b2917376f0

build.gradle
@@ -1,4 +1,4 @@
-description = 'Builds the Ml Engine native binaries and Java classes'
+description = 'Builds the Machine Learning Java classes and UI'

 import org.gradle.internal.os.OperatingSystem
 import org.gradle.plugins.ide.eclipse.model.SourceFolder
@@ -20,53 +20,25 @@ if (envMlAwsSecretKey != null) {
     project.ext.mlAwsSecretKey = PRELERT_AWS_SECRET_ACCESS_KEY
 }

-String cppCrossCompile = System.env.CPP_CROSS_COMPILE
-if (cppCrossCompile != null) {
-    project.ext.cppCrossCompile = cppCrossCompile
-} else if (project.hasProperty("CPP_CROSS_COMPILE")) {
-    project.ext.cppCrossCompile = CPP_CROSS_COMPILE
+String envCppLocalDists = System.env.CPP_LOCAL_DISTS
+if (envCppLocalDists != null) {
+    project.ext.cppLocalDists = envCppLocalDists
+} else if (project.hasProperty("CPP_LOCAL_DISTS")) {
+    project.ext.cppLocalDists = CPP_LOCAL_DISTS
 } else {
-    project.ext.cppCrossCompile = ''
-}
-if (project.ext.cppCrossCompile != '' && project.ext.cppCrossCompile != 'macosx') {
-    throw new GradleException("CPP_CROSS_COMPILE property must be empty or 'macosx'")
+    project.ext.cppLocalDists = ''
 }

 project.ext.isWindows = OperatingSystem.current().isWindows()
 project.ext.isLinux = OperatingSystem.current().isLinux()
 project.ext.isMacOsX = OperatingSystem.current().isMacOsX()

 project.ext.bash = project.isWindows ? "C:\\Program Files\\Git\\bin\\bash" : "/bin/bash"

 String uploadEnabledStr = properties.get('upload', 'false')
 if (['true', 'false'].contains(uploadEnabledStr) == false) {
     throw new GradleException("upload must be true or false, got ${uploadEnabledStr}")
 }
 project.ext.uploadEnabled = uploadEnabledStr == 'true'

-// C++ build can be explicitly enabled or disabled, or if neither is chosen
-// it will be enabled if the necessary 3rd party dependencies are present
-String cppEnabledStr = properties.get('xpack.cpp.build', 'auto')
-if (['true', 'false', 'auto'].contains(cppEnabledStr) == false) {
-    throw new GradleException("xpack.cpp.build must be true or false, got ${cppEnabledStr}")
-}
-project.ext.cppEnabled = cppEnabledStr == 'true'
-if (cppEnabledStr == 'auto') {
-    // Disable the C++ build if the 3rd party tools/libraries aren't available
-    String[] cmdArray = [ project.ext.bash, '-c',
-            'export CPP_CROSS_COMPILE=' + project.ext.cppCrossCompile + ' && source cpp/set_env.sh && cpp/3rd_party/3rd_party.sh --check' ]
-    Process checkProcess = Runtime.getRuntime().exec(cmdArray, null, rootDir)
-    StringBuffer checkOutput = new StringBuffer()
-    checkProcess.consumeProcessOutputStream(checkOutput)
-    if (checkProcess.waitFor() == 0) {
-        project.ext.cppEnabled = true
-    } else {
-        println 'C++ dependencies not available - disabling C++ build'
-        println checkOutput
-        project.ext.cppEnabled = false
-    }
-}
-
 allprojects {
     group = 'org.elasticsearch.ml'
     version = VersionProperties.elasticsearch
@@ -123,7 +95,7 @@ task assemble(dependsOn: bundlePack) {
     description = 'Assembles the outputs of this project.'
 }

-task test(dependsOn: [':elasticsearch:test', ':cpp:test', ':kibana:test']) {
+task test(dependsOn: [':elasticsearch:test', ':kibana:test']) {
     group = 'Build'
     description = 'Assembles and tests this project.'
 }
@@ -139,14 +111,14 @@ task clean(type: Delete) {
     delete 'build'
 }

-task uploadPackToS3(type: UploadS3Task, dependsOn: [build]) {
+task uploadPackToS3(type: UploadS3Task, dependsOn: build) {
     enabled project.uploadEnabled
     description = 'upload pack zip to S3 Bucket'
     bucket 'prelert-artifacts'
     upload bundlePack.outputs.files.singleFile, "maven/${project.group}/${packArtifactName}/${project.version}/${bundlePack.outputs.files.singleFile.name}"
 }

-task deploy(dependsOn: [uploadPackToS3, ':cpp:upload']) {
+task deploy(dependsOn: uploadPackToS3) {

 }
@@ -199,7 +171,6 @@ allprojects {
     }
 }

 // intellij configuration
 allprojects {
     apply plugin: 'idea'
-

@@ -5,7 +5,7 @@ apply plugin: 'elasticsearch.esplugin'

 esplugin {
     name 'ml'
-    description 'Ml Plugin'
+    description 'Machine Learning Plugin'
     classname 'org.elasticsearch.xpack.ml.MlPlugin'
 }

@@ -32,8 +32,6 @@ check.dependsOn noBootstrapTest
 noBootstrapTest.mustRunAfter test

 integTest {
-    // Cannot run integration tests when cross compiling
-    enabled project.cppCrossCompile == ''
     cluster {
         //setting 'useNativeProcess', 'true'
         distribution = 'zip'
@@ -43,8 +41,8 @@ integTest
 integTest.mustRunAfter noBootstrapTest

 task downloadCppDist(type: DownloadS3Task) {
-    enabled project.cppEnabled == false
-    description = 'download cpp zips from S3 Bucket'
+    enabled project.cppLocalDists == ''
+    description = 'Download C++ zips from S3 Bucket'
     bucket 'prelert-artifacts'
     destDir file("${buildDir}/cppDist")
     flatten true
@@ -57,12 +55,17 @@ task downloadCppDist(type: DownloadS3Task) {
 }

 bundlePlugin {
-    if (project.cppEnabled) {
-        from { zipTree(project(':cpp').buildZip.outputs.files.singleFile) }
-        dependsOn ':cpp:buildZip'
+    if (project.cppLocalDists) {
+        String localZipFile = 'ml-cpp-' +
+                (project.isWindows ? "windows-x86_64" : (project.isMacOsX ? "darwin-x86_64" :
+                (project.isLinux ? "linux-x86_64" : "sunos-x86_64"))) +
+                "-${project.version}.zip"
+        from { zipTree(cppLocalDists + '/' + localZipFile) }
     } else {
         for (outputFile in downloadCppDist.outputs.files) {
-            from(zipTree(outputFile))
+            from(zipTree(outputFile)) {
+                duplicatesStrategy 'exclude'
+            }
         }
         dependsOn 'downloadCppDist'
     }

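The bundlePlugin change above picks a platform-specific C++ bundle named ml-cpp-<platform>-<version>.zip, falling back to sunos-x86_64 when the build OS is none of Windows, macOS, or Linux. A minimal Java sketch of that naming scheme, assuming a simple os.name check (the cppZipName helper is hypothetical and only illustrates the Groovy ternary chain):

    // Hypothetical helper mirroring the platform selection in bundlePlugin.
    static String cppZipName(String osName, String version) {
        final String platform;
        if (osName.startsWith("Windows")) {
            platform = "windows-x86_64";
        } else if (osName.startsWith("Mac")) {
            platform = "darwin-x86_64";
        } else if (osName.startsWith("Linux")) {
            platform = "linux-x86_64";
        } else {
            platform = "sunos-x86_64"; // same fallback as the build script
        }
        return "ml-cpp-" + platform + "-" + version + ".zip";
    }

    // e.g. cppZipName(System.getProperty("os.name"), "5.0.0")
    //      -> "ml-cpp-linux-x86_64-5.0.0.zip" on a Linux build host
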
@@ -50,6 +50,7 @@ public class CppLogMessageHandler implements Closeable {
     private volatile boolean hasLogStreamEnded;
     private volatile boolean seenFatalError;
     private volatile long pid;
+    private volatile String cppCopyright;

     /**
      * @param jobId May be null or empty if the logs are from a process not associated with a job.
@@ -132,6 +133,10 @@ public class CppLogMessageHandler implements Closeable {
         return pid;
     }

+    public String getCppCopyright() {
+        return cppCopyright;
+    }
+
     /**
      * Expected to be called very infrequently.
      */
@@ -185,12 +190,15 @@ public class CppLogMessageHandler implements Closeable {
                 pid = latestPid;
                 pidLatch.countDown();
             }
+            String latestMessage = msg.getMessage();
+            if (cppCopyright == null && latestMessage.contains("Copyright")) {
+                cppCopyright = latestMessage;
+            }
             // TODO: Is there a way to preserve the original timestamp when re-logging?
             if (jobId != null) {
-                LOGGER.log(level, "[{}] {}/{} {}@{} {}", jobId, msg.getLogger(), latestPid, msg.getFile(), msg.getLine(),
-                        msg.getMessage());
+                LOGGER.log(level, "[{}] {}/{} {}@{} {}", jobId, msg.getLogger(), latestPid, msg.getFile(), msg.getLine(), latestMessage);
             } else {
-                LOGGER.log(level, "{}/{} {}@{} {}", msg.getLogger(), latestPid, msg.getFile(), msg.getLine(), msg.getMessage());
+                LOGGER.log(level, "{}/{} {}@{} {}", msg.getLogger(), latestPid, msg.getFile(), msg.getLine(), latestMessage);
             }
             // TODO: Could send the message for indexing instead of or as well as logging it
         } catch (IOException e) {

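The CppLogMessageHandler change stores the first C++ log message containing "Copyright" and exposes it through getCppCopyright(); later messages never overwrite it because the field is only written while it is still null. The capture logic in isolation (a sketch; the real handler parses CppLogMessage objects and also tracks the process pid):

    private volatile String cppCopyright;

    // Keep only the first message carrying the copyright banner.
    void captureCopyright(String latestMessage) {
        if (cppCopyright == null && latestMessage.contains("Copyright")) {
            cppCopyright = latestMessage;
        }
    }

    public String getCppCopyright() {
        return cppCopyright;
    }
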
@@ -16,9 +16,9 @@ import org.elasticsearch.xpack.ml.job.results.Bucket;

 import java.io.IOException;

-class ElasticsearchBatchedBucketsIterator extends ElasticsearchBatchedResultsIterator<Bucket> {
+class BatchedBucketsIterator extends BatchedResultsIterator<Bucket> {

-    public ElasticsearchBatchedBucketsIterator(Client client, String jobId) {
+    public BatchedBucketsIterator(Client client, String jobId) {
         super(client, jobId, Bucket.RESULT_TYPE_VALUE);
     }

@@ -5,14 +5,59 @@
  */
 package org.elasticsearch.xpack.ml.job.persistence;

+import org.apache.logging.log4j.Logger;
+import org.elasticsearch.action.search.SearchRequest;
+import org.elasticsearch.action.search.SearchResponse;
+import org.elasticsearch.action.search.SearchScrollRequest;
+import org.elasticsearch.client.Client;
+import org.elasticsearch.common.logging.Loggers;
+import org.elasticsearch.index.query.QueryBuilder;
+import org.elasticsearch.search.SearchHit;
+import org.elasticsearch.search.builder.SearchSourceBuilder;
+import org.elasticsearch.search.sort.SortBuilders;
+import org.elasticsearch.xpack.ml.job.results.Bucket;
+
+import java.util.ArrayDeque;
+import java.util.Arrays;
 import java.util.Deque;
 import java.util.NoSuchElementException;
+import java.util.Objects;

 /**
  * An iterator useful to fetch a big number of documents of type T
  * and iterate through them in batches.
  */
-public interface BatchedDocumentsIterator<T> {
+public abstract class BatchedDocumentsIterator<T> {
+    private static final Logger LOGGER = Loggers.getLogger(BatchedDocumentsIterator.class);
+
+    private static final String CONTEXT_ALIVE_DURATION = "5m";
+    private static final int BATCH_SIZE = 10000;
+
+    private final Client client;
+    private final String index;
+    private final ResultsFilterBuilder filterBuilder;
+    private volatile long count;
+    private volatile long totalHits;
+    private volatile String scrollId;
+    private volatile boolean isScrollInitialised;
+
+    public BatchedDocumentsIterator(Client client, String index) {
+        this(client, index, new ResultsFilterBuilder());
+    }
+
+    protected BatchedDocumentsIterator(Client client, String index, QueryBuilder queryBuilder) {
+        this(client, index, new ResultsFilterBuilder(queryBuilder));
+    }
+
+    private BatchedDocumentsIterator(Client client, String index, ResultsFilterBuilder resultsFilterBuilder) {
+        this.client = Objects.requireNonNull(client);
+        this.index = Objects.requireNonNull(index);
+        totalHits = 0;
+        count = 0;
+        filterBuilder = Objects.requireNonNull(resultsFilterBuilder);
+        isScrollInitialised = false;
+    }

     /**
      * Query documents whose timestamp is within the given time range
      *
@@ -20,14 +65,31 @@ public interface BatchedDocumentsIterator<T> {
      * @param endEpochMs the end time as epoch milliseconds (exclusive)
      * @return the iterator itself
      */
-    BatchedDocumentsIterator<T> timeRange(long startEpochMs, long endEpochMs);
+    public BatchedDocumentsIterator<T> timeRange(long startEpochMs, long endEpochMs) {
+        filterBuilder.timeRange(Bucket.TIMESTAMP.getPreferredName(), startEpochMs, endEpochMs);
+        return this;
+    }

     /**
      * Include interim documents
      *
      * @param interimFieldName Name of the include interim field
      */
-    BatchedDocumentsIterator<T> includeInterim(String interimFieldName);
+    public BatchedDocumentsIterator<T> includeInterim(String interimFieldName) {
+        filterBuilder.interim(interimFieldName, true);
+        return this;
+    }
+
+    /**
+     * Returns {@code true} if the iteration has more elements.
+     * (In other words, returns {@code true} if {@link #next} would
+     * return an element rather than throwing an exception.)
+     *
+     * @return {@code true} if the iteration has more elements
+     */
+    public boolean hasNext() {
+        return !isScrollInitialised || count != totalHits;
+    }

     /**
      * The first time next() is called, the search will be performed and the first
@@ -39,14 +101,66 @@ public interface BatchedDocumentsIterator<T> {
      * @return a {@code Deque} with the next batch of documents
      * @throws NoSuchElementException if the iteration has no more elements
      */
-    Deque<T> next();
-
-    /**
-     * Returns {@code true} if the iteration has more elements.
-     * (In other words, returns {@code true} if {@link #next} would
-     * return an element rather than throwing an exception.)
-     *
-     * @return {@code true} if the iteration has more elements
-     */
-    boolean hasNext();
+    public Deque<T> next() {
+        if (!hasNext()) {
+            throw new NoSuchElementException();
+        }
+
+        SearchResponse searchResponse;
+        if (scrollId == null) {
+            searchResponse = initScroll();
+        } else {
+            SearchScrollRequest searchScrollRequest = new SearchScrollRequest(scrollId).scroll(CONTEXT_ALIVE_DURATION);
+            searchResponse = client.searchScroll(searchScrollRequest).actionGet();
+        }
+        scrollId = searchResponse.getScrollId();
+        return mapHits(searchResponse);
+    }
+
+    private SearchResponse initScroll() {
+        LOGGER.trace("ES API CALL: search all of type {} from index {}", getType(), index);
+
+        isScrollInitialised = true;
+
+        SearchRequest searchRequest = new SearchRequest(index);
+        searchRequest.types(getType());
+        searchRequest.scroll(CONTEXT_ALIVE_DURATION);
+        searchRequest.source(new SearchSourceBuilder()
+                .size(BATCH_SIZE)
+                .query(filterBuilder.build())
+                .sort(SortBuilders.fieldSort(ElasticsearchMappings.ES_DOC)));
+
+        SearchResponse searchResponse = client.search(searchRequest).actionGet();
+        totalHits = searchResponse.getHits().getTotalHits();
+        scrollId = searchResponse.getScrollId();
+        return searchResponse;
+    }
+
+    private Deque<T> mapHits(SearchResponse searchResponse) {
+        Deque<T> results = new ArrayDeque<>();
+
+        SearchHit[] hits = searchResponse.getHits().getHits();
+        for (SearchHit hit : hits) {
+            T mapped = map(hit);
+            if (mapped != null) {
+                results.add(mapped);
+            }
+        }
+        count += hits.length;
+
+        if (!hasNext() && scrollId != null) {
+            client.prepareClearScroll().setScrollIds(Arrays.asList(scrollId)).get();
+        }
+        return results;
+    }
+
+    protected abstract String getType();
+
+    /**
+     * Maps the search hit to the document type
+     * @param hit
+     *            the search hit
+     * @return The mapped document or {@code null} if the mapping failed
+     */
+    protected abstract T map(SearchHit hit);
 }

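After this consolidation a concrete iterator only supplies the document type and the hit-to-document mapping; paging, scrolling, and scroll cleanup all live in the base class. A hypothetical subclass as a sketch (MyDoc and MyDoc.fromSource are placeholders, not part of this commit):

    class BatchedMyDocsIterator extends BatchedDocumentsIterator<MyDoc> {

        BatchedMyDocsIterator(Client client, String index) {
            super(client, index);
        }

        @Override
        protected String getType() {
            return "my_doc"; // the Elasticsearch document type to scroll over
        }

        @Override
        protected MyDoc map(SearchHit hit) {
            // Returning null skips the hit; the base class filters nulls out.
            return MyDoc.fromSource(hit.getSourceAsString());
        }
    }
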
@@ -16,8 +16,8 @@ import org.elasticsearch.xpack.ml.job.results.Influencer;

 import java.io.IOException;

-class ElasticsearchBatchedInfluencersIterator extends ElasticsearchBatchedResultsIterator<Influencer> {
-    public ElasticsearchBatchedInfluencersIterator(Client client, String jobId) {
+class BatchedInfluencersIterator extends BatchedResultsIterator<Influencer> {
+    public BatchedInfluencersIterator(Client client, String jobId) {
         super(client, jobId, Influencer.RESULT_TYPE_VALUE);
     }

@@ -16,9 +16,9 @@ import org.elasticsearch.xpack.ml.job.results.AnomalyRecord;

 import java.io.IOException;

-class ElasticsearchBatchedRecordsIterator extends ElasticsearchBatchedResultsIterator<AnomalyRecord> {
+class BatchedRecordsIterator extends BatchedResultsIterator<AnomalyRecord> {

-    public ElasticsearchBatchedRecordsIterator(Client client, String jobId) {
+    public BatchedRecordsIterator(Client client, String jobId) {
         super(client, jobId, AnomalyRecord.RESULT_TYPE_VALUE);
     }

@@ -9,10 +9,10 @@ import org.elasticsearch.client.Client;
 import org.elasticsearch.index.query.TermsQueryBuilder;
 import org.elasticsearch.xpack.ml.job.results.Result;

-public abstract class ElasticsearchBatchedResultsIterator<T>
-        extends ElasticsearchBatchedDocumentsIterator<ElasticsearchBatchedResultsIterator.ResultWithIndex<T>> {
+public abstract class BatchedResultsIterator<T>
+        extends BatchedDocumentsIterator<BatchedResultsIterator.ResultWithIndex<T>> {

-    public ElasticsearchBatchedResultsIterator(Client client, String jobId, String resultType) {
+    public BatchedResultsIterator(Client client, String jobId, String resultType) {
         super(client, AnomalyDetectorsIndex.jobResultsIndexName(jobId),
                 new TermsQueryBuilder(Result.RESULT_TYPE.getPreferredName(), resultType));
     }

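The ResultWithIndex inner class itself is not shown in this diff; judging from the accesses in ScoresUpdater below (current.result, current.indexName), it is presumably a small holder along these lines (a sketch, not the actual source):

    public static class ResultWithIndex<T> {
        public final String indexName; // index the result document lives in
        public final T result;         // the parsed result (Bucket, AnomalyRecord, Influencer, ...)

        public ResultWithIndex(String indexName, T result) {
            this.indexName = indexName;
            this.result = result;
        }
    }
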
@@ -1,137 +0,0 @@
-/*
- * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
- * or more contributor license agreements. Licensed under the Elastic License;
- * you may not use this file except in compliance with the Elastic License.
- */
-package org.elasticsearch.xpack.ml.job.persistence;
-
-import org.apache.logging.log4j.Logger;
-import org.elasticsearch.action.search.SearchRequest;
-import org.elasticsearch.action.search.SearchResponse;
-import org.elasticsearch.action.search.SearchScrollRequest;
-import org.elasticsearch.client.Client;
-import org.elasticsearch.common.logging.Loggers;
-import org.elasticsearch.index.query.QueryBuilder;
-import org.elasticsearch.search.SearchHit;
-import org.elasticsearch.search.builder.SearchSourceBuilder;
-import org.elasticsearch.search.sort.SortBuilders;
-import org.elasticsearch.xpack.ml.job.results.Bucket;
-
-import java.util.ArrayDeque;
-import java.util.Arrays;
-import java.util.Deque;
-import java.util.NoSuchElementException;
-import java.util.Objects;
-
-abstract class ElasticsearchBatchedDocumentsIterator<T> implements BatchedDocumentsIterator<T> {
-    private static final Logger LOGGER = Loggers.getLogger(ElasticsearchBatchedDocumentsIterator.class);
-
-    private static final String CONTEXT_ALIVE_DURATION = "5m";
-    private static final int BATCH_SIZE = 10000;
-
-    private final Client client;
-    private final String index;
-    private final ResultsFilterBuilder filterBuilder;
-    private volatile long count;
-    private volatile long totalHits;
-    private volatile String scrollId;
-    private volatile boolean isScrollInitialised;
-
-    public ElasticsearchBatchedDocumentsIterator(Client client, String index) {
-        this(client, index, new ResultsFilterBuilder());
-    }
-
-    protected ElasticsearchBatchedDocumentsIterator(Client client, String index, QueryBuilder queryBuilder) {
-        this(client, index, new ResultsFilterBuilder(queryBuilder));
-    }
-
-    private ElasticsearchBatchedDocumentsIterator(Client client, String index, ResultsFilterBuilder resultsFilterBuilder) {
-        this.client = Objects.requireNonNull(client);
-        this.index = Objects.requireNonNull(index);
-        totalHits = 0;
-        count = 0;
-        filterBuilder = Objects.requireNonNull(resultsFilterBuilder);
-        isScrollInitialised = false;
-    }
-
-    @Override
-    public BatchedDocumentsIterator<T> timeRange(long startEpochMs, long endEpochMs) {
-        filterBuilder.timeRange(Bucket.TIMESTAMP.getPreferredName(), startEpochMs, endEpochMs);
-        return this;
-    }
-
-    @Override
-    public BatchedDocumentsIterator<T> includeInterim(String interimFieldName) {
-        filterBuilder.interim(interimFieldName, true);
-        return this;
-    }
-
-    @Override
-    public boolean hasNext() {
-        return !isScrollInitialised || count != totalHits;
-    }
-
-    @Override
-    public Deque<T> next() {
-        if (!hasNext()) {
-            throw new NoSuchElementException();
-        }
-
-        SearchResponse searchResponse;
-        if (scrollId == null) {
-            searchResponse = initScroll();
-        } else {
-            SearchScrollRequest searchScrollRequest = new SearchScrollRequest(scrollId).scroll(CONTEXT_ALIVE_DURATION);
-            searchResponse = client.searchScroll(searchScrollRequest).actionGet();
-        }
-        scrollId = searchResponse.getScrollId();
-        return mapHits(searchResponse);
-    }
-
-    private SearchResponse initScroll() {
-        LOGGER.trace("ES API CALL: search all of type {} from index {}", getType(), index);
-
-        isScrollInitialised = true;
-
-        SearchRequest searchRequest = new SearchRequest(index);
-        searchRequest.types(getType());
-        searchRequest.scroll(CONTEXT_ALIVE_DURATION);
-        searchRequest.source(new SearchSourceBuilder()
-                .size(BATCH_SIZE)
-                .query(filterBuilder.build())
-                .sort(SortBuilders.fieldSort(ElasticsearchMappings.ES_DOC)));
-
-        SearchResponse searchResponse = client.search(searchRequest).actionGet();
-        totalHits = searchResponse.getHits().getTotalHits();
-        scrollId = searchResponse.getScrollId();
-        return searchResponse;
-    }
-
-    private Deque<T> mapHits(SearchResponse searchResponse) {
-        Deque<T> results = new ArrayDeque<>();
-
-        SearchHit[] hits = searchResponse.getHits().getHits();
-        for (SearchHit hit : hits) {
-            T mapped = map(hit);
-            if (mapped != null) {
-                results.add(mapped);
-            }
-        }
-        count += hits.length;
-
-        if (!hasNext() && scrollId != null) {
-            client.prepareClearScroll().setScrollIds(Arrays.asList(scrollId)).get();
-        }
-        return results;
-    }
-
-    protected abstract String getType();
-
-    /**
-     * Maps the search hit to the document type
-     * @param hit
-     *            the search hit
-     * @return The mapped document or {@code null} if the mapping failed
-     */
-    protected abstract T map(SearchHit hit);
-}

@@ -491,8 +491,8 @@ public class JobProvider {
      * @param jobId the id of the job for which buckets are requested
      * @return a bucket {@link BatchedDocumentsIterator}
      */
-    public BatchedDocumentsIterator<ElasticsearchBatchedResultsIterator.ResultWithIndex<Bucket>> newBatchedBucketsIterator(String jobId) {
-        return new ElasticsearchBatchedBucketsIterator(client, jobId);
+    public BatchedDocumentsIterator<BatchedResultsIterator.ResultWithIndex<Bucket>> newBatchedBucketsIterator(String jobId) {
+        return new BatchedBucketsIterator(client, jobId);
     }

     /**
@@ -503,9 +503,9 @@ public class JobProvider {
      * @param jobId the id of the job for which buckets are requested
      * @return a record {@link BatchedDocumentsIterator}
      */
-    public BatchedDocumentsIterator<ElasticsearchBatchedResultsIterator.ResultWithIndex<AnomalyRecord>>
+    public BatchedDocumentsIterator<BatchedResultsIterator.ResultWithIndex<AnomalyRecord>>
             newBatchedRecordsIterator(String jobId) {
-        return new ElasticsearchBatchedRecordsIterator(client, jobId);
+        return new BatchedRecordsIterator(client, jobId);
     }

     // TODO (norelease): Use scroll search instead of multiple searches with increasing from
@@ -761,9 +761,9 @@ public class JobProvider {
      * @param jobId the id of the job for which influencers are requested
      * @return an influencer {@link BatchedDocumentsIterator}
      */
-    public BatchedDocumentsIterator<ElasticsearchBatchedResultsIterator.ResultWithIndex<Influencer>>
+    public BatchedDocumentsIterator<BatchedResultsIterator.ResultWithIndex<Influencer>>
             newBatchedInfluencersIterator(String jobId) {
-        return new ElasticsearchBatchedInfluencersIterator(client, jobId);
+        return new BatchedInfluencersIterator(client, jobId);
     }

     /**

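Callers drain these iterators batch by batch, as ScoresUpdater does below. A condensed usage sketch, assuming jobProvider, jobId, start, end, and a process(...) callback are in scope:

    BatchedDocumentsIterator<BatchedResultsIterator.ResultWithIndex<Bucket>> iterator =
            jobProvider.newBatchedBucketsIterator(jobId).timeRange(start, end);

    while (iterator.hasNext()) {
        // Each next() call fetches one scroll page of up to 10000 hits.
        Deque<BatchedResultsIterator.ResultWithIndex<Bucket>> batch = iterator.next();
        for (BatchedResultsIterator.ResultWithIndex<Bucket> item : batch) {
            process(item.result, item.indexName);
        }
    }
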
@@ -10,7 +10,7 @@ import org.elasticsearch.common.logging.Loggers;
 import org.elasticsearch.xpack.ml.job.AnalysisConfig;
 import org.elasticsearch.xpack.ml.job.Job;
 import org.elasticsearch.xpack.ml.job.persistence.BatchedDocumentsIterator;
-import org.elasticsearch.xpack.ml.job.persistence.ElasticsearchBatchedResultsIterator;
+import org.elasticsearch.xpack.ml.job.persistence.BatchedResultsIterator;
 import org.elasticsearch.xpack.ml.job.persistence.JobProvider;
 import org.elasticsearch.xpack.ml.job.persistence.JobRenormalizedResultsPersister;
 import org.elasticsearch.xpack.ml.job.results.AnomalyRecord;
@@ -101,7 +101,7 @@ public class ScoresUpdater {

     private void updateBuckets(Normalizer normalizer, String quantilesState, long endBucketEpochMs,
                                long windowExtensionMs, int[] counts, boolean perPartitionNormalization) {
-        BatchedDocumentsIterator<ElasticsearchBatchedResultsIterator.ResultWithIndex<Bucket>> bucketsIterator =
+        BatchedDocumentsIterator<BatchedResultsIterator.ResultWithIndex<Bucket>> bucketsIterator =
                 jobProvider.newBatchedBucketsIterator(job.getId())
                         .timeRange(calcNormalizationWindowStart(endBucketEpochMs, windowExtensionMs), endBucketEpochMs);

@@ -119,13 +119,13 @@ public class ScoresUpdater {
         while (bucketsIterator.hasNext()) {
             // Get a batch of buckets without their records to calculate
             // how many buckets can be sensibly retrieved
-            Deque<ElasticsearchBatchedResultsIterator.ResultWithIndex<Bucket>> buckets = bucketsIterator.next();
+            Deque<BatchedResultsIterator.ResultWithIndex<Bucket>> buckets = bucketsIterator.next();
             if (buckets.isEmpty()) {
                 break;
             }

             while (!buckets.isEmpty()) {
-                ElasticsearchBatchedResultsIterator.ResultWithIndex<Bucket> current = buckets.removeFirst();
+                BatchedResultsIterator.ResultWithIndex<Bucket> current = buckets.removeFirst();
                 Bucket currentBucket = current.result;
                 if (currentBucket.isNormalizable()) {
                     BucketNormalizable bucketNormalizable = new BucketNormalizable(current.result, current.indexName);
@@ -156,13 +156,13 @@ public class ScoresUpdater {
     }

     private List<RecordNormalizable> bucketRecordsAsNormalizables(long bucketTimeStamp) {
-        BatchedDocumentsIterator<ElasticsearchBatchedResultsIterator.ResultWithIndex<AnomalyRecord>>
+        BatchedDocumentsIterator<BatchedResultsIterator.ResultWithIndex<AnomalyRecord>>
                 recordsIterator = jobProvider.newBatchedRecordsIterator(job.getId())
                         .timeRange(bucketTimeStamp, bucketTimeStamp + 1);

         List<RecordNormalizable> recordNormalizables = new ArrayList<>();
         while (recordsIterator.hasNext()) {
-            for (ElasticsearchBatchedResultsIterator.ResultWithIndex<AnomalyRecord> record : recordsIterator.next() ) {
+            for (BatchedResultsIterator.ResultWithIndex<AnomalyRecord> record : recordsIterator.next() ) {
                 recordNormalizables.add(new RecordNormalizable(record.result, record.indexName));
             }
         }
@@ -211,12 +211,12 @@ public class ScoresUpdater {

     private void updateInfluencers(Normalizer normalizer, String quantilesState, long endBucketEpochMs,
                                    long windowExtensionMs, int[] counts) {
-        BatchedDocumentsIterator<ElasticsearchBatchedResultsIterator.ResultWithIndex<Influencer>> influencersIterator =
+        BatchedDocumentsIterator<BatchedResultsIterator.ResultWithIndex<Influencer>> influencersIterator =
                 jobProvider.newBatchedInfluencersIterator(job.getId())
                         .timeRange(calcNormalizationWindowStart(endBucketEpochMs, windowExtensionMs), endBucketEpochMs);

         while (influencersIterator.hasNext()) {
-            Deque<ElasticsearchBatchedResultsIterator.ResultWithIndex<Influencer>> influencers = influencersIterator.next();
+            Deque<BatchedResultsIterator.ResultWithIndex<Influencer>> influencers = influencersIterator.next();
             if (influencers.isEmpty()) {
                 LOGGER.debug("[{}] No influencers to renormalize for job", job.getId());
                 break;

@@ -21,12 +21,9 @@ import java.io.IOException;

 public class RestCloseJobAction extends BaseRestHandler {

-    private final CloseJobAction.TransportAction closeJobAction;
-
     @Inject
-    public RestCloseJobAction(Settings settings, RestController controller, CloseJobAction.TransportAction closeJobAction) {
+    public RestCloseJobAction(Settings settings, RestController controller) {
         super(settings);
-        this.closeJobAction = closeJobAction;
         controller.registerHandler(RestRequest.Method.POST, MlPlugin.BASE_PATH
                 + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/_close", this);
     }
@@ -37,6 +34,6 @@ public class RestCloseJobAction extends BaseRestHandler {
         if (restRequest.hasParam("close_timeout")) {
            request.setCloseTimeout(TimeValue.parseTimeValue(restRequest.param("close_timeout"), "close_timeout"));
         }
-        return channel -> closeJobAction.execute(request, new AcknowledgedRestListener<>(channel));
+        return channel -> client.execute(CloseJobAction.INSTANCE, request, new AcknowledgedRestListener<>(channel));
     }
 }

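Every REST handler from here down receives the same two-part change: the @Inject-ed TransportAction field and its constructor parameter are dropped, and prepareRequest dispatches through the NodeClient it already receives. The pattern in miniature (FooAction is a placeholder for the concrete action class):

    // Before: the handler held a direct reference to the transport action.
    //     return channel -> fooAction.execute(request, new AcknowledgedRestListener<>(channel));

    // After: the action is resolved through the client at request time.
    @Override
    protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException {
        FooAction.Request request = new FooAction.Request(restRequest.param(Job.ID.getPreferredName()));
        return channel -> client.execute(FooAction.INSTANCE, request, new AcknowledgedRestListener<>(channel));
    }
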
@@ -20,12 +20,9 @@ import java.io.IOException;

 public class RestDeleteJobAction extends BaseRestHandler {

-    private final DeleteJobAction.TransportAction transportDeleteJobAction;
-
     @Inject
-    public RestDeleteJobAction(Settings settings, RestController controller, DeleteJobAction.TransportAction transportDeleteJobAction) {
+    public RestDeleteJobAction(Settings settings, RestController controller) {
         super(settings);
-        this.transportDeleteJobAction = transportDeleteJobAction;
         controller.registerHandler(RestRequest.Method.DELETE, MlPlugin.BASE_PATH
                 + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}", this);
     }
@@ -33,6 +30,6 @@ public class RestDeleteJobAction extends BaseRestHandler {
     @Override
     protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException {
         DeleteJobAction.Request deleteJobRequest = new DeleteJobAction.Request(restRequest.param(Job.ID.getPreferredName()));
-        return channel -> transportDeleteJobAction.execute(deleteJobRequest, new AcknowledgedRestListener<>(channel));
+        return channel -> client.execute(DeleteJobAction.INSTANCE, deleteJobRequest, new AcknowledgedRestListener<>(channel));
     }
 }

@@ -26,13 +26,9 @@ public class RestFlushJobAction extends BaseRestHandler {
     private final String DEFAULT_END = "";
     private final String DEFAULT_ADVANCE_TIME = "";

-    private final FlushJobAction.TransportAction flushJobAction;
-
     @Inject
-    public RestFlushJobAction(Settings settings, RestController controller,
-            FlushJobAction.TransportAction flushJobAction) {
+    public RestFlushJobAction(Settings settings, RestController controller) {
         super(settings);
-        this.flushJobAction = flushJobAction;
         controller.registerHandler(RestRequest.Method.POST, MlPlugin.BASE_PATH
                 + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/_flush", this);
     }
@@ -53,6 +49,6 @@ public class RestFlushJobAction extends BaseRestHandler {
             request.setAdvanceTime(restRequest.param(FlushJobAction.Request.ADVANCE_TIME.getPreferredName(), DEFAULT_ADVANCE_TIME));
         }

-        return channel -> flushJobAction.execute(request, new AcknowledgedRestListener<>(channel));
+        return channel -> client.execute(FlushJobAction.INSTANCE, request, new AcknowledgedRestListener<>(channel));
     }
 }

@@ -20,12 +20,9 @@ import java.io.IOException;

 public class RestGetJobsAction extends BaseRestHandler {

-    private final GetJobsAction.TransportAction transportGetJobAction;
-
     @Inject
-    public RestGetJobsAction(Settings settings, RestController controller, GetJobsAction.TransportAction transportGetJobAction) {
+    public RestGetJobsAction(Settings settings, RestController controller) {
         super(settings);
-        this.transportGetJobAction = transportGetJobAction;

         controller.registerHandler(RestRequest.Method.GET, MlPlugin.BASE_PATH
                 + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}", this);
@@ -34,6 +31,6 @@ public class RestGetJobsAction extends BaseRestHandler {
     @Override
     protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException {
         GetJobsAction.Request request = new GetJobsAction.Request(restRequest.param(Job.ID.getPreferredName()));
-        return channel -> transportGetJobAction.execute(request, new RestToXContentListener<>(channel));
+        return channel -> client.execute(GetJobsAction.INSTANCE, request, new RestToXContentListener<>(channel));
     }
 }

@@ -20,14 +20,9 @@ import java.io.IOException;

 public class RestGetJobsStatsAction extends BaseRestHandler {

-    private final GetJobsStatsAction.TransportAction transportGetJobsStatsAction;
-
     @Inject
-    public RestGetJobsStatsAction(Settings settings, RestController controller,
-            GetJobsStatsAction.TransportAction transportGetJobsStatsAction) {
+    public RestGetJobsStatsAction(Settings settings, RestController controller) {
         super(settings);
-        this.transportGetJobsStatsAction = transportGetJobsStatsAction;

         controller.registerHandler(RestRequest.Method.GET, MlPlugin.BASE_PATH
                 + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/_stats", this);
     }
@@ -35,6 +30,6 @@ public class RestGetJobsStatsAction extends BaseRestHandler {
     @Override
     protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException {
         GetJobsStatsAction.Request request = new GetJobsStatsAction.Request(restRequest.param(Job.ID.getPreferredName()));
-        return channel -> transportGetJobsStatsAction.execute(request, new RestToXContentListener<>(channel));
+        return channel -> client.execute(GetJobsStatsAction.INSTANCE, request, new RestToXContentListener<>(channel));
     }
 }

@@ -14,20 +14,17 @@ import org.elasticsearch.rest.RestController;
 import org.elasticsearch.rest.RestRequest;
 import org.elasticsearch.rest.action.AcknowledgedRestListener;
 import org.elasticsearch.xpack.ml.MlPlugin;
-import org.elasticsearch.xpack.ml.action.PostDataAction;
 import org.elasticsearch.xpack.ml.action.OpenJobAction;
+import org.elasticsearch.xpack.ml.action.PostDataAction;
 import org.elasticsearch.xpack.ml.job.Job;

 import java.io.IOException;

 public class RestOpenJobAction extends BaseRestHandler {

-    private final OpenJobAction.TransportAction openJobAction;
-
     @Inject
-    public RestOpenJobAction(Settings settings, RestController controller, OpenJobAction.TransportAction openJobAction) {
+    public RestOpenJobAction(Settings settings, RestController controller) {
         super(settings);
-        this.openJobAction = openJobAction;
         controller.registerHandler(RestRequest.Method.POST, MlPlugin.BASE_PATH
                 + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/_open", this);
     }
@@ -41,6 +38,6 @@ public class RestOpenJobAction extends BaseRestHandler {
                     restRequest.param(OpenJobAction.Request.OPEN_TIMEOUT.getPreferredName()),
                     OpenJobAction.Request.OPEN_TIMEOUT.getPreferredName()));
         }
-        return channel -> openJobAction.execute(request, new AcknowledgedRestListener<>(channel));
+        return channel -> client.execute(OpenJobAction.INSTANCE, request, new AcknowledgedRestListener<>(channel));
     }
 }

@@ -24,12 +24,9 @@ public class RestPostDataAction extends BaseRestHandler {
     private static final String DEFAULT_RESET_START = "";
     private static final String DEFAULT_RESET_END = "";

-    private final PostDataAction.TransportAction transportPostDataAction;
-
     @Inject
-    public RestPostDataAction(Settings settings, RestController controller, PostDataAction.TransportAction transportPostDataAction) {
+    public RestPostDataAction(Settings settings, RestController controller) {
         super(settings);
-        this.transportPostDataAction = transportPostDataAction;
         controller.registerHandler(RestRequest.Method.POST, MlPlugin.BASE_PATH
                 + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/_data", this);
     }
@@ -43,6 +40,6 @@ public class RestPostDataAction extends BaseRestHandler {
         request.setResetEnd(restRequest.param(PostDataAction.Request.RESET_END.getPreferredName(), DEFAULT_RESET_END));
         request.setContent(restRequest.content());

-        return channel -> transportPostDataAction.execute(request, new RestStatusToXContentListener<>(channel));
+        return channel -> client.execute(PostDataAction.INSTANCE, request, new RestStatusToXContentListener<>(channel));
     }
 }

@@ -21,12 +21,9 @@ import java.io.IOException;

 public class RestPutJobAction extends BaseRestHandler {

-    private final PutJobAction.TransportAction transportPutJobAction;
-
     @Inject
-    public RestPutJobAction(Settings settings, RestController controller, PutJobAction.TransportAction transportPutJobAction) {
+    public RestPutJobAction(Settings settings, RestController controller) {
         super(settings);
-        this.transportPutJobAction = transportPutJobAction;
         controller.registerHandler(RestRequest.Method.PUT,
                 MlPlugin.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}", this);
     }
@@ -38,7 +35,7 @@ public class RestPutJobAction extends BaseRestHandler {
         PutJobAction.Request putJobRequest = PutJobAction.Request.parseRequest(jobId, parser);
         boolean overwrite = restRequest.paramAsBoolean("overwrite", false);
         putJobRequest.setOverwrite(overwrite);
-        return channel -> transportPutJobAction.execute(putJobRequest, new RestToXContentListener<>(channel));
+        return channel -> client.execute(PutJobAction.INSTANCE, putJobRequest, new RestToXContentListener<>(channel));
     }

 }

@@ -20,12 +20,9 @@ import java.io.IOException;

 public class RestDeleteListAction extends BaseRestHandler {

-    private final DeleteListAction.TransportAction transportAction;
-
     @Inject
-    public RestDeleteListAction(Settings settings, RestController controller, DeleteListAction.TransportAction transportAction) {
+    public RestDeleteListAction(Settings settings, RestController controller) {
         super(settings);
-        this.transportAction = transportAction;
         controller.registerHandler(RestRequest.Method.DELETE,
                 MlPlugin.BASE_PATH + "lists/{" + Request.LIST_ID.getPreferredName() + "}", this);
     }
@@ -33,7 +30,7 @@ public class RestDeleteListAction extends BaseRestHandler {
     @Override
     protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException {
         Request request = new Request(restRequest.param(Request.LIST_ID.getPreferredName()));
-        return channel -> transportAction.execute(request, new AcknowledgedRestListener<>(channel));
+        return channel -> client.execute(DeleteListAction.INSTANCE, request, new AcknowledgedRestListener<>(channel));
     }

 }

@@ -22,12 +22,9 @@ import java.io.IOException;

 public class RestGetListAction extends BaseRestHandler {

-    private final GetListAction.TransportAction transportGetListAction;
-
     @Inject
-    public RestGetListAction(Settings settings, RestController controller, GetListAction.TransportAction transportGetListAction) {
+    public RestGetListAction(Settings settings, RestController controller) {
         super(settings);
-        this.transportGetListAction = transportGetListAction;
         controller.registerHandler(RestRequest.Method.GET, MlPlugin.BASE_PATH + "lists/{" + ListDocument.ID.getPreferredName() + "}",
                 this);
         controller.registerHandler(RestRequest.Method.GET, MlPlugin.BASE_PATH + "lists/", this);
@@ -46,7 +43,7 @@ public class RestGetListAction extends BaseRestHandler {
             getListRequest.setPageParams(new PageParams(restRequest.paramAsInt(PageParams.FROM.getPreferredName(), PageParams.DEFAULT_FROM),
                     restRequest.paramAsInt(PageParams.SIZE.getPreferredName(), PageParams.DEFAULT_SIZE)));
         }
-        return channel -> transportGetListAction.execute(getListRequest, new RestStatusToXContentListener<>(channel));
+        return channel -> client.execute(GetListAction.INSTANCE, getListRequest, new RestStatusToXContentListener<>(channel));
     }

 }

@@ -20,12 +20,9 @@ import java.io.IOException;

 public class RestPutListAction extends BaseRestHandler {

-    private final PutListAction.TransportAction transportCreateListAction;
-
     @Inject
-    public RestPutListAction(Settings settings, RestController controller, PutListAction.TransportAction transportCreateListAction) {
+    public RestPutListAction(Settings settings, RestController controller) {
         super(settings);
-        this.transportCreateListAction = transportCreateListAction;
         controller.registerHandler(RestRequest.Method.PUT, MlPlugin.BASE_PATH + "lists", this);
     }

@@ -33,7 +30,7 @@ public class RestPutListAction extends BaseRestHandler {
     protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException {
         XContentParser parser = restRequest.contentOrSourceParamParser();
         PutListAction.Request putListRequest = PutListAction.Request.parseRequest(parser);
-        return channel -> transportCreateListAction.execute(putListRequest, new AcknowledgedRestListener<>(channel));
+        return channel -> client.execute(PutListAction.INSTANCE, putListRequest, new AcknowledgedRestListener<>(channel));
     }

 }

@@ -21,13 +21,9 @@ import java.io.IOException;

 public class RestDeleteModelSnapshotAction extends BaseRestHandler {

-    private final DeleteModelSnapshotAction.TransportAction transportAction;
-
     @Inject
-    public RestDeleteModelSnapshotAction(Settings settings, RestController controller,
-            DeleteModelSnapshotAction.TransportAction transportAction) {
+    public RestDeleteModelSnapshotAction(Settings settings, RestController controller) {
         super(settings);
-        this.transportAction = transportAction;
         controller.registerHandler(RestRequest.Method.DELETE, MlPlugin.BASE_PATH + "anomaly_detectors/{"
                 + Job.ID.getPreferredName() + "}/model_snapshots/{" + ModelSnapshot.SNAPSHOT_ID.getPreferredName() + "}", this);
     }
@@ -38,6 +34,6 @@ public class RestDeleteModelSnapshotAction extends BaseRestHandler {
                 restRequest.param(Job.ID.getPreferredName()),
                 restRequest.param(ModelSnapshot.SNAPSHOT_ID.getPreferredName()));

-        return channel -> transportAction.execute(deleteModelSnapshot, new AcknowledgedRestListener<>(channel));
+        return channel -> client.execute(DeleteModelSnapshotAction.INSTANCE, deleteModelSnapshot, new AcknowledgedRestListener<>(channel));
     }
 }

@@ -31,13 +31,9 @@ public class RestGetModelSnapshotsAction extends BaseRestHandler {
     private final String DEFAULT_DESCRIPTION = null;
     private final boolean DEFAULT_DESC_ORDER = true;

-    private final GetModelSnapshotsAction.TransportAction transportGetModelSnapshotsAction;
-
     @Inject
-    public RestGetModelSnapshotsAction(Settings settings, RestController controller,
-            GetModelSnapshotsAction.TransportAction transportGetModelSnapshotsAction) {
+    public RestGetModelSnapshotsAction(Settings settings, RestController controller) {
         super(settings);
-        this.transportGetModelSnapshotsAction = transportGetModelSnapshotsAction;
         controller.registerHandler(RestRequest.Method.GET, MlPlugin.BASE_PATH + "anomaly_detectors/{"
                 + Job.ID.getPreferredName() + "}/model_snapshots/", this);
         // endpoints that support body parameters must also accept POST
@@ -70,6 +66,6 @@ public class RestGetModelSnapshotsAction extends BaseRestHandler {
                     restRequest.paramAsInt(PageParams.SIZE.getPreferredName(), PageParams.DEFAULT_SIZE)));
         }

-        return channel -> transportGetModelSnapshotsAction.execute(getModelSnapshots, new RestToXContentListener<>(channel));
+        return channel -> client.execute(GetModelSnapshotsAction.INSTANCE, getModelSnapshots, new RestToXContentListener<>(channel));
     }
 }

@@ -21,18 +21,14 @@ import java.io.IOException;

 public class RestRevertModelSnapshotAction extends BaseRestHandler {

-    private final RevertModelSnapshotAction.TransportAction transportAction;
-
     private final String TIME_DEFAULT = null;
     private final String SNAPSHOT_ID_DEFAULT = null;
     private final String DESCRIPTION_DEFAULT = null;
     private final boolean DELETE_INTERVENING_DEFAULT = false;

     @Inject
-    public RestRevertModelSnapshotAction(Settings settings, RestController controller,
-            RevertModelSnapshotAction.TransportAction transportAction) {
+    public RestRevertModelSnapshotAction(Settings settings, RestController controller) {
         super(settings);
-        this.transportAction = transportAction;
         controller.registerHandler(RestRequest.Method.POST,
                 MlPlugin.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/model_snapshots/_revert",
                 this);
@@ -54,6 +50,6 @@ public class RestRevertModelSnapshotAction extends BaseRestHandler {
             request.setDeleteInterveningResults(restRequest
                     .paramAsBoolean(RevertModelSnapshotAction.Request.DELETE_INTERVENING.getPreferredName(), DELETE_INTERVENING_DEFAULT));
         }
-        return channel -> transportAction.execute(request, new RestStatusToXContentListener<>(channel));
+        return channel -> client.execute(RevertModelSnapshotAction.INSTANCE, request, new RestStatusToXContentListener<>(channel));
     }
 }

@@ -22,14 +22,9 @@ import java.io.IOException;

 public class RestUpdateModelSnapshotAction extends BaseRestHandler {

-    private final UpdateModelSnapshotAction.TransportAction transportAction;
-
     @Inject
-    public RestUpdateModelSnapshotAction(Settings settings, RestController controller,
-            UpdateModelSnapshotAction.TransportAction transportAction) {
+    public RestUpdateModelSnapshotAction(Settings settings, RestController controller) {
         super(settings);
-        this.transportAction = transportAction;

         controller.registerHandler(RestRequest.Method.POST, MlPlugin.BASE_PATH + "anomaly_detectors/{"
                 + Job.ID.getPreferredName() + "}/model_snapshots/{" + ModelSnapshot.SNAPSHOT_ID +"}/_update",
                 this);
@@ -43,6 +38,7 @@ public class RestUpdateModelSnapshotAction extends BaseRestHandler {
                 restRequest.param(ModelSnapshot.SNAPSHOT_ID.getPreferredName()),
                 parser);

-        return channel -> transportAction.execute(getModelSnapshots, new RestStatusToXContentListener<>(channel));
+        return channel ->
+                client.execute(UpdateModelSnapshotAction.INSTANCE, getModelSnapshots, new RestStatusToXContentListener<>(channel));
     }
 }

@@ -23,12 +23,9 @@ import java.io.IOException;

 public class RestGetBucketsAction extends BaseRestHandler {

-    private final GetBucketsAction.TransportAction transportAction;
-
     @Inject
-    public RestGetBucketsAction(Settings settings, RestController controller, GetBucketsAction.TransportAction transportAction) {
+    public RestGetBucketsAction(Settings settings, RestController controller) {
         super(settings);
-        this.transportAction = transportAction;
         controller.registerHandler(RestRequest.Method.GET,
                 MlPlugin.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName()
                 + "}/results/buckets/{" + Bucket.TIMESTAMP.getPreferredName() + "}", this);
@@ -90,6 +87,6 @@ public class RestGetBucketsAction extends BaseRestHandler {
             request.setIncludeInterim(restRequest.paramAsBoolean(GetBucketsAction.Request.INCLUDE_INTERIM.getPreferredName(), false));
         }

-        return channel -> transportAction.execute(request, new RestToXContentListener<>(channel));
+        return channel -> client.execute(GetBucketsAction.INSTANCE, request, new RestToXContentListener<>(channel));
     }
 }

@@ -25,13 +25,9 @@ import java.io.IOException;

 public class RestGetCategoriesAction extends BaseRestHandler {

-    private final GetCategoriesDefinitionAction.TransportAction transportAction;
-
     @Inject
-    public RestGetCategoriesAction(Settings settings, RestController controller,
-            GetCategoriesDefinitionAction.TransportAction transportAction) {
+    public RestGetCategoriesAction(Settings settings, RestController controller) {
         super(settings);
-        this.transportAction = transportAction;
         controller.registerHandler(RestRequest.Method.GET,
                 MlPlugin.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/results/categorydefinitions/{"
                 + Request.CATEGORY_ID.getPreferredName() + "}", this);
@@ -73,7 +69,7 @@ public class RestGetCategoriesAction extends BaseRestHandler {
             }
         }

-        return channel -> transportAction.execute(request, new RestToXContentListener<>(channel));
+        return channel -> client.execute(GetCategoriesDefinitionAction.INSTANCE, request, new RestToXContentListener<>(channel));
     }

 }

|
@ -23,12 +23,9 @@ import java.io.IOException;
|
|||
|
||||
public class RestGetInfluencersAction extends BaseRestHandler {
|
||||
|
||||
private final GetInfluencersAction.TransportAction transportAction;
|
||||
|
||||
@Inject
|
||||
public RestGetInfluencersAction(Settings settings, RestController controller, GetInfluencersAction.TransportAction transportAction) {
|
||||
public RestGetInfluencersAction(Settings settings, RestController controller) {
|
||||
super(settings);
|
||||
this.transportAction = transportAction;
|
||||
controller.registerHandler(RestRequest.Method.GET,
|
||||
MlPlugin.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/results/influencers", this);
|
||||
// endpoints that support body parameters must also accept POST
|
||||
|
@ -59,6 +56,6 @@ public class RestGetInfluencersAction extends BaseRestHandler {
|
|||
request.setDecending(restRequest.paramAsBoolean(GetInfluencersAction.Request.DESCENDING_SORT.getPreferredName(), true));
|
||||
}
|
||||
|
||||
return channel -> transportAction.execute(request, new RestToXContentListener<>(channel));
|
||||
return channel -> client.execute(GetInfluencersAction.INSTANCE, request, new RestToXContentListener<>(channel));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -23,12 +23,9 @@ import java.io.IOException;
|
|||
|
||||
public class RestGetRecordsAction extends BaseRestHandler {
|
||||
|
||||
private final GetRecordsAction.TransportAction transportAction;
|
||||
|
||||
@Inject
|
||||
public RestGetRecordsAction(Settings settings, RestController controller, GetRecordsAction.TransportAction transportAction) {
|
||||
public RestGetRecordsAction(Settings settings, RestController controller) {
|
||||
super(settings);
|
||||
this.transportAction = transportAction;
|
||||
controller.registerHandler(RestRequest.Method.GET, MlPlugin.BASE_PATH + "anomaly_detectors/{"
|
||||
+ Job.ID.getPreferredName() + "}/results/records", this);
|
||||
controller.registerHandler(RestRequest.Method.POST, MlPlugin.BASE_PATH + "anomaly_detectors/{"
|
||||
|
@ -63,6 +60,6 @@ public class RestGetRecordsAction extends BaseRestHandler {
|
|||
}
|
||||
}
|
||||
|
||||
return channel -> transportAction.execute(request, new RestToXContentListener<>(channel));
|
||||
return channel -> client.execute(GetRecordsAction.INSTANCE, request, new RestToXContentListener<>(channel));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -20,13 +20,9 @@ import java.io.IOException;
|
|||
|
||||
public class RestDeleteSchedulerAction extends BaseRestHandler {
|
||||
|
||||
private final DeleteSchedulerAction.TransportAction transportDeleteSchedulerAction;
|
||||
|
||||
@Inject
|
||||
public RestDeleteSchedulerAction(Settings settings, RestController controller,
|
||||
DeleteSchedulerAction.TransportAction transportDeleteSchedulerAction) {
|
||||
public RestDeleteSchedulerAction(Settings settings, RestController controller) {
|
||||
super(settings);
|
||||
this.transportDeleteSchedulerAction = transportDeleteSchedulerAction;
|
||||
controller.registerHandler(RestRequest.Method.DELETE, MlPlugin.BASE_PATH + "schedulers/{"
|
||||
+ SchedulerConfig.ID.getPreferredName() + "}", this);
|
||||
}
|
||||
|
@ -35,7 +31,7 @@ public class RestDeleteSchedulerAction extends BaseRestHandler {
|
|||
protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException {
|
||||
String schedulerId = restRequest.param(SchedulerConfig.ID.getPreferredName());
|
||||
DeleteSchedulerAction.Request deleteSchedulerRequest = new DeleteSchedulerAction.Request(schedulerId);
|
||||
return channel -> transportDeleteSchedulerAction.execute(deleteSchedulerRequest, new AcknowledgedRestListener<>(channel));
|
||||
return channel -> client.execute(DeleteSchedulerAction.INSTANCE, deleteSchedulerRequest, new AcknowledgedRestListener<>(channel));
|
||||
}
|
||||
|
||||
}
|
||||
|
|
|
@ -20,14 +20,9 @@ import java.io.IOException;
|
|||
|
||||
public class RestGetSchedulersAction extends BaseRestHandler {
|
||||
|
||||
private final GetSchedulersAction.TransportAction transportGetSchedulersAction;
|
||||
|
||||
@Inject
|
||||
public RestGetSchedulersAction(Settings settings, RestController controller,
|
||||
GetSchedulersAction.TransportAction transportGetSchedulersAction) {
|
||||
public RestGetSchedulersAction(Settings settings, RestController controller) {
|
||||
super(settings);
|
||||
this.transportGetSchedulersAction = transportGetSchedulersAction;
|
||||
|
||||
controller.registerHandler(RestRequest.Method.GET, MlPlugin.BASE_PATH
|
||||
+ "schedulers/{" + SchedulerConfig.ID.getPreferredName() + "}", this);
|
||||
}
|
||||
|
@ -35,6 +30,6 @@ public class RestGetSchedulersAction extends BaseRestHandler {
|
|||
@Override
|
||||
protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException {
|
||||
GetSchedulersAction.Request request = new GetSchedulersAction.Request(restRequest.param(SchedulerConfig.ID.getPreferredName()));
|
||||
return channel -> transportGetSchedulersAction.execute(request, new RestToXContentListener<>(channel));
|
||||
return channel -> client.execute(GetSchedulersAction.INSTANCE, request, new RestToXContentListener<>(channel));
|
||||
}
|
||||
}
|
||||
|
|
|
@ -20,14 +20,9 @@ import java.io.IOException;
|
|||
|
||||
public class RestGetSchedulersStatsAction extends BaseRestHandler {
|
||||
|
||||
private final GetSchedulersStatsAction.TransportAction transportGetSchedulersStatsAction;
|
||||
|
||||
@Inject
|
||||
public RestGetSchedulersStatsAction(Settings settings, RestController controller,
|
||||
GetSchedulersStatsAction.TransportAction transportGetSchedulersStatsAction) {
|
||||
public RestGetSchedulersStatsAction(Settings settings, RestController controller) {
|
||||
super(settings);
|
||||
this.transportGetSchedulersStatsAction = transportGetSchedulersStatsAction;
|
||||
|
||||
controller.registerHandler(RestRequest.Method.GET, MlPlugin.BASE_PATH
|
||||
+ "schedulers/{" + SchedulerConfig.ID.getPreferredName() + "}/_stats", this);
|
||||
}
|
||||
|
@ -36,6 +31,6 @@ public class RestGetSchedulersStatsAction extends BaseRestHandler {
|
|||
protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException {
|
||||
GetSchedulersStatsAction.Request request = new GetSchedulersStatsAction.Request(
|
||||
restRequest.param(SchedulerConfig.ID.getPreferredName()));
|
||||
return channel -> transportGetSchedulersStatsAction.execute(request, new RestToXContentListener<>(channel));
|
||||
return channel -> client.execute(GetSchedulersStatsAction.INSTANCE, request, new RestToXContentListener<>(channel));
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -21,13 +21,9 @@ import java.io.IOException;

 public class RestPutSchedulerAction extends BaseRestHandler {

-    private final PutSchedulerAction.TransportAction transportPutSchedulerAction;
-
-    @Inject
-    public RestPutSchedulerAction(Settings settings, RestController controller,
-                                  PutSchedulerAction.TransportAction transportPutSchedulerAction) {
+    public RestPutSchedulerAction(Settings settings, RestController controller) {
         super(settings);
-        this.transportPutSchedulerAction = transportPutSchedulerAction;
         controller.registerHandler(RestRequest.Method.PUT, MlPlugin.BASE_PATH + "schedulers/{"
                 + SchedulerConfig.ID.getPreferredName() + "}", this);
     }

@@ -37,7 +33,7 @@ public class RestPutSchedulerAction extends BaseRestHandler {
         String schedulerId = restRequest.param(SchedulerConfig.ID.getPreferredName());
         XContentParser parser = restRequest.contentParser();
         PutSchedulerAction.Request putSchedulerRequest = PutSchedulerAction.Request.parseRequest(schedulerId, parser);
-        return channel -> transportPutSchedulerAction.execute(putSchedulerRequest, new RestToXContentListener<>(channel));
+        return channel -> client.execute(PutSchedulerAction.INSTANCE, putSchedulerRequest, new RestToXContentListener<>(channel));
     }

 }
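For the body-carrying endpoints (put and the validate actions below), the notable detail is that prepareRequest parses the body eagerly, before returning the channel consumer: a malformed body fails fast, while only the dispatch is deferred. A plain-Java sketch of that parse-then-dispatch shape, with hypothetical stand-ins throughout (PutRequest, the dispatcher, and the action-name string are all illustrative; the real types are PutSchedulerAction.Request and NodeClient.execute):

```java
import java.io.IOException;
import java.util.function.BiConsumer;

public class ParseThenDispatchSketch {

    // Hypothetical stand-in for PutSchedulerAction.Request: parseRequest
    // validates and binds the raw body up front, mirroring
    // PutSchedulerAction.Request.parseRequest(schedulerId, parser).
    record PutRequest(String schedulerId, String body) {
        static PutRequest parseRequest(String schedulerId, String rawBody) throws IOException {
            if (rawBody == null || rawBody.isEmpty()) {
                throw new IOException("request body required");
            }
            return new PutRequest(schedulerId, rawBody);
        }
    }

    public static void main(String[] args) throws IOException {
        // Parsing happens eagerly, as in prepareRequest ...
        PutRequest request = PutRequest.parseRequest("my-scheduler", "{\"job_id\":\"foo\"}");
        // ... while the dispatch stays deferred inside a callback, like the
        // channel -> client.execute(...) lambda in the diff.
        BiConsumer<String, PutRequest> dispatch =
                (action, req) -> System.out.println(action + " <- " + req);
        Runnable deferred = () -> dispatch.accept("put-scheduler (hypothetical action name)", request);
        deferred.run();
    }
}
```

The validate handlers differ only in calling restRequest.contentOrSourceParamParser(), which in Elasticsearch's REST layer also accepts the body via the source query parameter.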
@@ -21,13 +21,9 @@ import java.io.IOException;

 public class RestStopSchedulerAction extends BaseRestHandler {

-    private final StopSchedulerAction.TransportAction transportJobSchedulerAction;
-
-    @Inject
-    public RestStopSchedulerAction(Settings settings, RestController controller,
-                                   StopSchedulerAction.TransportAction transportJobSchedulerAction) {
+    public RestStopSchedulerAction(Settings settings, RestController controller) {
         super(settings);
-        this.transportJobSchedulerAction = transportJobSchedulerAction;
         controller.registerHandler(RestRequest.Method.POST, MlPlugin.BASE_PATH + "schedulers/{"
                 + SchedulerConfig.ID.getPreferredName() + "}/_stop", this);
     }

@@ -39,6 +35,6 @@ public class RestStopSchedulerAction extends BaseRestHandler {
         if (restRequest.hasParam("stop_timeout")) {
             jobSchedulerRequest.setStopTimeout(TimeValue.parseTimeValue(restRequest.param("stop_timeout"), "stop_timeout"));
         }
-        return channel -> transportJobSchedulerAction.execute(jobSchedulerRequest, new AcknowledgedRestListener<>(channel));
+        return channel -> client.execute(StopSchedulerAction.INSTANCE, jobSchedulerRequest, new AcknowledgedRestListener<>(channel));
     }
 }
@@ -20,13 +20,9 @@ import java.io.IOException;

 public class RestValidateDetectorAction extends BaseRestHandler {

-    private ValidateDetectorAction.TransportAction transportValidateAction;
-
-    @Inject
-    public RestValidateDetectorAction(Settings settings, RestController controller,
-                                      ValidateDetectorAction.TransportAction transportValidateAction) {
+    public RestValidateDetectorAction(Settings settings, RestController controller) {
         super(settings);
-        this.transportValidateAction = transportValidateAction;
         controller.registerHandler(RestRequest.Method.POST, MlPlugin.BASE_PATH + "_validate/detector", this);
     }

@@ -34,8 +30,8 @@ public class RestValidateDetectorAction extends BaseRestHandler {
     protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException {
         XContentParser parser = restRequest.contentOrSourceParamParser();
         ValidateDetectorAction.Request validateDetectorRequest = ValidateDetectorAction.Request.parseRequest(parser);
-        return channel -> transportValidateAction.execute(validateDetectorRequest,
-                new AcknowledgedRestListener<ValidateDetectorAction.Response>(channel));
+        return channel ->
+                client.execute(ValidateDetectorAction.INSTANCE, validateDetectorRequest, new AcknowledgedRestListener<>(channel));
     }

 }
@@ -20,13 +20,9 @@ import java.io.IOException;

 public class RestValidateTransformAction extends BaseRestHandler {

-    private ValidateTransformAction.TransportAction transportValidateAction;
-
-    @Inject
-    public RestValidateTransformAction(Settings settings, RestController controller,
-                                       ValidateTransformAction.TransportAction transportValidateAction) {
+    public RestValidateTransformAction(Settings settings, RestController controller) {
         super(settings);
-        this.transportValidateAction = transportValidateAction;
         controller.registerHandler(RestRequest.Method.POST, MlPlugin.BASE_PATH + "_validate/transform", this);
     }

@@ -34,8 +30,8 @@ public class RestValidateTransformAction extends BaseRestHandler {
     protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException {
         XContentParser parser = restRequest.contentOrSourceParamParser();
         ValidateTransformAction.Request validateDetectorRequest = ValidateTransformAction.Request.parseRequest(parser);
-        return channel -> transportValidateAction.execute(validateDetectorRequest,
-                new AcknowledgedRestListener<ValidateTransformAction.Response>(channel));
+        return channel ->
+                client.execute(ValidateTransformAction.INSTANCE, validateDetectorRequest, new AcknowledgedRestListener<>(channel));
     }

 }
@@ -20,13 +20,9 @@ import java.io.IOException;

 public class RestValidateTransformsAction extends BaseRestHandler {

-    private ValidateTransformsAction.TransportAction transportValidateAction;
-
-    @Inject
-    public RestValidateTransformsAction(Settings settings, RestController controller,
-                                        ValidateTransformsAction.TransportAction transportValidateAction) {
+    public RestValidateTransformsAction(Settings settings, RestController controller) {
         super(settings);
-        this.transportValidateAction = transportValidateAction;
         controller.registerHandler(RestRequest.Method.POST, MlPlugin.BASE_PATH + "_validate/transforms", this);
     }

@@ -34,7 +30,8 @@ public class RestValidateTransformsAction extends BaseRestHandler {
     protected RestChannelConsumer prepareRequest(RestRequest restRequest, NodeClient client) throws IOException {
         XContentParser parser = restRequest.contentOrSourceParamParser();
         ValidateTransformsAction.Request validateDetectorRequest = ValidateTransformsAction.Request.PARSER.apply(parser, null);
-        return channel -> transportValidateAction.execute(validateDetectorRequest, new AcknowledgedRestListener<>(channel));
+        return channel ->
+                client.execute(ValidateTransformsAction.INSTANCE, validateDetectorRequest, new AcknowledgedRestListener<>(channel));
     }

 }
@@ -27,8 +27,8 @@ public class CppLogMessageHandlerTests extends ESTestCase {
                 + "/var/folders/k5/5sqcdlps5sg3cvlp783gcz740000h0/T/controller_log_784\",\"class\":\"ml\","
                 + "\"method\":\"core::CLogger::reconfigureLogToNamedPipe\",\"file\":\"CLogger.cc\",\"line\":333}\n"
                 + "{\"logger\":\"controller\",\"timestamp\":1478261151445,\"level\":\"INFO\",\"pid\":10211,\"thread\":\"0x7fff7d2a8000\","
-                + "\"message\":\"controller (64 bit): Version based on 6.5.0 (Build DEVELOPMENT BUILD by dave) "
-                + "Copyright (c) 2016 Elasticsearch BV\",\"method\":\"main\",\"file\":\"Main.cc\",\"line\":123}\n"
+                + "\"message\":\"controller (64 bit): Version based on 6.0.0-alpha1 (Build b0d6ef8819418c) "
+                + "Copyright (c) 2017 Elasticsearch BV\",\"method\":\"main\",\"file\":\"Main.cc\",\"line\":123}\n"
                 + "{\"logger\":\"controller\",\"timestamp\":1478261169065,\"level\":\"ERROR\",\"pid\":10211,\"thread\":\"0x7fff7d2a8000\","
                 + "\"message\":\"Did not understand verb 'a'\",\"class\":\"ml\","
                 + "\"method\":\"controller::CCommandProcessor::handleCommand\",\"file\":\"CCommandProcessor.cc\",\"line\":100}\n"

@@ -43,6 +43,8 @@ public class CppLogMessageHandlerTests extends ESTestCase {
         assertTrue(handler.hasLogStreamEnded());
         assertEquals(10211L, handler.getPid(Duration.ofMillis(1)));
+        assertEquals("controller (64 bit): Version based on 6.0.0-alpha1 (Build b0d6ef8819418c) "
+                + "Copyright (c) 2017 Elasticsearch BV", handler.getCppCopyright());
         assertEquals("Did not understand verb 'a'\n", handler.getErrors());
         assertFalse(handler.seenFatalError());
     }
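The fixture strings above document the wire format between the C++ controller and the Java side: newline-delimited JSON, one record per line, each carrying logger, timestamp, level, pid, thread, and message fields. As a standalone illustration of pulling the pid out of such a record (a deliberately simplified sketch — the real CppLogMessageHandler uses a proper JSON parser; the regex here is only for demonstration):

```java
import java.util.regex.Matcher;
import java.util.regex.Pattern;

public class CppLogPidSketch {

    // Grabs the numeric "pid" field from a single ND-JSON log record.
    private static final Pattern PID_FIELD = Pattern.compile("\"pid\":(\\d+)");

    public static long extractPid(String ndJsonLine) {
        Matcher m = PID_FIELD.matcher(ndJsonLine);
        if (m.find()) {
            return Long.parseLong(m.group(1));
        }
        throw new IllegalArgumentException("no pid field in: " + ndJsonLine);
    }

    public static void main(String[] args) {
        String line = "{\"logger\":\"controller\",\"timestamp\":1478261151445,"
                + "\"level\":\"INFO\",\"pid\":10211,\"thread\":\"0x7fff7d2a8000\"}";
        // Prints 10211, the same pid the test asserts via handler.getPid(...).
        System.out.println(extractPid(line));
    }
}
```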
@@ -30,7 +30,7 @@ import static org.mockito.Mockito.mock;
 import static org.mockito.Mockito.when;

 @LuceneTestCase.AwaitsFix(bugUrl = "https://github.com/elastic/prelert-legacy/issues/127")
-public class ElasticsearchBatchedDocumentsIteratorTests extends ESTestCase {
+public class BatchedDocumentsIteratorTests extends ESTestCase {
     private static final String INDEX_NAME = ".ml-anomalies-foo";
     private static final String SCROLL_ID = "someScrollId";

@@ -187,7 +187,7 @@ public class ElasticsearchBatchedDocumentsIteratorTests extends ESTestCase {
         }
     }

-    private static class TestIterator extends ElasticsearchBatchedDocumentsIterator<String> {
+    private static class TestIterator extends BatchedDocumentsIterator<String> {
         public TestIterator(Client client, String jobId) {
             super(client, jobId);
         }
@@ -5,19 +5,23 @@
  */
 package org.elasticsearch.xpack.ml.job.persistence;

+import org.elasticsearch.client.Client;
+import org.elasticsearch.search.SearchHit;
+
 import java.util.Deque;
 import java.util.List;
 import java.util.NoSuchElementException;

 import static org.junit.Assert.assertEquals;
+import static org.mockito.Mockito.mock;

-public class MockBatchedDocumentsIterator<T> implements BatchedDocumentsIterator<T> {
+public class MockBatchedDocumentsIterator<T> extends BatchedDocumentsIterator<T> {
     private final List<Deque<T>> batches;
     private int index;
     private boolean wasTimeRangeCalled;
     private String interimFieldName;

     public MockBatchedDocumentsIterator(List<Deque<T>> batches) {
+        super(mock(Client.class), "foo");
         this.batches = batches;
         index = 0;
         wasTimeRangeCalled = false;

@@ -44,6 +48,16 @@ public class MockBatchedDocumentsIterator<T> implements BatchedDocumentsIterator
         return batches.get(index++);
     }

+    @Override
+    protected String getType() {
+        return null;
+    }
+
+    @Override
+    protected T map(SearchHit hit) {
+        return null;
+    }
+
     @Override
     public boolean hasNext() {
         return index != batches.size();
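The overrides keep the mock honest to the base class's contract: iteration hands back whole batches, one Deque per next() call, and hasNext() is driven purely by how many batches were prepared. A self-contained stand-in for that contract (SimpleBatchedIterator is hypothetical; the real BatchedDocumentsIterator also carries query state such as the time range and interim-results filtering hinted at by wasTimeRangeCalled):

```java
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Deque;
import java.util.Iterator;
import java.util.List;

// Hypothetical stand-in mirroring the hasNext()/next() logic the mock uses:
// hasNext() compares an index against the batch count, next() returns one
// whole Deque per call.
class SimpleBatchedIterator<T> implements Iterator<Deque<T>> {
    private final List<Deque<T>> batches;
    private int index = 0;

    SimpleBatchedIterator(List<Deque<T>> batches) {
        this.batches = batches;
    }

    @Override
    public boolean hasNext() {
        return index != batches.size();
    }

    @Override
    public Deque<T> next() {
        return batches.get(index++);
    }
}

public class BatchedIterationSketch {
    public static void main(String[] args) {
        List<Deque<String>> batches = new ArrayList<>();
        batches.add(new ArrayDeque<>(List.of("result-1", "result-2")));
        batches.add(new ArrayDeque<>(List.of("result-3")));

        SimpleBatchedIterator<String> it = new SimpleBatchedIterator<>(batches);
        while (it.hasNext()) {
            System.out.println("batch: " + it.next());
        }
    }
}
```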
@@ -18,7 +18,7 @@ import org.elasticsearch.xpack.ml.job.AnalysisConfig;
 import org.elasticsearch.xpack.ml.job.Detector;
 import org.elasticsearch.xpack.ml.job.Job;
 import org.elasticsearch.xpack.ml.job.persistence.BatchedDocumentsIterator;
-import org.elasticsearch.xpack.ml.job.persistence.ElasticsearchBatchedResultsIterator;
+import org.elasticsearch.xpack.ml.job.persistence.BatchedResultsIterator;
 import org.elasticsearch.xpack.ml.job.persistence.JobProvider;
 import org.elasticsearch.xpack.ml.job.persistence.JobRenormalizedResultsPersister;
 import org.elasticsearch.xpack.ml.job.persistence.MockBatchedDocumentsIterator;

@@ -192,10 +192,10 @@ public class ScoresUpdaterTests extends ESTestCase {
         bucket1.setAnomalyScore(42.0);
         bucket1.addBucketInfluencer(createTimeBucketInfluencer(bucket1.getTimestamp(), 0.04, 42.0));
         bucket1.setMaxNormalizedProbability(50.0);
-        List<ElasticsearchBatchedResultsIterator.ResultWithIndex<AnomalyRecord>> records = new ArrayList<>();
+        List<BatchedResultsIterator.ResultWithIndex<AnomalyRecord>> records = new ArrayList<>();
         Date date = new Date();
         for (int i=0; i<100000; i++) {
-            records.add(new ElasticsearchBatchedResultsIterator.ResultWithIndex<>("foo", new AnomalyRecord("foo", date, 1, i)));
+            records.add(new BatchedResultsIterator.ResultWithIndex<>("foo", new AnomalyRecord("foo", date, 1, i)));
         }

         Bucket bucket2 = generateBucket(new Date(10000 * 1000));

@@ -209,9 +209,9 @@ public class ScoresUpdaterTests extends ESTestCase {
         givenProviderReturnsBuckets(batch);


-        List<Deque<ElasticsearchBatchedResultsIterator.ResultWithIndex<AnomalyRecord>>> recordBatches = new ArrayList<>();
+        List<Deque<BatchedResultsIterator.ResultWithIndex<AnomalyRecord>>> recordBatches = new ArrayList<>();
         recordBatches.add(new ArrayDeque<>(records));
-        BatchedDocumentsIterator<ElasticsearchBatchedResultsIterator.ResultWithIndex<AnomalyRecord>> recordIter =
+        BatchedDocumentsIterator<BatchedResultsIterator.ResultWithIndex<AnomalyRecord>> recordIter =
                 new MockBatchedDocumentsIterator<>(recordBatches);
         when(jobProvider.newBatchedRecordsIterator(JOB_ID)).thenReturn(recordIter);

@@ -341,29 +341,29 @@ public class ScoresUpdaterTests extends ESTestCase {
     }

     private void givenBuckets(List<Deque<Bucket>> batches) {
-        List<Deque<ElasticsearchBatchedResultsIterator.ResultWithIndex<Bucket>>> batchesWithIndex = new ArrayList<>();
+        List<Deque<BatchedResultsIterator.ResultWithIndex<Bucket>>> batchesWithIndex = new ArrayList<>();
         for (Deque<Bucket> deque : batches) {
-            Deque<ElasticsearchBatchedResultsIterator.ResultWithIndex<Bucket>> queueWithIndex = new ArrayDeque<>();
+            Deque<BatchedResultsIterator.ResultWithIndex<Bucket>> queueWithIndex = new ArrayDeque<>();
             for (Bucket bucket : deque) {
-                queueWithIndex.add(new ElasticsearchBatchedResultsIterator.ResultWithIndex<>("foo", bucket));
+                queueWithIndex.add(new BatchedResultsIterator.ResultWithIndex<>("foo", bucket));
             }
             batchesWithIndex.add(queueWithIndex);
         }

-        BatchedDocumentsIterator<ElasticsearchBatchedResultsIterator.ResultWithIndex<Bucket>> bucketIter =
+        BatchedDocumentsIterator<BatchedResultsIterator.ResultWithIndex<Bucket>> bucketIter =
                 new MockBatchedDocumentsIterator<>(batchesWithIndex);
         when(jobProvider.newBatchedBucketsIterator(JOB_ID)).thenReturn(bucketIter);
     }

     private void givenProviderReturnsRecords(Deque<AnomalyRecord> records) {
-        Deque<ElasticsearchBatchedResultsIterator.ResultWithIndex<AnomalyRecord>> batch = new ArrayDeque<>();
-        List<Deque<ElasticsearchBatchedResultsIterator.ResultWithIndex<AnomalyRecord>>> batches = new ArrayList<>();
+        Deque<BatchedResultsIterator.ResultWithIndex<AnomalyRecord>> batch = new ArrayDeque<>();
+        List<Deque<BatchedResultsIterator.ResultWithIndex<AnomalyRecord>>> batches = new ArrayList<>();
         for (AnomalyRecord record : records) {
-            batch.add(new ElasticsearchBatchedResultsIterator.ResultWithIndex<>("foo", record));
+            batch.add(new BatchedResultsIterator.ResultWithIndex<>("foo", record));
         }
         batches.add(batch);

-        BatchedDocumentsIterator<ElasticsearchBatchedResultsIterator.ResultWithIndex<AnomalyRecord>> recordIter =
+        BatchedDocumentsIterator<BatchedResultsIterator.ResultWithIndex<AnomalyRecord>> recordIter =
                 new MockBatchedDocumentsIterator<>(batches);
         when(jobProvider.newBatchedRecordsIterator(JOB_ID)).thenReturn(recordIter);
     }

@@ -373,13 +373,13 @@ public class ScoresUpdaterTests extends ESTestCase {
     }

     private void givenProviderReturnsInfluencers(Deque<Influencer> influencers) {
-        List<Deque<ElasticsearchBatchedResultsIterator.ResultWithIndex<Influencer>>> batches = new ArrayList<>();
-        Deque<ElasticsearchBatchedResultsIterator.ResultWithIndex<Influencer>> queue = new ArrayDeque<>();
+        List<Deque<BatchedResultsIterator.ResultWithIndex<Influencer>>> batches = new ArrayList<>();
+        Deque<BatchedResultsIterator.ResultWithIndex<Influencer>> queue = new ArrayDeque<>();
         for (Influencer inf : influencers) {
-            queue.add(new ElasticsearchBatchedResultsIterator.ResultWithIndex<>("foo", inf));
+            queue.add(new BatchedResultsIterator.ResultWithIndex<>("foo", inf));
         }
         batches.add(queue);
-        BatchedDocumentsIterator<ElasticsearchBatchedResultsIterator.ResultWithIndex<Influencer>> iterator =
+        BatchedDocumentsIterator<BatchedResultsIterator.ResultWithIndex<Influencer>> iterator =
                 new MockBatchedDocumentsIterator<>(batches);
         when(jobProvider.newBatchedInfluencersIterator(JOB_ID)).thenReturn(iterator);
     }
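Each given... helper reduces to the same Mockito move: wrap results in ResultWithIndex, group them into Deque batches, and stub the JobProvider factory method so the production code receives the mock iterator instead of a live scroll. A self-contained miniature of that stubbing (Provider and the String payloads are hypothetical stand-ins for JobProvider and the ResultWithIndex wrappers):

```java
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import java.util.ArrayDeque;
import java.util.Deque;
import java.util.List;

public class ProviderStubSketch {

    // Hypothetical stand-in for JobProvider's iterator factory method.
    interface Provider {
        Iterable<Deque<String>> newBatchedRecordsIterator(String jobId);
    }

    public static void main(String[] args) {
        Provider jobProvider = mock(Provider.class);

        // Pre-can one batch of results, as givenProviderReturnsRecords does.
        Deque<String> batch = new ArrayDeque<>(List.of("record-1", "record-2"));
        List<Deque<String>> batches = List.of(batch);

        // Production code asks the provider for an iterator; the stub answers
        // with the canned batches instead of querying Elasticsearch.
        when(jobProvider.newBatchedRecordsIterator("foo")).thenReturn(batches);

        jobProvider.newBatchedRecordsIterator("foo").forEach(System.out::println);
    }
}
```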
@@ -1,5 +1,4 @@
 rootProject.name = 'ml'
-include ':cpp'
 include ':elasticsearch'
 include ':docs'
 include ':kibana'
@@ -1 +0,0 @@
-.vagrant
@@ -1,108 +0,0 @@
-## Vagrant build environment
-
-This provides a vagrant box for building the C++ side of Ml (and the Java side,
-although that is easily accomplished outside vagrant).
-
-Provisioning the box will take a fair amount of time, since it needs to download
-and compile a number of dependencies.
-
-### Details
-- Ubuntu Trusty64 (14.04.5 LTS)
-- 25% of host's memory
-- 100% of host's cores
-- Maps ml source repository to `/home/vagrant/ml/src`
-  - Directory is shared with the host, so you can point your IDE to the ml repo
-    and build inside vagrant
-- Maps ml build directory to `/home/vagrant/ml/build`
-- Changes into `/home/vagrant/ml/src` on login
-
-### Pre-baked box
-Don't feel like compiling the entire box? No fear, there's a pre-baked box available
-on S3. It is ~1.1 GB to download:
-
-```bash
-# Change into some random directory to download the box.
-# Doesn't matter where this goes, but *cannot* go into prelert-legacy/vagrant
-$ cd ~/some_directory
-
-# Export the path to your prelert-legacy repo. This is so the box knows where
-# to sync the folders
-$ export ML_SRC_HOME=/path/to/prelert-legacy
-
-# Download the box from S3
-$ s3cmd get s3://ml-elastic-dump/ml_env.box
-# ...
-# Downloading...
-# ...
-
-$ vagrant box add ml ml_env.box
-$ vagrant init ml
-$ vagrant up
-$ vagrant ssh
-
-Welcome to Ubuntu 14.04.5 LTS (GNU/Linux 3.13.0-96-generic x86_64)
-...
-...
-Last login: Tue Oct 4 16:06:32 2016 from 10.0.2.2
-
-vagrant@vagrant-ubuntu-trusty-64:~/ml/src$
-```
-
-Once you've logged into the box, you'll be in the ml source directory. You
-can build immediately via:
-
-```bash
-vagrant@vagrant-ubuntu-trusty-64:~/ml/src$ gradle cppmake
-```
-
-The pre-baked box has already compiled ml once, so subsequent compilations
-should happen considerably faster.
-
-### Compiling from Scratch
-
-If you feel like compiling everything from scratch instead of downloading the pre-baked
-box, simply `vagrant up` and let the provisioners run:
-
-```bash
-$ cd prelert-legacy/vagrant
-$ vagrant up
-# ...
-# wait while vagrant provisions
-# ...
-$ vagrant ssh
-
-Welcome to Ubuntu 14.04.5 LTS (GNU/Linux 3.13.0-96-generic x86_64)
-...
-...
-Last login: Tue Oct 4 16:06:32 2016 from 10.0.2.2
-
-vagrant@vagrant-ubuntu-trusty-64:~/ml/src$
-```
-
-Once you've logged into the box, you'll be in the ml source directory. You
-can build immediately via:
-
-```bash
-vagrant@vagrant-ubuntu-trusty-64:~/ml/src$ gradle cppmake
-# ...
-# much building
-# ...
-```
-
-### Suspending your box
-Once you've provisioned a box, you can use `vagrant suspend` to "sleep" the box.
-This has the advantage of rebooting quickly when you `vagrant up`, and returns you
-to exactly what you were doing. On the downside, it eats more disk space since it
-needs to sleep the entire image.
-
-You can alternatively use `vagrant halt`, which gracefully powers down the machine.
-Rebooting via `vagrant up` takes longer since you are rebooting the entire OS,
-but it saves more disk space.
-
-### Fixing a broken box
-If you accidentally kill the provisioning process before it completes, you can
-attempt to reprovision it with `vagrant reload --provision`. That will run
-the provisioners that did not complete previously.
-
-If your box is still horribly broken, you can destroy it with `vagrant destroy`
-and try again with `vagrant up`.
@@ -1,37 +0,0 @@
-Vagrant.configure(2) do |config|
-  config.vm.box = "ubuntu/trusty64"
-  config.vm.provider "virtualbox" do |v|
-    host = RbConfig::CONFIG['host_os']
-
-    # Give VM 1/4 system memory
-    linux = RUBY_PLATFORM =~ /linux/
-    osx = RUBY_PLATFORM =~ /darwin/
-    windows = (/cygwin|mswin|mingw|bccwin|wince|emx/ =~ RUBY_PLATFORM) != nil
-    if osx
-      cpus = `sysctl -n hw.ncpu`.to_i
-      mem = `sysctl -n hw.memsize`.to_i / 1024 / 1024
-    end
-    if linux
-      cpus = `nproc`.to_i
-      mem = `sed -n -e '/^MemTotal/s/^[^0-9]*//p' /proc/meminfo`.to_i / 1024
-    end
-    if windows
-      cpus = `wmic computersystem get numberofprocessors`.split("\n")[2].to_i
-      mem = `wmic OS get TotalVisibleMemorySize`.split("\n")[2].to_i / 1024
-    end
-
-    mem = mem / 4
-    v.customize ["modifyvm", :id, "--memory", mem]
-    v.customize ["modifyvm", :id, "--cpus", cpus]
-  end
-
-  if File.expand_path(File.dirname(__FILE__)).include? "prelert-legacy/vagrant"
-    puts "Syncing host's source directory [" + File.expand_path("../") + "] to [/home/vagrant/ml/src]"
-    config.vm.synced_folder "../", "/home/vagrant/ml/src", mount_options: ["dmode=777,fmode=777"]
-  else
-    puts "Syncing host's source directory [" + File.expand_path(ENV['ML_SRC_HOME']) + "] to [/home/vagrant/ml/src] (via $ML_SRC_HOME)"
-    config.vm.synced_folder ENV['ML_SRC_HOME'], "/home/vagrant/ml/src", mount_options: ["dmode=777,fmode=777"]
-  end
-
-  config.vm.provision :shell, path: "provision.sh"
-end