[ML] Deprecate X-Pack centric ML endpoints (#36315)
This commit is part of our plan to deprecate and ultimately remove the use of _xpack in the REST APIs. Relates #35958
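As an illustration of the rename (using a placeholder job ID `my-job`), a request that previously went through the X-Pack prefixed path now uses the top-level `_ml` prefix:

GET _xpack/ml/anomaly_detectors/my-job   (old, deprecated)
GET _ml/anomaly_detectors/my-job         (new)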
parent: 1d73c1712b
commit: 9e8cfbb40d
@@ -104,7 +104,7 @@ public class RestTestsFromSnippetsTask extends SnippetsTask {
 * format of the response is incompatible i.e. it is not a JSON object.
 */
static shouldAddShardFailureCheck(String path) {
-     return path.startsWith('_cat') == false && path.startsWith('_xpack/ml/datafeeds/') == false
+     return path.startsWith('_cat') == false && path.startsWith('_ml/datafeeds/') == false
}

/**
@@ -59,6 +59,6 @@ class RestTestFromSnippetsTaskTests extends GradleUnitTestCase {
void testIsDocWriteRequest() {
assertTrue(shouldAddShardFailureCheck("doc-index/_search"));
assertFalse(shouldAddShardFailureCheck("_cat"))
- assertFalse(shouldAddShardFailureCheck("_xpack/ml/datafeeds/datafeed-id/_preview"));
+ assertFalse(shouldAddShardFailureCheck("_ml/datafeeds/datafeed-id/_preview"));
}
}
@@ -87,8 +87,7 @@ final class MLRequestConverters {
static Request putJob(PutJobRequest putJobRequest) throws IOException {
String endpoint = new EndpointBuilder()
- .addPathPartAsIs("_xpack")
- .addPathPartAsIs("ml")
+ .addPathPartAsIs("_ml")
.addPathPartAsIs("anomaly_detectors")
.addPathPart(putJobRequest.getJob().getId())
.build();

@@ -99,8 +98,7 @@ final class MLRequestConverters {
static Request getJob(GetJobRequest getJobRequest) {
String endpoint = new EndpointBuilder()
- .addPathPartAsIs("_xpack")
- .addPathPartAsIs("ml")
+ .addPathPartAsIs("_ml")
.addPathPartAsIs("anomaly_detectors")
.addPathPart(Strings.collectionToCommaDelimitedString(getJobRequest.getJobIds()))
.build();

@@ -116,8 +114,7 @@ final class MLRequestConverters {
static Request getJobStats(GetJobStatsRequest getJobStatsRequest) {
String endpoint = new EndpointBuilder()
- .addPathPartAsIs("_xpack")
- .addPathPartAsIs("ml")
+ .addPathPartAsIs("_ml")
.addPathPartAsIs("anomaly_detectors")
.addPathPart(Strings.collectionToCommaDelimitedString(getJobStatsRequest.getJobIds()))
.addPathPartAsIs("_stats")

@@ -133,8 +130,7 @@ final class MLRequestConverters {
static Request openJob(OpenJobRequest openJobRequest) throws IOException {
String endpoint = new EndpointBuilder()
- .addPathPartAsIs("_xpack")
- .addPathPartAsIs("ml")
+ .addPathPartAsIs("_ml")
.addPathPartAsIs("anomaly_detectors")
.addPathPart(openJobRequest.getJobId())
.addPathPartAsIs("_open")

@@ -146,8 +142,7 @@ final class MLRequestConverters {
static Request closeJob(CloseJobRequest closeJobRequest) throws IOException {
String endpoint = new EndpointBuilder()
- .addPathPartAsIs("_xpack")
- .addPathPartAsIs("ml")
+ .addPathPartAsIs("_ml")
.addPathPartAsIs("anomaly_detectors")
.addPathPart(Strings.collectionToCommaDelimitedString(closeJobRequest.getJobIds()))
.addPathPartAsIs("_close")

@@ -159,8 +154,7 @@ final class MLRequestConverters {
static Request deleteExpiredData(DeleteExpiredDataRequest deleteExpiredDataRequest) {
String endpoint = new EndpointBuilder()
- .addPathPartAsIs("_xpack")
- .addPathPartAsIs("ml")
+ .addPathPartAsIs("_ml")
.addPathPartAsIs("_delete_expired_data")
.build();
Request request = new Request(HttpDelete.METHOD_NAME, endpoint);

@@ -170,8 +164,7 @@ final class MLRequestConverters {
static Request deleteJob(DeleteJobRequest deleteJobRequest) {
String endpoint = new EndpointBuilder()
- .addPathPartAsIs("_xpack")
- .addPathPartAsIs("ml")
+ .addPathPartAsIs("_ml")
.addPathPartAsIs("anomaly_detectors")
.addPathPart(deleteJobRequest.getJobId())
.build();

@@ -190,8 +183,7 @@ final class MLRequestConverters {
static Request flushJob(FlushJobRequest flushJobRequest) throws IOException {
String endpoint = new EndpointBuilder()
- .addPathPartAsIs("_xpack")
- .addPathPartAsIs("ml")
+ .addPathPartAsIs("_ml")
.addPathPartAsIs("anomaly_detectors")
.addPathPart(flushJobRequest.getJobId())
.addPathPartAsIs("_flush")

@@ -203,8 +195,7 @@ final class MLRequestConverters {
static Request forecastJob(ForecastJobRequest forecastJobRequest) throws IOException {
String endpoint = new EndpointBuilder()
- .addPathPartAsIs("_xpack")
- .addPathPartAsIs("ml")
+ .addPathPartAsIs("_ml")
.addPathPartAsIs("anomaly_detectors")
.addPathPart(forecastJobRequest.getJobId())
.addPathPartAsIs("_forecast")

@@ -216,8 +207,7 @@ final class MLRequestConverters {
static Request updateJob(UpdateJobRequest updateJobRequest) throws IOException {
String endpoint = new EndpointBuilder()
- .addPathPartAsIs("_xpack")
- .addPathPartAsIs("ml")
+ .addPathPartAsIs("_ml")
.addPathPartAsIs("anomaly_detectors")
.addPathPart(updateJobRequest.getJobUpdate().getJobId())
.addPathPartAsIs("_update")

@@ -229,8 +219,7 @@ final class MLRequestConverters {
static Request putDatafeed(PutDatafeedRequest putDatafeedRequest) throws IOException {
String endpoint = new EndpointBuilder()
- .addPathPartAsIs("_xpack")
- .addPathPartAsIs("ml")
+ .addPathPartAsIs("_ml")
.addPathPartAsIs("datafeeds")
.addPathPart(putDatafeedRequest.getDatafeed().getId())
.build();

@@ -241,8 +230,7 @@ final class MLRequestConverters {
static Request updateDatafeed(UpdateDatafeedRequest updateDatafeedRequest) throws IOException {
String endpoint = new EndpointBuilder()
- .addPathPartAsIs("_xpack")
- .addPathPartAsIs("ml")
+ .addPathPartAsIs("_ml")
.addPathPartAsIs("datafeeds")
.addPathPart(updateDatafeedRequest.getDatafeedUpdate().getId())
.addPathPartAsIs("_update")

@@ -254,8 +242,7 @@ final class MLRequestConverters {
static Request getDatafeed(GetDatafeedRequest getDatafeedRequest) {
String endpoint = new EndpointBuilder()
- .addPathPartAsIs("_xpack")
- .addPathPartAsIs("ml")
+ .addPathPartAsIs("_ml")
.addPathPartAsIs("datafeeds")
.addPathPart(Strings.collectionToCommaDelimitedString(getDatafeedRequest.getDatafeedIds()))
.build();

@@ -272,8 +259,7 @@ final class MLRequestConverters {
static Request deleteDatafeed(DeleteDatafeedRequest deleteDatafeedRequest) {
String endpoint = new EndpointBuilder()
- .addPathPartAsIs("_xpack")
- .addPathPartAsIs("ml")
+ .addPathPartAsIs("_ml")
.addPathPartAsIs("datafeeds")
.addPathPart(deleteDatafeedRequest.getDatafeedId())
.build();

@@ -287,8 +273,7 @@ final class MLRequestConverters {
static Request startDatafeed(StartDatafeedRequest startDatafeedRequest) throws IOException {
String endpoint = new EndpointBuilder()
- .addPathPartAsIs("_xpack")
- .addPathPartAsIs("ml")
+ .addPathPartAsIs("_ml")
.addPathPartAsIs("datafeeds")
.addPathPart(startDatafeedRequest.getDatafeedId())
.addPathPartAsIs("_start")

@@ -300,8 +285,7 @@ final class MLRequestConverters {
static Request stopDatafeed(StopDatafeedRequest stopDatafeedRequest) throws IOException {
String endpoint = new EndpointBuilder()
- .addPathPartAsIs("_xpack")
- .addPathPartAsIs("ml")
+ .addPathPartAsIs("_ml")
.addPathPartAsIs("datafeeds")
.addPathPart(Strings.collectionToCommaDelimitedString(stopDatafeedRequest.getDatafeedIds()))
.addPathPartAsIs("_stop")

@@ -313,8 +297,7 @@ final class MLRequestConverters {
static Request getDatafeedStats(GetDatafeedStatsRequest getDatafeedStatsRequest) {
String endpoint = new EndpointBuilder()
- .addPathPartAsIs("_xpack")
- .addPathPartAsIs("ml")
+ .addPathPartAsIs("_ml")
.addPathPartAsIs("datafeeds")
.addPathPart(Strings.collectionToCommaDelimitedString(getDatafeedStatsRequest.getDatafeedIds()))
.addPathPartAsIs("_stats")

@@ -330,8 +313,7 @@ final class MLRequestConverters {
static Request previewDatafeed(PreviewDatafeedRequest previewDatafeedRequest) {
String endpoint = new EndpointBuilder()
- .addPathPartAsIs("_xpack")
- .addPathPartAsIs("ml")
+ .addPathPartAsIs("_ml")
.addPathPartAsIs("datafeeds")
.addPathPart(previewDatafeedRequest.getDatafeedId())
.addPathPartAsIs("_preview")

@@ -341,8 +323,7 @@ final class MLRequestConverters {
static Request deleteForecast(DeleteForecastRequest deleteForecastRequest) {
String endpoint = new EndpointBuilder()
- .addPathPartAsIs("_xpack")
- .addPathPartAsIs("ml")
+ .addPathPartAsIs("_ml")
.addPathPartAsIs("anomaly_detectors")
.addPathPart(deleteForecastRequest.getJobId())
.addPathPartAsIs("_forecast")

@@ -361,8 +342,7 @@ final class MLRequestConverters {
static Request deleteModelSnapshot(DeleteModelSnapshotRequest deleteModelSnapshotRequest) {
String endpoint = new EndpointBuilder()
- .addPathPartAsIs("_xpack")
- .addPathPartAsIs("ml")
+ .addPathPartAsIs("_ml")
.addPathPartAsIs("anomaly_detectors")
.addPathPart(deleteModelSnapshotRequest.getJobId())
.addPathPartAsIs("model_snapshots")

@@ -373,8 +353,7 @@ final class MLRequestConverters {
static Request getBuckets(GetBucketsRequest getBucketsRequest) throws IOException {
String endpoint = new EndpointBuilder()
- .addPathPartAsIs("_xpack")
- .addPathPartAsIs("ml")
+ .addPathPartAsIs("_ml")
.addPathPartAsIs("anomaly_detectors")
.addPathPart(getBucketsRequest.getJobId())
.addPathPartAsIs("results")

@@ -387,8 +366,7 @@ final class MLRequestConverters {
static Request getCategories(GetCategoriesRequest getCategoriesRequest) throws IOException {
String endpoint = new EndpointBuilder()
- .addPathPartAsIs("_xpack")
- .addPathPartAsIs("ml")
+ .addPathPartAsIs("_ml")
.addPathPartAsIs("anomaly_detectors")
.addPathPart(getCategoriesRequest.getJobId())
.addPathPartAsIs("results")

@@ -401,8 +379,7 @@ final class MLRequestConverters {
static Request getModelSnapshots(GetModelSnapshotsRequest getModelSnapshotsRequest) throws IOException {
String endpoint = new EndpointBuilder()
- .addPathPartAsIs("_xpack")
- .addPathPartAsIs("ml")
+ .addPathPartAsIs("_ml")
.addPathPartAsIs("anomaly_detectors")
.addPathPart(getModelSnapshotsRequest.getJobId())
.addPathPartAsIs("model_snapshots")

@@ -414,8 +391,7 @@ final class MLRequestConverters {
static Request updateModelSnapshot(UpdateModelSnapshotRequest updateModelSnapshotRequest) throws IOException {
String endpoint = new EndpointBuilder()
- .addPathPartAsIs("_xpack")
- .addPathPartAsIs("ml")
+ .addPathPartAsIs("_ml")
.addPathPartAsIs("anomaly_detectors")
.addPathPart(updateModelSnapshotRequest.getJobId())
.addPathPartAsIs("model_snapshots")

@@ -429,8 +405,7 @@ final class MLRequestConverters {
static Request revertModelSnapshot(RevertModelSnapshotRequest revertModelSnapshotsRequest) throws IOException {
String endpoint = new EndpointBuilder()
- .addPathPartAsIs("_xpack")
- .addPathPartAsIs("ml")
+ .addPathPartAsIs("_ml")
.addPathPartAsIs("anomaly_detectors")
.addPathPart(revertModelSnapshotsRequest.getJobId())
.addPathPartAsIs("model_snapshots")

@@ -444,8 +419,7 @@ final class MLRequestConverters {
static Request getOverallBuckets(GetOverallBucketsRequest getOverallBucketsRequest) throws IOException {
String endpoint = new EndpointBuilder()
- .addPathPartAsIs("_xpack")
- .addPathPartAsIs("ml")
+ .addPathPartAsIs("_ml")
.addPathPartAsIs("anomaly_detectors")
.addPathPart(Strings.collectionToCommaDelimitedString(getOverallBucketsRequest.getJobIds()))
.addPathPartAsIs("results")

@@ -458,8 +432,7 @@ final class MLRequestConverters {
static Request getRecords(GetRecordsRequest getRecordsRequest) throws IOException {
String endpoint = new EndpointBuilder()
- .addPathPartAsIs("_xpack")
- .addPathPartAsIs("ml")
+ .addPathPartAsIs("_ml")
.addPathPartAsIs("anomaly_detectors")
.addPathPart(getRecordsRequest.getJobId())
.addPathPartAsIs("results")

@@ -472,8 +445,7 @@ final class MLRequestConverters {
static Request postData(PostDataRequest postDataRequest) {
String endpoint = new EndpointBuilder()
- .addPathPartAsIs("_xpack")
- .addPathPartAsIs("ml")
+ .addPathPartAsIs("_ml")
.addPathPartAsIs("anomaly_detectors")
.addPathPart(postDataRequest.getJobId())
.addPathPartAsIs("_data")

@@ -501,8 +473,7 @@ final class MLRequestConverters {
static Request getInfluencers(GetInfluencersRequest getInfluencersRequest) throws IOException {
String endpoint = new EndpointBuilder()
- .addPathPartAsIs("_xpack")
- .addPathPartAsIs("ml")
+ .addPathPartAsIs("_ml")
.addPathPartAsIs("anomaly_detectors")
.addPathPart(getInfluencersRequest.getJobId())
.addPathPartAsIs("results")

@@ -515,8 +486,7 @@ final class MLRequestConverters {
static Request putCalendar(PutCalendarRequest putCalendarRequest) throws IOException {
String endpoint = new EndpointBuilder()
- .addPathPartAsIs("_xpack")
- .addPathPartAsIs("ml")
+ .addPathPartAsIs("_ml")
.addPathPartAsIs("calendars")
.addPathPart(putCalendarRequest.getCalendar().getId())
.build();

@@ -527,8 +497,7 @@ final class MLRequestConverters {
static Request getCalendars(GetCalendarsRequest getCalendarsRequest) throws IOException {
String endpoint = new EndpointBuilder()
- .addPathPartAsIs("_xpack")
- .addPathPartAsIs("ml")
+ .addPathPartAsIs("_ml")
.addPathPartAsIs("calendars")
.addPathPart(getCalendarsRequest.getCalendarId())
.build();

@@ -539,8 +508,7 @@ final class MLRequestConverters {
static Request putCalendarJob(PutCalendarJobRequest putCalendarJobRequest) {
String endpoint = new EndpointBuilder()
- .addPathPartAsIs("_xpack")
- .addPathPartAsIs("ml")
+ .addPathPartAsIs("_ml")
.addPathPartAsIs("calendars")
.addPathPart(putCalendarJobRequest.getCalendarId())
.addPathPartAsIs("jobs")

@@ -551,8 +519,7 @@ final class MLRequestConverters {
static Request deleteCalendarJob(DeleteCalendarJobRequest deleteCalendarJobRequest) {
String endpoint = new EndpointBuilder()
- .addPathPartAsIs("_xpack")
- .addPathPartAsIs("ml")
+ .addPathPartAsIs("_ml")
.addPathPartAsIs("calendars")
.addPathPart(deleteCalendarJobRequest.getCalendarId())
.addPathPartAsIs("jobs")

@@ -563,8 +530,7 @@ final class MLRequestConverters {
static Request deleteCalendar(DeleteCalendarRequest deleteCalendarRequest) {
String endpoint = new EndpointBuilder()
- .addPathPartAsIs("_xpack")
- .addPathPartAsIs("ml")
+ .addPathPartAsIs("_ml")
.addPathPartAsIs("calendars")
.addPathPart(deleteCalendarRequest.getCalendarId())
.build();

@@ -574,8 +540,7 @@ final class MLRequestConverters {
static Request getCalendarEvents(GetCalendarEventsRequest getCalendarEventsRequest) throws IOException {
String endpoint = new EndpointBuilder()
- .addPathPartAsIs("_xpack")
- .addPathPartAsIs("ml")
+ .addPathPartAsIs("_ml")
.addPathPartAsIs("calendars")
.addPathPart(getCalendarEventsRequest.getCalendarId())
.addPathPartAsIs("events")

@@ -587,8 +552,7 @@ final class MLRequestConverters {
static Request postCalendarEvents(PostCalendarEventRequest postCalendarEventRequest) throws IOException {
String endpoint = new EndpointBuilder()
- .addPathPartAsIs("_xpack")
- .addPathPartAsIs("ml")
+ .addPathPartAsIs("_ml")
.addPathPartAsIs("calendars")
.addPathPart(postCalendarEventRequest.getCalendarId())
.addPathPartAsIs("events")

@@ -602,8 +566,7 @@ final class MLRequestConverters {
static Request deleteCalendarEvent(DeleteCalendarEventRequest deleteCalendarEventRequest) {
String endpoint = new EndpointBuilder()
- .addPathPartAsIs("_xpack")
- .addPathPartAsIs("ml")
+ .addPathPartAsIs("_ml")
.addPathPartAsIs("calendars")
.addPathPart(deleteCalendarEventRequest.getCalendarId())
.addPathPartAsIs("events")

@@ -614,8 +577,7 @@ final class MLRequestConverters {
static Request putFilter(PutFilterRequest putFilterRequest) throws IOException {
String endpoint = new EndpointBuilder()
- .addPathPartAsIs("_xpack")
- .addPathPartAsIs("ml")
+ .addPathPartAsIs("_ml")
.addPathPartAsIs("filters")
.addPathPart(putFilterRequest.getMlFilter().getId())
.build();

@@ -626,8 +588,7 @@ final class MLRequestConverters {
static Request getFilter(GetFiltersRequest getFiltersRequest) {
String endpoint = new EndpointBuilder()
- .addPathPartAsIs("_xpack")
- .addPathPartAsIs("ml")
+ .addPathPartAsIs("_ml")
.addPathPartAsIs("filters")
.addPathPart(getFiltersRequest.getFilterId())
.build();

@@ -644,8 +605,7 @@ final class MLRequestConverters {
static Request updateFilter(UpdateFilterRequest updateFilterRequest) throws IOException {
String endpoint = new EndpointBuilder()
- .addPathPartAsIs("_xpack")
- .addPathPartAsIs("ml")
+ .addPathPartAsIs("_ml")
.addPathPartAsIs("filters")
.addPathPart(updateFilterRequest.getFilterId())
.addPathPartAsIs("_update")

@@ -657,7 +617,7 @@ final class MLRequestConverters {
static Request deleteFilter(DeleteFilterRequest deleteFilterRequest) {
String endpoint = new EndpointBuilder()
- .addPathPartAsIs("_xpack", "ml", "filters")
+ .addPathPartAsIs("_ml", "filters")
.addPathPart(deleteFilterRequest.getId())
.build();
Request request = new Request(HttpDelete.METHOD_NAME, endpoint);

@@ -666,15 +626,14 @@ final class MLRequestConverters {
static Request mlInfo(MlInfoRequest infoRequest) {
String endpoint = new EndpointBuilder()
- .addPathPartAsIs("_xpack", "ml", "info")
+ .addPathPartAsIs("_ml", "info")
.build();
return new Request(HttpGet.METHOD_NAME, endpoint);
}

static Request findFileStructure(FindFileStructureRequest findFileStructureRequest) {
String endpoint = new EndpointBuilder()
- .addPathPartAsIs("_xpack")
- .addPathPartAsIs("ml")
+ .addPathPartAsIs("_ml")
.addPathPartAsIs("find_file_structure")
.build();
Request request = new Request(HttpPost.METHOD_NAME, endpoint);
@@ -112,7 +112,7 @@ public class MLRequestConvertersTests extends ESTestCase {
Request request = MLRequestConverters.putJob(putJobRequest);

assertEquals(HttpPut.METHOD_NAME, request.getMethod());
- assertThat(request.getEndpoint(), equalTo("/_xpack/ml/anomaly_detectors/foo"));
+ assertThat(request.getEndpoint(), equalTo("/_ml/anomaly_detectors/foo"));
try (XContentParser parser = createParser(JsonXContent.jsonXContent, request.getEntity().getContent())) {
Job parsedJob = Job.PARSER.apply(parser, null).build();
assertThat(parsedJob, equalTo(job));

@@ -125,14 +125,14 @@ public class MLRequestConvertersTests extends ESTestCase {
Request request = MLRequestConverters.getJob(getJobRequest);

assertEquals(HttpGet.METHOD_NAME, request.getMethod());
- assertEquals("/_xpack/ml/anomaly_detectors", request.getEndpoint());
+ assertEquals("/_ml/anomaly_detectors", request.getEndpoint());
assertFalse(request.getParameters().containsKey("allow_no_jobs"));

getJobRequest = new GetJobRequest("job1", "jobs*");
getJobRequest.setAllowNoJobs(true);
request = MLRequestConverters.getJob(getJobRequest);

- assertEquals("/_xpack/ml/anomaly_detectors/job1,jobs*", request.getEndpoint());
+ assertEquals("/_ml/anomaly_detectors/job1,jobs*", request.getEndpoint());
assertEquals(Boolean.toString(true), request.getParameters().get("allow_no_jobs"));
}

@@ -142,14 +142,14 @@ public class MLRequestConvertersTests extends ESTestCase {
Request request = MLRequestConverters.getJobStats(getJobStatsRequestRequest);

assertEquals(HttpGet.METHOD_NAME, request.getMethod());
- assertEquals("/_xpack/ml/anomaly_detectors/_stats", request.getEndpoint());
+ assertEquals("/_ml/anomaly_detectors/_stats", request.getEndpoint());
assertFalse(request.getParameters().containsKey("allow_no_jobs"));

getJobStatsRequestRequest = new GetJobStatsRequest("job1", "jobs*");
getJobStatsRequestRequest.setAllowNoJobs(true);
request = MLRequestConverters.getJobStats(getJobStatsRequestRequest);

- assertEquals("/_xpack/ml/anomaly_detectors/job1,jobs*/_stats", request.getEndpoint());
+ assertEquals("/_ml/anomaly_detectors/job1,jobs*/_stats", request.getEndpoint());
assertEquals(Boolean.toString(true), request.getParameters().get("allow_no_jobs"));
}

@@ -161,7 +161,7 @@ public class MLRequestConvertersTests extends ESTestCase {
Request request = MLRequestConverters.openJob(openJobRequest);
assertEquals(HttpPost.METHOD_NAME, request.getMethod());
- assertEquals("/_xpack/ml/anomaly_detectors/" + jobId + "/_open", request.getEndpoint());
+ assertEquals("/_ml/anomaly_detectors/" + jobId + "/_open", request.getEndpoint());
assertEquals(requestEntityToString(request), "{\"job_id\":\""+ jobId +"\",\"timeout\":\"10m\"}");
}

@@ -171,7 +171,7 @@ public class MLRequestConvertersTests extends ESTestCase {
Request request = MLRequestConverters.closeJob(closeJobRequest);
assertEquals(HttpPost.METHOD_NAME, request.getMethod());
- assertEquals("/_xpack/ml/anomaly_detectors/" + jobId + "/_close", request.getEndpoint());
+ assertEquals("/_ml/anomaly_detectors/" + jobId + "/_close", request.getEndpoint());
assertEquals("{\"job_id\":\"somejobid\"}", requestEntityToString(request));

closeJobRequest = new CloseJobRequest(jobId, "otherjobs*");

@@ -180,7 +180,7 @@ public class MLRequestConvertersTests extends ESTestCase {
closeJobRequest.setTimeout(TimeValue.timeValueMinutes(10));
request = MLRequestConverters.closeJob(closeJobRequest);

- assertEquals("/_xpack/ml/anomaly_detectors/" + jobId + ",otherjobs*/_close", request.getEndpoint());
+ assertEquals("/_ml/anomaly_detectors/" + jobId + ",otherjobs*/_close", request.getEndpoint());
assertEquals("{\"job_id\":\"somejobid,otherjobs*\",\"timeout\":\"10m\",\"force\":true,\"allow_no_jobs\":false}",
requestEntityToString(request));
}

@@ -190,7 +190,7 @@ public class MLRequestConvertersTests extends ESTestCase {
Request request = MLRequestConverters.deleteExpiredData(deleteExpiredDataRequest);
assertEquals(HttpDelete.METHOD_NAME, request.getMethod());
- assertEquals("/_xpack/ml/_delete_expired_data", request.getEndpoint());
+ assertEquals("/_ml/_delete_expired_data", request.getEndpoint());
}

public void testDeleteJob() {

@@ -199,7 +199,7 @@ public class MLRequestConvertersTests extends ESTestCase {
Request request = MLRequestConverters.deleteJob(deleteJobRequest);
assertEquals(HttpDelete.METHOD_NAME, request.getMethod());
- assertEquals("/_xpack/ml/anomaly_detectors/" + jobId, request.getEndpoint());
+ assertEquals("/_ml/anomaly_detectors/" + jobId, request.getEndpoint());
assertNull(request.getParameters().get("force"));
assertNull(request.getParameters().get("wait_for_completion"));

@@ -220,7 +220,7 @@ public class MLRequestConvertersTests extends ESTestCase {
Request request = MLRequestConverters.flushJob(flushJobRequest);
assertEquals(HttpPost.METHOD_NAME, request.getMethod());
- assertEquals("/_xpack/ml/anomaly_detectors/" + jobId + "/_flush", request.getEndpoint());
+ assertEquals("/_ml/anomaly_detectors/" + jobId + "/_flush", request.getEndpoint());
assertEquals("{\"job_id\":\"" + jobId + "\"}", requestEntityToString(request));

flushJobRequest.setSkipTime("1000");

@@ -243,7 +243,7 @@ public class MLRequestConvertersTests extends ESTestCase {
forecastJobRequest.setExpiresIn(TimeValue.timeValueHours(12));
Request request = MLRequestConverters.forecastJob(forecastJobRequest);
assertEquals(HttpPost.METHOD_NAME, request.getMethod());
- assertEquals("/_xpack/ml/anomaly_detectors/" + jobId + "/_forecast", request.getEndpoint());
+ assertEquals("/_ml/anomaly_detectors/" + jobId + "/_forecast", request.getEndpoint());
try (XContentParser parser = createParser(JsonXContent.jsonXContent, request.getEntity().getContent())) {
ForecastJobRequest parsedRequest = ForecastJobRequest.PARSER.apply(parser, null);
assertThat(parsedRequest, equalTo(forecastJobRequest));

@@ -257,7 +257,7 @@ public class MLRequestConvertersTests extends ESTestCase {
Request request = MLRequestConverters.updateJob(updateJobRequest);
assertEquals(HttpPost.METHOD_NAME, request.getMethod());
- assertEquals("/_xpack/ml/anomaly_detectors/" + jobId + "/_update", request.getEndpoint());
+ assertEquals("/_ml/anomaly_detectors/" + jobId + "/_update", request.getEndpoint());
try (XContentParser parser = createParser(JsonXContent.jsonXContent, request.getEntity().getContent())) {
JobUpdate.Builder parsedRequest = JobUpdate.PARSER.apply(parser, null);
assertThat(parsedRequest.build(), equalTo(updates));

@@ -271,7 +271,7 @@ public class MLRequestConvertersTests extends ESTestCase {
Request request = MLRequestConverters.putDatafeed(putDatafeedRequest);

assertEquals(HttpPut.METHOD_NAME, request.getMethod());
- assertThat(request.getEndpoint(), equalTo("/_xpack/ml/datafeeds/" + datafeed.getId()));
+ assertThat(request.getEndpoint(), equalTo("/_ml/datafeeds/" + datafeed.getId()));
try (XContentParser parser = createParser(JsonXContent.jsonXContent, request.getEntity().getContent())) {
DatafeedConfig parsedDatafeed = DatafeedConfig.PARSER.apply(parser, null).build();
assertThat(parsedDatafeed, equalTo(datafeed));

@@ -284,14 +284,14 @@ public class MLRequestConvertersTests extends ESTestCase {
Request request = MLRequestConverters.getDatafeed(getDatafeedRequest);

assertEquals(HttpGet.METHOD_NAME, request.getMethod());
- assertEquals("/_xpack/ml/datafeeds", request.getEndpoint());
+ assertEquals("/_ml/datafeeds", request.getEndpoint());
assertFalse(request.getParameters().containsKey("allow_no_datafeeds"));

getDatafeedRequest = new GetDatafeedRequest("feed-1", "feed-*");
getDatafeedRequest.setAllowNoDatafeeds(true);
request = MLRequestConverters.getDatafeed(getDatafeedRequest);

- assertEquals("/_xpack/ml/datafeeds/feed-1,feed-*", request.getEndpoint());
+ assertEquals("/_ml/datafeeds/feed-1,feed-*", request.getEndpoint());
assertEquals(Boolean.toString(true), request.getParameters().get("allow_no_datafeeds"));
}

@@ -301,7 +301,7 @@ public class MLRequestConvertersTests extends ESTestCase {
Request request = MLRequestConverters.deleteDatafeed(deleteDatafeedRequest);
assertEquals(HttpDelete.METHOD_NAME, request.getMethod());
- assertEquals("/_xpack/ml/datafeeds/" + datafeedId, request.getEndpoint());
+ assertEquals("/_ml/datafeeds/" + datafeedId, request.getEndpoint());
assertFalse(request.getParameters().containsKey("force"));

deleteDatafeedRequest.setForce(true);

@@ -315,7 +315,7 @@ public class MLRequestConvertersTests extends ESTestCase {
Request request = MLRequestConverters.startDatafeed(datafeedRequest);
assertEquals(HttpPost.METHOD_NAME, request.getMethod());
- assertEquals("/_xpack/ml/datafeeds/" + datafeedId + "/_start", request.getEndpoint());
+ assertEquals("/_ml/datafeeds/" + datafeedId + "/_start", request.getEndpoint());
try (XContentParser parser = createParser(JsonXContent.jsonXContent, request.getEntity().getContent())) {
StartDatafeedRequest parsedDatafeedRequest = StartDatafeedRequest.PARSER.apply(parser, null);
assertThat(parsedDatafeedRequest, equalTo(datafeedRequest));

@@ -329,7 +329,7 @@ public class MLRequestConvertersTests extends ESTestCase {
datafeedRequest.setAllowNoDatafeeds(true);
Request request = MLRequestConverters.stopDatafeed(datafeedRequest);
assertEquals(HttpPost.METHOD_NAME, request.getMethod());
- assertEquals("/_xpack/ml/datafeeds/" +
+ assertEquals("/_ml/datafeeds/" +
Strings.collectionToCommaDelimitedString(datafeedRequest.getDatafeedIds()) +
"/_stop", request.getEndpoint());
try (XContentParser parser = createParser(JsonXContent.jsonXContent, request.getEntity().getContent())) {

@@ -344,14 +344,14 @@ public class MLRequestConvertersTests extends ESTestCase {
Request request = MLRequestConverters.getDatafeedStats(getDatafeedStatsRequestRequest);

assertEquals(HttpGet.METHOD_NAME, request.getMethod());
- assertEquals("/_xpack/ml/datafeeds/_stats", request.getEndpoint());
+ assertEquals("/_ml/datafeeds/_stats", request.getEndpoint());
assertFalse(request.getParameters().containsKey("allow_no_datafeeds"));

getDatafeedStatsRequestRequest = new GetDatafeedStatsRequest("datafeed1", "datafeeds*");
getDatafeedStatsRequestRequest.setAllowNoDatafeeds(true);
request = MLRequestConverters.getDatafeedStats(getDatafeedStatsRequestRequest);

- assertEquals("/_xpack/ml/datafeeds/datafeed1,datafeeds*/_stats", request.getEndpoint());
+ assertEquals("/_ml/datafeeds/datafeed1,datafeeds*/_stats", request.getEndpoint());
assertEquals(Boolean.toString(true), request.getParameters().get("allow_no_datafeeds"));
}

@@ -359,7 +359,7 @@ public class MLRequestConvertersTests extends ESTestCase {
PreviewDatafeedRequest datafeedRequest = new PreviewDatafeedRequest("datafeed_1");
Request request = MLRequestConverters.previewDatafeed(datafeedRequest);
assertEquals(HttpGet.METHOD_NAME, request.getMethod());
- assertEquals("/_xpack/ml/datafeeds/" + datafeedRequest.getDatafeedId() + "/_preview", request.getEndpoint());
+ assertEquals("/_ml/datafeeds/" + datafeedRequest.getDatafeedId() + "/_preview", request.getEndpoint());
}

public void testDeleteForecast() {

@@ -368,7 +368,7 @@ public class MLRequestConvertersTests extends ESTestCase {
Request request = MLRequestConverters.deleteForecast(deleteForecastRequest);
assertEquals(HttpDelete.METHOD_NAME, request.getMethod());
- assertEquals("/_xpack/ml/anomaly_detectors/" + jobId + "/_forecast", request.getEndpoint());
+ assertEquals("/_ml/anomaly_detectors/" + jobId + "/_forecast", request.getEndpoint());
assertFalse(request.getParameters().containsKey("timeout"));
assertFalse(request.getParameters().containsKey("allow_no_forecasts"));

@@ -378,7 +378,7 @@ public class MLRequestConvertersTests extends ESTestCase {
request = MLRequestConverters.deleteForecast(deleteForecastRequest);
assertEquals(
- "/_xpack/ml/anomaly_detectors/" +
+ "/_ml/anomaly_detectors/" +
jobId +
"/_forecast/" +
Strings.collectionToCommaDelimitedString(deleteForecastRequest.getForecastIds()),

@@ -396,7 +396,7 @@ public class MLRequestConvertersTests extends ESTestCase {
Request request = MLRequestConverters.deleteModelSnapshot(deleteModelSnapshotRequest);
assertEquals(HttpDelete.METHOD_NAME, request.getMethod());
- assertEquals("/_xpack/ml/anomaly_detectors/" + jobId + "/model_snapshots/" + snapshotId, request.getEndpoint());
+ assertEquals("/_ml/anomaly_detectors/" + jobId + "/model_snapshots/" + snapshotId, request.getEndpoint());
}

public void testGetBuckets() throws IOException {

@@ -409,7 +409,7 @@ public class MLRequestConvertersTests extends ESTestCase {
Request request = MLRequestConverters.getBuckets(getBucketsRequest);
assertEquals(HttpGet.METHOD_NAME, request.getMethod());
- assertEquals("/_xpack/ml/anomaly_detectors/" + jobId + "/results/buckets", request.getEndpoint());
+ assertEquals("/_ml/anomaly_detectors/" + jobId + "/results/buckets", request.getEndpoint());
try (XContentParser parser = createParser(JsonXContent.jsonXContent, request.getEntity().getContent())) {
GetBucketsRequest parsedRequest = GetBucketsRequest.PARSER.apply(parser, null);
assertThat(parsedRequest, equalTo(getBucketsRequest));

@@ -424,7 +424,7 @@ public class MLRequestConvertersTests extends ESTestCase {
Request request = MLRequestConverters.getCategories(getCategoriesRequest);
assertEquals(HttpGet.METHOD_NAME, request.getMethod());
- assertEquals("/_xpack/ml/anomaly_detectors/" + jobId + "/results/categories", request.getEndpoint());
+ assertEquals("/_ml/anomaly_detectors/" + jobId + "/results/categories", request.getEndpoint());
try (XContentParser parser = createParser(JsonXContent.jsonXContent, request.getEntity().getContent())) {
GetCategoriesRequest parsedRequest = GetCategoriesRequest.PARSER.apply(parser, null);
assertThat(parsedRequest, equalTo(getCategoriesRequest));

@@ -439,7 +439,7 @@ public class MLRequestConvertersTests extends ESTestCase {
Request request = MLRequestConverters.getModelSnapshots(getModelSnapshotsRequest);
assertEquals(HttpGet.METHOD_NAME, request.getMethod());
- assertEquals("/_xpack/ml/anomaly_detectors/" + jobId + "/model_snapshots", request.getEndpoint());
+ assertEquals("/_ml/anomaly_detectors/" + jobId + "/model_snapshots", request.getEndpoint());
try (XContentParser parser = createParser(JsonXContent.jsonXContent, request.getEntity().getContent())) {
GetModelSnapshotsRequest parsedRequest = GetModelSnapshotsRequest.PARSER.apply(parser, null);
assertThat(parsedRequest, equalTo(getModelSnapshotsRequest));

@@ -455,7 +455,7 @@ public class MLRequestConvertersTests extends ESTestCase {
Request request = MLRequestConverters.updateModelSnapshot(updateModelSnapshotRequest);
assertEquals(HttpPost.METHOD_NAME, request.getMethod());
- assertEquals("/_xpack/ml/anomaly_detectors/" + jobId + "/model_snapshots/" + snapshotId + "/_update", request.getEndpoint());
+ assertEquals("/_ml/anomaly_detectors/" + jobId + "/model_snapshots/" + snapshotId + "/_update", request.getEndpoint());
try (XContentParser parser = createParser(JsonXContent.jsonXContent, request.getEntity().getContent())) {
UpdateModelSnapshotRequest parsedRequest = UpdateModelSnapshotRequest.PARSER.apply(parser, null);
assertThat(parsedRequest, equalTo(updateModelSnapshotRequest));

@@ -472,7 +472,7 @@ public class MLRequestConvertersTests extends ESTestCase {
Request request = MLRequestConverters.revertModelSnapshot(revertModelSnapshotRequest);
assertEquals(HttpPost.METHOD_NAME, request.getMethod());
- assertEquals("/_xpack/ml/anomaly_detectors/" + jobId + "/model_snapshots/" + snapshotId + "/_revert",
+ assertEquals("/_ml/anomaly_detectors/" + jobId + "/model_snapshots/" + snapshotId + "/_revert",
request.getEndpoint());
try (XContentParser parser = createParser(JsonXContent.jsonXContent, request.getEntity().getContent())) {
RevertModelSnapshotRequest parsedRequest = RevertModelSnapshotRequest.PARSER.apply(parser, null);

@@ -491,7 +491,7 @@ public class MLRequestConvertersTests extends ESTestCase {
Request request = MLRequestConverters.getOverallBuckets(getOverallBucketsRequest);
assertEquals(HttpGet.METHOD_NAME, request.getMethod());
- assertEquals("/_xpack/ml/anomaly_detectors/" + jobId + "/results/overall_buckets", request.getEndpoint());
+ assertEquals("/_ml/anomaly_detectors/" + jobId + "/results/overall_buckets", request.getEndpoint());
try (XContentParser parser = createParser(JsonXContent.jsonXContent, request.getEntity().getContent())) {
GetOverallBucketsRequest parsedRequest = GetOverallBucketsRequest.PARSER.apply(parser, null);
assertThat(parsedRequest, equalTo(getOverallBucketsRequest));

@@ -511,7 +511,7 @@ public class MLRequestConvertersTests extends ESTestCase {
Request request = MLRequestConverters.getRecords(getRecordsRequest);
assertEquals(HttpGet.METHOD_NAME, request.getMethod());
- assertEquals("/_xpack/ml/anomaly_detectors/" + jobId + "/results/records", request.getEndpoint());
+ assertEquals("/_ml/anomaly_detectors/" + jobId + "/results/records", request.getEndpoint());
try (XContentParser parser = createParser(JsonXContent.jsonXContent, request.getEntity().getContent())) {
GetRecordsRequest parsedRequest = GetRecordsRequest.PARSER.apply(parser, null);
assertThat(parsedRequest, equalTo(getRecordsRequest));

@@ -529,7 +529,7 @@ public class MLRequestConvertersTests extends ESTestCase {
Request request = MLRequestConverters.postData(postDataRequest);

assertEquals(HttpPost.METHOD_NAME, request.getMethod());
- assertEquals("/_xpack/ml/anomaly_detectors/" + jobId + "/_data", request.getEndpoint());
+ assertEquals("/_ml/anomaly_detectors/" + jobId + "/_data", request.getEndpoint());
assertEquals("{\"foo\":\"bar\"}", requestEntityToString(request));
assertEquals(postDataRequest.getXContentType().mediaTypeWithoutParameters(), request.getEntity().getContentType().getValue());
assertFalse(request.getParameters().containsKey(PostDataRequest.RESET_END.getPreferredName()));

@@ -559,7 +559,7 @@ public class MLRequestConvertersTests extends ESTestCase {
Request request = MLRequestConverters.getInfluencers(getInfluencersRequest);
assertEquals(HttpGet.METHOD_NAME, request.getMethod());
- assertEquals("/_xpack/ml/anomaly_detectors/" + jobId + "/results/influencers", request.getEndpoint());
+ assertEquals("/_ml/anomaly_detectors/" + jobId + "/results/influencers", request.getEndpoint());
try (XContentParser parser = createParser(JsonXContent.jsonXContent, request.getEntity().getContent())) {
GetInfluencersRequest parsedRequest = GetInfluencersRequest.PARSER.apply(parser, null);
assertThat(parsedRequest, equalTo(getInfluencersRequest));

@@ -570,7 +570,7 @@ public class MLRequestConvertersTests extends ESTestCase {
PutCalendarRequest putCalendarRequest = new PutCalendarRequest(CalendarTests.testInstance());
Request request = MLRequestConverters.putCalendar(putCalendarRequest);
assertEquals(HttpPut.METHOD_NAME, request.getMethod());
- assertEquals("/_xpack/ml/calendars/" + putCalendarRequest.getCalendar().getId(), request.getEndpoint());
+ assertEquals("/_ml/calendars/" + putCalendarRequest.getCalendar().getId(), request.getEndpoint());
try (XContentParser parser = createParser(JsonXContent.jsonXContent, request.getEntity().getContent())) {
Calendar parsedCalendar = Calendar.PARSER.apply(parser, null);
assertThat(parsedCalendar, equalTo(putCalendarRequest.getCalendar()));

@@ -584,7 +584,7 @@ public class MLRequestConvertersTests extends ESTestCase {
PutCalendarJobRequest putCalendarJobRequest = new PutCalendarJobRequest(calendarId, job1, job2);
Request request = MLRequestConverters.putCalendarJob(putCalendarJobRequest);
assertEquals(HttpPut.METHOD_NAME, request.getMethod());
- assertEquals("/_xpack/ml/calendars/" + calendarId + "/jobs/" + job1 + "," + job2, request.getEndpoint());
+ assertEquals("/_ml/calendars/" + calendarId + "/jobs/" + job1 + "," + job2, request.getEndpoint());
}

public void testDeleteCalendarJob() {

@@ -594,12 +594,12 @@ public class MLRequestConvertersTests extends ESTestCase {
DeleteCalendarJobRequest deleteCalendarJobRequest = new DeleteCalendarJobRequest(calendarId, job1, job2);
Request request = MLRequestConverters.deleteCalendarJob(deleteCalendarJobRequest);
assertEquals(HttpDelete.METHOD_NAME, request.getMethod());
- assertEquals("/_xpack/ml/calendars/" + calendarId + "/jobs/" + job1 + "," + job2, request.getEndpoint());
+ assertEquals("/_ml/calendars/" + calendarId + "/jobs/" + job1 + "," + job2, request.getEndpoint());
}

public void testGetCalendars() throws IOException {
GetCalendarsRequest getCalendarsRequest = new GetCalendarsRequest();
- String expectedEndpoint = "/_xpack/ml/calendars";
+ String expectedEndpoint = "/_ml/calendars";

if (randomBoolean()) {
String calendarId = randomAlphaOfLength(10);

@@ -623,7 +623,7 @@ public class MLRequestConvertersTests extends ESTestCase {
DeleteCalendarRequest deleteCalendarRequest = new DeleteCalendarRequest(randomAlphaOfLength(10));
Request request = MLRequestConverters.deleteCalendar(deleteCalendarRequest);
assertEquals(HttpDelete.METHOD_NAME, request.getMethod());
- assertEquals("/_xpack/ml/calendars/" + deleteCalendarRequest.getCalendarId(), request.getEndpoint());
+ assertEquals("/_ml/calendars/" + deleteCalendarRequest.getCalendarId(), request.getEndpoint());
}

public void testGetCalendarEvents() throws IOException {

@@ -636,7 +636,7 @@ public class MLRequestConvertersTests extends ESTestCase {
Request request = MLRequestConverters.getCalendarEvents(getCalendarEventsRequest);
assertEquals(HttpGet.METHOD_NAME, request.getMethod());
- assertEquals("/_xpack/ml/calendars/" + calendarId + "/events", request.getEndpoint());
+ assertEquals("/_ml/calendars/" + calendarId + "/events", request.getEndpoint());
try (XContentParser parser = createParser(JsonXContent.jsonXContent, request.getEntity().getContent())) {
GetCalendarEventsRequest parsedRequest = GetCalendarEventsRequest.PARSER.apply(parser, null);
assertThat(parsedRequest, equalTo(getCalendarEventsRequest));

@@ -652,7 +652,7 @@ public class MLRequestConvertersTests extends ESTestCase {
Request request = MLRequestConverters.postCalendarEvents(postCalendarEventRequest);
assertEquals(HttpPost.METHOD_NAME, request.getMethod());
- assertEquals("/_xpack/ml/calendars/" + calendarId + "/events", request.getEndpoint());
+ assertEquals("/_ml/calendars/" + calendarId + "/events", request.getEndpoint());

XContentBuilder builder = JsonXContent.contentBuilder();
builder = postCalendarEventRequest.toXContent(builder, PostCalendarEventRequest.EXCLUDE_CALENDAR_ID_PARAMS);

@@ -665,7 +665,7 @@ public class MLRequestConvertersTests extends ESTestCase {
DeleteCalendarEventRequest deleteCalendarEventRequest = new DeleteCalendarEventRequest(calendarId, eventId);
Request request = MLRequestConverters.deleteCalendarEvent(deleteCalendarEventRequest);
assertEquals(HttpDelete.METHOD_NAME, request.getMethod());
- assertEquals("/_xpack/ml/calendars/" + calendarId + "/events/" + eventId, request.getEndpoint());
+ assertEquals("/_ml/calendars/" + calendarId + "/events/" + eventId, request.getEndpoint());
}

public void testPutFilter() throws IOException {

@@ -675,7 +675,7 @@ public class MLRequestConvertersTests extends ESTestCase {
Request request = MLRequestConverters.putFilter(putFilterRequest);

assertEquals(HttpPut.METHOD_NAME, request.getMethod());
- assertThat(request.getEndpoint(), equalTo("/_xpack/ml/filters/foo"));
+ assertThat(request.getEndpoint(), equalTo("/_ml/filters/foo"));
try (XContentParser parser = createParser(JsonXContent.jsonXContent, request.getEntity().getContent())) {
MlFilter parsedFilter = MlFilter.PARSER.apply(parser, null).build();
assertThat(parsedFilter, equalTo(filter));

@@ -690,7 +690,7 @@ public class MLRequestConvertersTests extends ESTestCase {
Request request = MLRequestConverters.getFilter(getFiltersRequest);
assertEquals(HttpGet.METHOD_NAME, request.getMethod());
- assertEquals("/_xpack/ml/filters/" + id, request.getEndpoint());
+ assertEquals("/_ml/filters/" + id, request.getEndpoint());
assertThat(request.getParameters().get(PageParams.FROM.getPreferredName()), is(nullValue()));
assertThat(request.getParameters().get(PageParams.SIZE.getPreferredName()), is(nullValue()));

@@ -711,7 +711,7 @@ public class MLRequestConvertersTests extends ESTestCase {
Request request = MLRequestConverters.updateFilter(updateFilterRequest);

assertEquals(HttpPost.METHOD_NAME, request.getMethod());
- assertThat(request.getEndpoint(), equalTo("/_xpack/ml/filters/"+filterId+"/_update"));
+ assertThat(request.getEndpoint(), equalTo("/_ml/filters/"+filterId+"/_update"));
try (XContentParser parser = createParser(JsonXContent.jsonXContent, request.getEntity().getContent())) {
UpdateFilterRequest parsedFilterRequest = UpdateFilterRequest.PARSER.apply(parser, null);
assertThat(parsedFilterRequest, equalTo(updateFilterRequest));

@@ -725,7 +725,7 @@ public class MLRequestConvertersTests extends ESTestCase {
Request request = MLRequestConverters.deleteFilter(deleteFilterRequest);

assertEquals(HttpDelete.METHOD_NAME, request.getMethod());
- assertThat(request.getEndpoint(), equalTo("/_xpack/ml/filters/foo"));
+ assertThat(request.getEndpoint(), equalTo("/_ml/filters/foo"));
assertNull(request.getEntity());
}

@@ -735,7 +735,7 @@ public class MLRequestConvertersTests extends ESTestCase {
Request request = MLRequestConverters.mlInfo(infoRequest);

assertEquals(HttpGet.METHOD_NAME, request.getMethod());
- assertThat(request.getEndpoint(), equalTo("/_xpack/ml/info"));
+ assertThat(request.getEndpoint(), equalTo("/_ml/info"));
assertNull(request.getEntity());
}

@@ -747,7 +747,7 @@ public class MLRequestConvertersTests extends ESTestCase {
Request request = MLRequestConverters.findFileStructure(findFileStructureRequest);

assertEquals(HttpPost.METHOD_NAME, request.getMethod());
- assertEquals("/_xpack/ml/find_file_structure", request.getEndpoint());
+ assertEquals("/_ml/find_file_structure", request.getEndpoint());
if (findFileStructureRequest.getLinesToSample() != null) {
assertEquals(findFileStructureRequest.getLinesToSample(), Integer.valueOf(request.getParameters().get("lines_to_sample")));
} else {
@@ -24,7 +24,7 @@ example:

[source,js]
----------------------------------
- PUT _xpack/ml/anomaly_detectors/farequote
+ PUT _ml/anomaly_detectors/farequote
{
"analysis_config": {
"bucket_span": "60m",

@@ -55,7 +55,7 @@ The aggregations are defined in the {dfeed} as follows:

[source,js]
----------------------------------
- PUT _xpack/ml/datafeeds/datafeed-farequote
+ PUT _ml/datafeeds/datafeed-farequote
{
"job_id":"farequote",
"indices": ["farequote"],
@@ -15,11 +15,11 @@ operations, but you can still explore and navigate results.

==== Request

- `POST _xpack/ml/anomaly_detectors/<job_id>/_close` +
+ `POST _ml/anomaly_detectors/<job_id>/_close` +

- `POST _xpack/ml/anomaly_detectors/<job_id>,<job_id>/_close` +
+ `POST _ml/anomaly_detectors/<job_id>,<job_id>/_close` +

- `POST _xpack/ml/anomaly_detectors/_all/_close` +
+ `POST _ml/anomaly_detectors/_all/_close` +


==== Description

@@ -78,7 +78,7 @@ The following example closes the `total-requests` job:

[source,js]
--------------------------------------------------
- POST _xpack/ml/anomaly_detectors/total-requests/_close
+ POST _ml/anomaly_detectors/total-requests/_close
--------------------------------------------------
// CONSOLE
// TEST[skip:setup:server_metrics_openjob]
@@ -11,7 +11,7 @@ Deletes scheduled events from a calendar.

==== Request

- `DELETE _xpack/ml/calendars/<calendar_id>/events/<event_id>`
+ `DELETE _ml/calendars/<calendar_id>/events/<event_id>`


==== Description

@@ -42,7 +42,7 @@ calendar:

[source,js]
--------------------------------------------------
- DELETE _xpack/ml/calendars/planned-outages/events/LS8LJGEBMTCMA-qz49st
+ DELETE _ml/calendars/planned-outages/events/LS8LJGEBMTCMA-qz49st
--------------------------------------------------
// CONSOLE
// TEST[skip:catch:missing]
@@ -11,7 +11,7 @@ Deletes jobs from a calendar.

==== Request

- `DELETE _xpack/ml/calendars/<calendar_id>/jobs/<job_id>`
+ `DELETE _ml/calendars/<calendar_id>/jobs/<job_id>`


==== Path Parameters

@@ -36,7 +36,7 @@ calendar and `total-requests` job:

[source,js]
--------------------------------------------------
- DELETE _xpack/ml/calendars/planned-outages/jobs/total-requests
+ DELETE _ml/calendars/planned-outages/jobs/total-requests
--------------------------------------------------
// CONSOLE
// TEST[skip:setup:calendar_outages_addjob]
@@ -11,7 +11,7 @@ Deletes a calendar.

==== Request

- `DELETE _xpack/ml/calendars/<calendar_id>`
+ `DELETE _ml/calendars/<calendar_id>`


==== Description

@@ -38,7 +38,7 @@ The following example deletes the `planned-outages` calendar:

[source,js]
--------------------------------------------------
- DELETE _xpack/ml/calendars/planned-outages
+ DELETE _ml/calendars/planned-outages
--------------------------------------------------
// CONSOLE
// TEST[skip:setup:calendar_outages]
@@ -11,7 +11,7 @@ Deletes an existing {dfeed}.

==== Request

- `DELETE _xpack/ml/datafeeds/<feed_id>`
+ `DELETE _ml/datafeeds/<feed_id>`


==== Description

@@ -45,7 +45,7 @@ The following example deletes the `datafeed-total-requests` {dfeed}:

[source,js]
--------------------------------------------------
- DELETE _xpack/ml/datafeeds/datafeed-total-requests
+ DELETE _ml/datafeeds/datafeed-total-requests
--------------------------------------------------
// CONSOLE
// TEST[skip:setup:server_metrics_datafeed]
@@ -10,7 +10,7 @@ Deletes expired and unused machine learning data.

==== Request

- `DELETE _xpack/ml/_delete_expired_data`
+ `DELETE _ml/_delete_expired_data`

==== Description

@@ -32,7 +32,7 @@ The endpoint takes no arguments:

[source,js]
--------------------------------------------------
- DELETE _xpack/ml/_delete_expired_data
+ DELETE _ml/_delete_expired_data
--------------------------------------------------
// CONSOLE
// TEST
@@ -11,7 +11,7 @@ Deletes a filter.

==== Request

- `DELETE _xpack/ml/filters/<filter_id>`
+ `DELETE _ml/filters/<filter_id>`


==== Description

@@ -39,7 +39,7 @@ The following example deletes the `safe_domains` filter:

[source,js]
--------------------------------------------------
- DELETE _xpack/ml/filters/safe_domains
+ DELETE _ml/filters/safe_domains
--------------------------------------------------
// CONSOLE
// TEST[skip:setup:ml_filter_safe_domains]
@@ -10,11 +10,11 @@ Deletes forecasts from a {ml} job.

==== Request

- `DELETE _xpack/ml/anomaly_detectors/<job_id>/_forecast` +
+ `DELETE _ml/anomaly_detectors/<job_id>/_forecast` +

- `DELETE _xpack/ml/anomaly_detectors/<job_id>/_forecast/<forecast_id>` +
+ `DELETE _ml/anomaly_detectors/<job_id>/_forecast/<forecast_id>` +

- `DELETE _xpack/ml/anomaly_detectors/<job_id>/_forecast/_all`
+ `DELETE _ml/anomaly_detectors/<job_id>/_forecast/_all`


==== Description

@@ -63,7 +63,7 @@ The following example deletes all forecasts from the `total-requests` job:

[source,js]
--------------------------------------------------
- DELETE _xpack/ml/anomaly_detectors/total-requests/_forecast/_all
+ DELETE _ml/anomaly_detectors/total-requests/_forecast/_all
--------------------------------------------------
// CONSOLE
// TEST[skip:setup:server_metrics_openjob]
@@ -11,7 +11,7 @@ Deletes an existing anomaly detection job.

==== Request

- `DELETE _xpack/ml/anomaly_detectors/<job_id>`
+ `DELETE _ml/anomaly_detectors/<job_id>`


==== Description

@@ -57,7 +57,7 @@ The following example deletes the `total-requests` job:

[source,js]
--------------------------------------------------
- DELETE _xpack/ml/anomaly_detectors/total-requests
+ DELETE _ml/anomaly_detectors/total-requests
--------------------------------------------------
// CONSOLE
// TEST[skip:setup:server_metrics_job]

@@ -75,7 +75,7 @@ In the next example we delete the `total-requests` job asynchronously:

[source,js]
--------------------------------------------------
- DELETE _xpack/ml/anomaly_detectors/total-requests?wait_for_completion=false
+ DELETE _ml/anomaly_detectors/total-requests?wait_for_completion=false
--------------------------------------------------
// CONSOLE
// TEST[skip:setup:server_metrics_job]

@@ -88,4 +88,4 @@ of the job deletion task:
"task": "oTUltX4IQMOUUVeiohTt8A:39"
}
----
- // TESTRESPONSE[s/"task": "oTUltX4IQMOUUVeiohTt8A:39"/"task": $body.task/]
+ // TESTRESPONSE[s/"task": "oTUltX4IQMOUUVeiohTt8A:39"/"task": $body.task/]
@@ -11,7 +11,7 @@ Deletes an existing model snapshot.

==== Request

- `DELETE _xpack/ml/anomaly_detectors/<job_id>/model_snapshots/<snapshot_id>`
+ `DELETE _ml/anomaly_detectors/<job_id>/model_snapshots/<snapshot_id>`


==== Description

@@ -41,7 +41,7 @@ The following example deletes the `1491948163` snapshot:

[source,js]
--------------------------------------------------
- DELETE _xpack/ml/anomaly_detectors/farequote/model_snapshots/1491948163
+ DELETE _ml/anomaly_detectors/farequote/model_snapshots/1491948163
--------------------------------------------------
// CONSOLE
// TEST[skip:todo]

@@ -53,4 +53,4 @@ When the snapshot is deleted, you receive the following results:
"acknowledged": true
}
----
- // TESTRESPONSE
+ // TESTRESPONSE
@@ -13,7 +13,7 @@ suitable to be ingested into {es}.
==== Request

`POST _xpack/ml/find_file_structure`
`POST _ml/find_file_structure`

==== Description

@@ -220,7 +220,7 @@ some books. You can send the contents to the `find_file_structure` endpoint:
[source,js]
----
POST _xpack/ml/find_file_structure
POST _ml/find_file_structure
{"name": "Leviathan Wakes", "author": "James S.A. Corey", "release_date": "2011-06-02", "page_count": 561}
{"name": "Hyperion", "author": "Dan Simmons", "release_date": "1989-05-26", "page_count": 482}
{"name": "Dune", "author": "Frank Herbert", "release_date": "1965-06-01", "page_count": 604}

@@ -501,7 +501,7 @@ to match what is specified in the `head` command.
[source,js]
----
curl -s "s3.amazonaws.com/nyc-tlc/trip+data/yellow_tripdata_2018-06.csv" | head -20000 | curl -s -H "Content-Type: application/json" -XPOST "localhost:9200/_xpack/ml/find_file_structure?pretty&lines_to_sample=20000" -T -
curl -s "s3.amazonaws.com/nyc-tlc/trip+data/yellow_tripdata_2018-06.csv" | head -20000 | curl -s -H "Content-Type: application/json" -XPOST "localhost:9200/_ml/find_file_structure?pretty&lines_to_sample=20000" -T -
----
// NOTCONSOLE
// Not converting to console because this shows how curl can be used

@@ -1305,7 +1305,7 @@ analysis:
[source,js]
----
curl -s "s3.amazonaws.com/nyc-tlc/trip+data/yellow_tripdata_2018-06.csv" | head -200000 | curl -s -H "Content-Type: application/json" -XPOST "localhost:9200/_xpack/ml/find_file_structure?pretty&lines_to_sample=200000&timeout=1s" -T -
curl -s "s3.amazonaws.com/nyc-tlc/trip+data/yellow_tripdata_2018-06.csv" | head -200000 | curl -s -H "Content-Type: application/json" -XPOST "localhost:9200/_ml/find_file_structure?pretty&lines_to_sample=200000&timeout=1s" -T -
----
// NOTCONSOLE
// Not converting to console because this shows how curl can be used

@@ -1342,7 +1342,7 @@ This is an example of analyzing {es}'s own log file:
[source,js]
----
curl -s -H "Content-Type: application/json" -XPOST "localhost:9200/_xpack/ml/find_file_structure?pretty" -T "$ES_HOME/logs/elasticsearch.log"
curl -s -H "Content-Type: application/json" -XPOST "localhost:9200/_ml/find_file_structure?pretty" -T "$ES_HOME/logs/elasticsearch.log"
----
// NOTCONSOLE
// Not converting to console because this shows how curl can be used

@@ -1496,7 +1496,7 @@ query parameter (appropriately URL escaped):
[source,js]
----
curl -s -H "Content-Type: application/json" -XPOST "localhost:9200/_xpack/ml/find_file_structure?pretty&format=semi_structured_text&grok_pattern=%5C%5B%25%7BTIMESTAMP_ISO8601:timestamp%7D%5C%5D%5C%5B%25%7BLOGLEVEL:loglevel%7D%20*%5C%5D%5C%5B%25%7BJAVACLASS:class%7D%20*%5C%5D%20%5C%5B%25%7BHOSTNAME:node%7D%5C%5D%20%25%7BJAVALOGMESSAGE:message%7D" -T "$ES_HOME/logs/elasticsearch.log"
curl -s -H "Content-Type: application/json" -XPOST "localhost:9200/_ml/find_file_structure?pretty&format=semi_structured_text&grok_pattern=%5C%5B%25%7BTIMESTAMP_ISO8601:timestamp%7D%5C%5D%5C%5B%25%7BLOGLEVEL:loglevel%7D%20*%5C%5D%5C%5B%25%7BJAVACLASS:class%7D%20*%5C%5D%20%5C%5B%25%7BHOSTNAME:node%7D%5C%5D%20%25%7BJAVALOGMESSAGE:message%7D" -T "$ES_HOME/logs/elasticsearch.log"
----
// NOTCONSOLE
// Not converting to console because this shows how curl can be used

@ -11,7 +11,7 @@ Forces any buffered data to be processed by the job.
|
|||
|
||||
==== Request
|
||||
|
||||
`POST _xpack/ml/anomaly_detectors/<job_id>/_flush`
|
||||
`POST _ml/anomaly_detectors/<job_id>/_flush`
|
||||
|
||||
|
||||
==== Description
|
||||
|
@ -69,7 +69,7 @@ The following example flushes the `total-requests` job:
|
|||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
POST _xpack/ml/anomaly_detectors/total-requests/_flush
|
||||
POST _ml/anomaly_detectors/total-requests/_flush
|
||||
{
|
||||
"calc_interim": true
|
||||
}
|
||||
|
@ -96,7 +96,7 @@ on January 1, 2018:
|
|||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
POST _xpack/ml/anomaly_detectors/total-requests/_flush
|
||||
POST _ml/anomaly_detectors/total-requests/_flush
|
||||
{
|
||||
"advance_time": "1514804400"
|
||||
}
|
||||
|
|
|
@ -10,7 +10,7 @@ Predicts the future behavior of a time series by using its historical behavior.
|
|||
|
||||
==== Request
|
||||
|
||||
`POST _xpack/ml/anomaly_detectors/<job_id>/_forecast`
|
||||
`POST _ml/anomaly_detectors/<job_id>/_forecast`
|
||||
|
||||
|
||||
==== Description
|
||||
|
@ -57,7 +57,7 @@ The following example requests a 10 day forecast for the `total-requests` job:
|
|||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
POST _xpack/ml/anomaly_detectors/total-requests/_forecast
|
||||
POST _ml/anomaly_detectors/total-requests/_forecast
|
||||
{
|
||||
"duration": "10d"
|
||||
}
|
||||
|
|
|
@ -11,9 +11,9 @@ Retrieves job results for one or more buckets.
|
|||
|
||||
==== Request
|
||||
|
||||
`GET _xpack/ml/anomaly_detectors/<job_id>/results/buckets` +
|
||||
`GET _ml/anomaly_detectors/<job_id>/results/buckets` +
|
||||
|
||||
`GET _xpack/ml/anomaly_detectors/<job_id>/results/buckets/<timestamp>`
|
||||
`GET _ml/anomaly_detectors/<job_id>/results/buckets/<timestamp>`
|
||||
|
||||
|
||||
==== Description
|
||||
|
@ -90,7 +90,7 @@ The following example gets bucket information for the `it-ops-kpi` job:
|
|||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
GET _xpack/ml/anomaly_detectors/it-ops-kpi/results/buckets
|
||||
GET _ml/anomaly_detectors/it-ops-kpi/results/buckets
|
||||
{
|
||||
"anomaly_score": 80,
|
||||
"start": "1454530200001"
|
||||
|
|
|
@ -12,9 +12,9 @@ calendars.
|
|||
|
||||
==== Request
|
||||
|
||||
`GET _xpack/ml/calendars/<calendar_id>/events` +
|
||||
`GET _ml/calendars/<calendar_id>/events` +
|
||||
|
||||
`GET _xpack/ml/calendars/_all/events`
|
||||
`GET _ml/calendars/_all/events`
|
||||
|
||||
|
||||
===== Description
|
||||
|
@ -64,7 +64,7 @@ The following example gets information about the scheduled events in the
|
|||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
GET _xpack/ml/calendars/planned-outages/events
|
||||
GET _ml/calendars/planned-outages/events
|
||||
--------------------------------------------------
|
||||
// CONSOLE
|
||||
// TEST[skip:setup:calendar_outages_addevent]
|
||||
|
|
|
@ -11,9 +11,9 @@ Retrieves configuration information for calendars.
|
|||
|
||||
==== Request
|
||||
|
||||
`GET _xpack/ml/calendars/<calendar_id>` +
|
||||
`GET _ml/calendars/<calendar_id>` +
|
||||
|
||||
`GET _xpack/ml/calendars/_all`
|
||||
`GET _ml/calendars/_all`
|
||||
|
||||
|
||||
===== Description
|
||||
|
@ -60,7 +60,7 @@ calendar:
|
|||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
GET _xpack/ml/calendars/planned-outages
|
||||
GET _ml/calendars/planned-outages
|
||||
--------------------------------------------------
|
||||
// CONSOLE
|
||||
// TEST[skip:setup:calendar_outages_addjob]
|
||||
|
|
|
@ -11,9 +11,9 @@ Retrieves job results for one or more categories.
|
|||
|
||||
==== Request
|
||||
|
||||
`GET _xpack/ml/anomaly_detectors/<job_id>/results/categories` +
|
||||
`GET _ml/anomaly_detectors/<job_id>/results/categories` +
|
||||
|
||||
`GET _xpack/ml/anomaly_detectors/<job_id>/results/categories/<category_id>`
|
||||
`GET _ml/anomaly_detectors/<job_id>/results/categories/<category_id>`
|
||||
|
||||
==== Description
|
||||
|
||||
|
@ -65,7 +65,7 @@ The following example gets information about one category for the
|
|||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
GET _xpack/ml/anomaly_detectors/esxi_log/results/categories
|
||||
GET _ml/anomaly_detectors/esxi_log/results/categories
|
||||
{
|
||||
"page":{
|
||||
"size": 1
|
||||
|
|
|
@ -12,13 +12,13 @@ Retrieves usage information for {dfeeds}.
|
|||
==== Request
|
||||
|
||||
|
||||
`GET _xpack/ml/datafeeds/<feed_id>/_stats` +
|
||||
`GET _ml/datafeeds/<feed_id>/_stats` +
|
||||
|
||||
`GET _xpack/ml/datafeeds/<feed_id>,<feed_id>/_stats` +
|
||||
`GET _ml/datafeeds/<feed_id>,<feed_id>/_stats` +
|
||||
|
||||
`GET _xpack/ml/datafeeds/_stats` +
|
||||
`GET _ml/datafeeds/_stats` +
|
||||
|
||||
`GET _xpack/ml/datafeeds/_all/_stats` +
|
||||
`GET _ml/datafeeds/_all/_stats` +
|
||||
|
||||
|
||||
|
||||
|
@ -64,7 +64,7 @@ The following example gets usage information for the
|
|||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
GET _xpack/ml/datafeeds/datafeed-total-requests/_stats
|
||||
GET _ml/datafeeds/datafeed-total-requests/_stats
|
||||
--------------------------------------------------
|
||||
// CONSOLE
|
||||
// TEST[skip:setup:server_metrics_startdf]
|
||||
|
@ -98,4 +98,4 @@ The API returns the following results:
|
|||
// TESTRESPONSE[s/"node-0"/$body.$_path/]
|
||||
// TESTRESPONSE[s/"hoXMLZB0RWKfR9UPPUCxXX"/$body.$_path/]
|
||||
// TESTRESPONSE[s/"127.0.0.1:9300"/$body.$_path/]
|
||||
// TESTRESPONSE[s/"17179869184"/$body.datafeeds.0.node.attributes.ml\\.machine_memory/]
|
||||
// TESTRESPONSE[s/"17179869184"/$body.datafeeds.0.node.attributes.ml\\.machine_memory/]
|
||||
|
|
|
@ -11,13 +11,13 @@ Retrieves configuration information for {dfeeds}.
|
|||
==== Request
|
||||
|
||||
|
||||
`GET _xpack/ml/datafeeds/<feed_id>` +
|
||||
`GET _ml/datafeeds/<feed_id>` +
|
||||
|
||||
`GET _xpack/ml/datafeeds/<feed_id>,<feed_id>` +
|
||||
`GET _ml/datafeeds/<feed_id>,<feed_id>` +
|
||||
|
||||
`GET _xpack/ml/datafeeds/` +
|
||||
`GET _ml/datafeeds/` +
|
||||
|
||||
`GET _xpack/ml/datafeeds/_all` +
|
||||
`GET _ml/datafeeds/_all` +
|
||||
|
||||
|
||||
===== Description
|
||||
|
@ -58,7 +58,7 @@ The following example gets configuration information for the
|
|||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
GET _xpack/ml/datafeeds/datafeed-total-requests
|
||||
GET _ml/datafeeds/datafeed-total-requests
|
||||
--------------------------------------------------
|
||||
// CONSOLE
|
||||
// TEST[skip:setup:server_metrics_datafeed]
|
||||
|
|
|
@ -11,9 +11,9 @@ Retrieves filters.
|
|||
|
||||
==== Request
|
||||
|
||||
`GET _xpack/ml/filters/<filter_id>` +
|
||||
`GET _ml/filters/<filter_id>` +
|
||||
|
||||
`GET _xpack/ml/filters/`
|
||||
`GET _ml/filters/`
|
||||
|
||||
|
||||
===== Description
|
||||
|
@ -60,7 +60,7 @@ filter:
|
|||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
GET _xpack/ml/filters/safe_domains
|
||||
GET _ml/filters/safe_domains
|
||||
--------------------------------------------------
|
||||
// CONSOLE
|
||||
// TEST[skip:setup:ml_filter_safe_domains]
|
||||
|
|
|
@ -11,7 +11,7 @@ Retrieves job results for one or more influencers.
|
|||
|
||||
==== Request
|
||||
|
||||
`GET _xpack/ml/anomaly_detectors/<job_id>/results/influencers`
|
||||
`GET _ml/anomaly_detectors/<job_id>/results/influencers`
|
||||
|
||||
//===== Description
|
||||
|
||||
|
@ -75,7 +75,7 @@ The following example gets influencer information for the `it_ops_new_kpi` job:
|
|||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
GET _xpack/ml/anomaly_detectors/it_ops_new_kpi/results/influencers
|
||||
GET _ml/anomaly_detectors/it_ops_new_kpi/results/influencers
|
||||
{
|
||||
"sort": "influencer_score",
|
||||
"desc": true
|
||||
|
|
|
@ -13,13 +13,13 @@ Retrieves usage information for jobs.
|
|||
|
||||
|
||||
|
||||
`GET _xpack/ml/anomaly_detectors/<job_id>/_stats`
|
||||
`GET _ml/anomaly_detectors/<job_id>/_stats`
|
||||
|
||||
`GET _xpack/ml/anomaly_detectors/<job_id>,<job_id>/_stats` +
|
||||
`GET _ml/anomaly_detectors/<job_id>,<job_id>/_stats` +
|
||||
|
||||
`GET _xpack/ml/anomaly_detectors/_stats` +
|
||||
`GET _ml/anomaly_detectors/_stats` +
|
||||
|
||||
`GET _xpack/ml/anomaly_detectors/_all/_stats` +
|
||||
`GET _ml/anomaly_detectors/_all/_stats` +
|
||||
|
||||
|
||||
===== Description
|
||||
|
@ -60,7 +60,7 @@ The following example gets usage information for the `farequote` job:
|
|||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
GET _xpack/ml/anomaly_detectors/farequote/_stats
|
||||
GET _ml/anomaly_detectors/farequote/_stats
|
||||
--------------------------------------------------
|
||||
// CONSOLE
|
||||
// TEST[skip:todo]
|
||||
|
|
|
@ -11,13 +11,13 @@ Retrieves configuration information for jobs.
|
|||
|
||||
==== Request
|
||||
|
||||
`GET _xpack/ml/anomaly_detectors/<job_id>` +
|
||||
`GET _ml/anomaly_detectors/<job_id>` +
|
||||
|
||||
`GET _xpack/ml/anomaly_detectors/<job_id>,<job_id>` +
|
||||
`GET _ml/anomaly_detectors/<job_id>,<job_id>` +
|
||||
|
||||
`GET _xpack/ml/anomaly_detectors/` +
|
||||
`GET _ml/anomaly_detectors/` +
|
||||
|
||||
`GET _xpack/ml/anomaly_detectors/_all`
|
||||
`GET _ml/anomaly_detectors/_all`
|
||||
|
||||
|
||||
===== Description
|
||||
|
@ -57,7 +57,7 @@ The following example gets configuration information for the `total-requests` jo
|
|||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
GET _xpack/ml/anomaly_detectors/total-requests
|
||||
GET _ml/anomaly_detectors/total-requests
|
||||
--------------------------------------------------
|
||||
// CONSOLE
|
||||
// TEST[skip:setup:server_metrics_job]
|
||||
|
|
|
@ -10,7 +10,7 @@ Returns defaults and limits used by machine learning.
|
|||
|
||||
==== Request
|
||||
|
||||
`GET _xpack/ml/info`
|
||||
`GET _ml/info`
|
||||
|
||||
==== Description
|
||||
|
||||
|
@ -35,7 +35,7 @@ The endpoint takes no arguments:
|
|||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
GET _xpack/ml/info
|
||||
GET _ml/info
|
||||
--------------------------------------------------
|
||||
// CONSOLE
|
||||
// TEST
|
||||
|
|
|
@ -11,11 +11,11 @@ bucket results of multiple jobs.
|
|||
|
||||
==== Request
|
||||
|
||||
`GET _xpack/ml/anomaly_detectors/<job_id>/results/overall_buckets` +
|
||||
`GET _ml/anomaly_detectors/<job_id>/results/overall_buckets` +
|
||||
|
||||
`GET _xpack/ml/anomaly_detectors/<job_id>,<job_id>/results/overall_buckets` +
|
||||
`GET _ml/anomaly_detectors/<job_id>,<job_id>/results/overall_buckets` +
|
||||
|
||||
`GET _xpack/ml/anomaly_detectors/_all/results/overall_buckets`
|
||||
`GET _ml/anomaly_detectors/_all/results/overall_buckets`
|
||||
|
||||
==== Description
|
||||
|
||||
|
@ -102,7 +102,7 @@ The following example gets overall buckets for jobs with IDs matching `job-*`:
|
|||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
GET _xpack/ml/anomaly_detectors/job-*/results/overall_buckets
|
||||
GET _ml/anomaly_detectors/job-*/results/overall_buckets
|
||||
{
|
||||
"overall_score": 80,
|
||||
"start": "1403532000000"
|
||||
|
@ -148,7 +148,7 @@ The next example is similar but this time `top_n` is set to `2`:
|
|||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
GET _xpack/ml/anomaly_detectors/job-*/results/overall_buckets
|
||||
GET _ml/anomaly_detectors/job-*/results/overall_buckets
|
||||
{
|
||||
"top_n": 2,
|
||||
"overall_score": 50.0,
|
||||
|
|
|
@ -11,7 +11,7 @@ Retrieves anomaly records for a job.
|
|||
|
||||
==== Request
|
||||
|
||||
`GET _xpack/ml/anomaly_detectors/<job_id>/results/records`
|
||||
`GET _ml/anomaly_detectors/<job_id>/results/records`
|
||||
|
||||
//===== Description
|
||||
|
||||
|
@ -76,7 +76,7 @@ The following example gets record information for the `it-ops-kpi` job:
|
|||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
GET _xpack/ml/anomaly_detectors/it-ops-kpi/results/records
|
||||
GET _ml/anomaly_detectors/it-ops-kpi/results/records
|
||||
{
|
||||
"sort": "record_score",
|
||||
"desc": true,
|
||||
|
|
|
@ -11,9 +11,9 @@ Retrieves information about model snapshots.
|
|||
|
||||
==== Request
|
||||
|
||||
`GET _xpack/ml/anomaly_detectors/<job_id>/model_snapshots` +
|
||||
`GET _ml/anomaly_detectors/<job_id>/model_snapshots` +
|
||||
|
||||
`GET _xpack/ml/anomaly_detectors/<job_id>/model_snapshots/<snapshot_id>`
|
||||
`GET _ml/anomaly_detectors/<job_id>/model_snapshots/<snapshot_id>`
|
||||
|
||||
//===== Description
|
||||
|
||||
|
@ -72,7 +72,7 @@ The following example gets model snapshot information for the
|
|||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
GET _xpack/ml/anomaly_detectors/farequote/model_snapshots
|
||||
GET _ml/anomaly_detectors/farequote/model_snapshots
|
||||
{
|
||||
"start": "1491852977000"
|
||||
}
|
||||
|
|
|
@ -364,7 +364,7 @@ If you omit the `categorization_analyzer`, the following default values are used
|
|||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
POST _xpack/ml/anomaly_detectors/_validate
|
||||
POST _ml/anomaly_detectors/_validate
|
||||
{
|
||||
"analysis_config" : {
|
||||
"categorization_analyzer" : {
|
||||
|
|
|
@ -13,7 +13,7 @@ A job can be opened and closed multiple times throughout its lifecycle.
|
|||
|
||||
==== Request
|
||||
|
||||
`POST _xpack/ml/anomaly_detectors/{job_id}/_open`
|
||||
`POST _ml/anomaly_detectors/{job_id}/_open`
|
||||
|
||||
|
||||
==== Description
|
||||
|
@ -51,7 +51,7 @@ property:
|
|||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
POST _xpack/ml/anomaly_detectors/total-requests/_open
|
||||
POST _ml/anomaly_detectors/total-requests/_open
|
||||
{
|
||||
"timeout": "35m"
|
||||
}
|
||||
|
|
|
@ -10,7 +10,7 @@ Posts scheduled events in a calendar.
|
|||
|
||||
==== Request
|
||||
|
||||
`POST _xpack/ml/calendars/<calendar_id>/events`
|
||||
`POST _ml/calendars/<calendar_id>/events`
|
||||
|
||||
|
||||
==== Description
|
||||
|
@ -43,7 +43,7 @@ You can add scheduled events to the `planned-outages` calendar as follows:
|
|||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
POST _xpack/ml/calendars/planned-outages/events
|
||||
POST _ml/calendars/planned-outages/events
|
||||
{
|
||||
"events" : [
|
||||
{"description": "event 1", "start_time": 1513641600000, "end_time": 1513728000000},
|
||||
|
|
|
@ -11,7 +11,7 @@ Sends data to an anomaly detection job for analysis.
|
|||
|
||||
==== Request
|
||||
|
||||
`POST _xpack/ml/anomaly_detectors/<job_id>/_data`
|
||||
`POST _ml/anomaly_detectors/<job_id>/_data`
|
||||
|
||||
|
||||
==== Description
|
||||
|
@ -79,7 +79,7 @@ The following example posts data from the it_ops_new_kpi.json file to the `it_op
|
|||
[source,js]
|
||||
--------------------------------------------------
|
||||
$ curl -s -H "Content-type: application/json"
|
||||
-X POST http:\/\/localhost:9200/_xpack/ml/anomaly_detectors/it_ops_new_kpi/_data
|
||||
-X POST http:\/\/localhost:9200/_ml/anomaly_detectors/it_ops_new_kpi/_data
|
||||
--data-binary @it_ops_new_kpi.json
|
||||
--------------------------------------------------
|
||||
|
||||
|
|
|
@ -11,7 +11,7 @@ Previews a {dfeed}.
|
|||
|
||||
==== Request
|
||||
|
||||
`GET _xpack/ml/datafeeds/<datafeed_id>/_preview`
|
||||
`GET _ml/datafeeds/<datafeed_id>/_preview`
|
||||
|
||||
|
||||
==== Description
|
||||
|
@ -51,7 +51,7 @@ The following example obtains a preview of the `datafeed-farequote` {dfeed}:
|
|||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
GET _xpack/ml/datafeeds/datafeed-farequote/_preview
|
||||
GET _ml/datafeeds/datafeed-farequote/_preview
|
||||
--------------------------------------------------
|
||||
// CONSOLE
|
||||
// TEST[skip:setup:farequote_datafeed]
|
||||
|
|
|
@ -10,7 +10,7 @@ Adds a job to a calendar.
|
|||
|
||||
==== Request
|
||||
|
||||
`PUT _xpack/ml/calendars/<calendar_id>/jobs/<job_id>`
|
||||
`PUT _ml/calendars/<calendar_id>/jobs/<job_id>`
|
||||
|
||||
|
||||
==== Path Parameters
|
||||
|
@ -36,7 +36,7 @@ The following example associates the `planned-outages` calendar with the
|
|||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
PUT _xpack/ml/calendars/planned-outages/jobs/total-requests
|
||||
PUT _ml/calendars/planned-outages/jobs/total-requests
|
||||
--------------------------------------------------
|
||||
// CONSOLE
|
||||
// TEST[skip:setup:calendar_outages_openjob]
|
||||
|
|
|
@ -10,7 +10,7 @@ Instantiates a calendar.
|
|||
|
||||
==== Request
|
||||
|
||||
`PUT _xpack/ml/calendars/<calendar_id>`
|
||||
`PUT _ml/calendars/<calendar_id>`
|
||||
|
||||
===== Description
|
||||
|
||||
|
@ -42,7 +42,7 @@ The following example creates the `planned-outages` calendar:
|
|||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
PUT _xpack/ml/calendars/planned-outages
|
||||
PUT _ml/calendars/planned-outages
|
||||
--------------------------------------------------
|
||||
// CONSOLE
|
||||
// TEST[skip:need-license]
|
||||
|
|
|
@ -11,7 +11,7 @@ Instantiates a {dfeed}.
|
|||
|
||||
==== Request
|
||||
|
||||
`PUT _xpack/ml/datafeeds/<feed_id>`
|
||||
`PUT _ml/datafeeds/<feed_id>`
|
||||
|
||||
|
||||
==== Description
|
||||
|
@ -105,7 +105,7 @@ The following example creates the `datafeed-total-requests` {dfeed}:
|
|||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
PUT _xpack/ml/datafeeds/datafeed-total-requests
|
||||
PUT _ml/datafeeds/datafeed-total-requests
|
||||
{
|
||||
"job_id": "total-requests",
|
||||
"indices": ["server-metrics"]
|
||||
|
@ -137,4 +137,4 @@ When the {dfeed} is created, you receive the following results:
|
|||
}
|
||||
----
|
||||
// TESTRESPONSE[s/"query_delay": "83474ms"/"query_delay": $body.query_delay/]
|
||||
// TESTRESPONSE[s/"query.boost": "1.0"/"query.boost": $body.query.boost/]
|
||||
// TESTRESPONSE[s/"query.boost": "1.0"/"query.boost": $body.query.boost/]
|
||||
|
|
|
@ -10,7 +10,7 @@ Instantiates a filter.
|
|||
|
||||
==== Request
|
||||
|
||||
`PUT _xpack/ml/filters/<filter_id>`
|
||||
`PUT _ml/filters/<filter_id>`
|
||||
|
||||
===== Description
|
||||
|
||||
|
@ -49,7 +49,7 @@ The following example creates the `safe_domains` filter:
|
|||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
PUT _xpack/ml/filters/safe_domains
|
||||
PUT _ml/filters/safe_domains
|
||||
{
|
||||
"description": "A list of safe domains",
|
||||
"items": ["*.google.com", "wikipedia.org"]
|
||||
|
|
|
@ -10,7 +10,7 @@ Instantiates a job.
|
|||
|
||||
==== Request
|
||||
|
||||
`PUT _xpack/ml/anomaly_detectors/<job_id>`
|
||||
`PUT _ml/anomaly_detectors/<job_id>`
|
||||
|
||||
//===== Description
|
||||
|
||||
|
@ -85,7 +85,7 @@ The following example creates the `total-requests` job:
|
|||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
PUT _xpack/ml/anomaly_detectors/total-requests
|
||||
PUT _ml/anomaly_detectors/total-requests
|
||||
{
|
||||
"description" : "Total sum of requests",
|
||||
"analysis_config" : {
|
||||
|
|
|
@ -10,7 +10,7 @@ Reverts to a specific snapshot.
|
|||
|
||||
==== Request
|
||||
|
||||
`POST _xpack/ml/anomaly_detectors/<job_id>/model_snapshots/<snapshot_id>/_revert`
|
||||
`POST _ml/anomaly_detectors/<job_id>/model_snapshots/<snapshot_id>/_revert`
|
||||
|
||||
|
||||
==== Description
|
||||
|
@ -61,7 +61,7 @@ The following example reverts to the `1491856080` snapshot for the
|
|||
[source,js]
|
||||
--------------------------------------------------
|
||||
POST
|
||||
_xpack/ml/anomaly_detectors/it_ops_new_kpi/model_snapshots/1491856080/_revert
|
||||
_ml/anomaly_detectors/it_ops_new_kpi/model_snapshots/1491856080/_revert
|
||||
{
|
||||
"delete_intervening_results": true
|
||||
}
|
||||
|
|
|
@ -12,7 +12,7 @@ A {dfeed} can be started and stopped multiple times throughout its lifecycle.
|
|||
|
||||
==== Request
|
||||
|
||||
`POST _xpack/ml/datafeeds/<feed_id>/_start`
|
||||
`POST _ml/datafeeds/<feed_id>/_start`
|
||||
|
||||
==== Description
|
||||
|
||||
|
@ -95,7 +95,7 @@ The following example starts the `datafeed-it-ops-kpi` {dfeed}:
|
|||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
POST _xpack/ml/datafeeds/datafeed-total-requests/_start
|
||||
POST _ml/datafeeds/datafeed-total-requests/_start
|
||||
{
|
||||
"start": "2017-04-07T18:22:16Z"
|
||||
}
|
||||
|
@ -110,4 +110,4 @@ When the {dfeed} starts, you receive the following results:
|
|||
"started": true
|
||||
}
|
||||
----
|
||||
// TESTRESPONSE
|
||||
// TESTRESPONSE
|
||||
|
|
|
@ -13,11 +13,11 @@ A {dfeed} can be started and stopped multiple times throughout its lifecycle.
|
|||
|
||||
==== Request
|
||||
|
||||
`POST _xpack/ml/datafeeds/<feed_id>/_stop` +
|
||||
`POST _ml/datafeeds/<feed_id>/_stop` +
|
||||
|
||||
`POST _xpack/ml/datafeeds/<feed_id>,<feed_id>/_stop` +
|
||||
`POST _ml/datafeeds/<feed_id>,<feed_id>/_stop` +
|
||||
|
||||
`POST _xpack/ml/datafeeds/_all/_stop`
|
||||
`POST _ml/datafeeds/_all/_stop`
|
||||
|
||||
|
||||
===== Description
|
||||
|
@ -57,7 +57,7 @@ The following example stops the `datafeed-total-requests` {dfeed}:
|
|||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
POST _xpack/ml/datafeeds/datafeed-total-requests/_stop
|
||||
POST _ml/datafeeds/datafeed-total-requests/_stop
|
||||
{
|
||||
"timeout": "30s"
|
||||
}
|
||||
|
@ -73,4 +73,4 @@ When the {dfeed} stops, you receive the following results:
|
|||
"stopped": true
|
||||
}
|
||||
----
|
||||
// TESTRESPONSE
|
||||
// TESTRESPONSE
|
||||
|
|
|
@ -10,7 +10,7 @@ Updates certain properties of a {dfeed}.
|
|||
|
||||
==== Request
|
||||
|
||||
`POST _xpack/ml/datafeeds/<feed_id>/_update`
|
||||
`POST _ml/datafeeds/<feed_id>/_update`
|
||||
|
||||
//===== Description
|
||||
|
||||
|
@ -97,7 +97,7 @@ The following example updates the query for the `datafeed-total-requests`
|
|||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
POST _xpack/ml/datafeeds/datafeed-total-requests/_update
|
||||
POST _ml/datafeeds/datafeed-total-requests/_update
|
||||
{
|
||||
"query": {
|
||||
"term": {
|
||||
|
|
|
@ -10,7 +10,7 @@ Updates the description of a filter, adds items, or removes items.
|
|||
|
||||
==== Request
|
||||
|
||||
`POST _xpack/ml/filters/<filter_id>/_update`
|
||||
`POST _ml/filters/<filter_id>/_update`
|
||||
|
||||
//==== Description
|
||||
|
||||
|
@ -45,7 +45,7 @@ You can change the description, add and remove items to the `safe_domains` filte
|
|||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
POST _xpack/ml/filters/safe_domains/_update
|
||||
POST _ml/filters/safe_domains/_update
|
||||
{
|
||||
"description": "Updated list of domains",
|
||||
"add_items": ["*.myorg.com"],
|
||||
|
|
|
@ -10,7 +10,7 @@ Updates certain properties of a job.
|
|||
|
||||
==== Request
|
||||
|
||||
`POST _xpack/ml/anomaly_detectors/<job_id>/_update`
|
||||
`POST _ml/anomaly_detectors/<job_id>/_update`
|
||||
|
||||
|
||||
==== Path Parameters
|
||||
|
@ -99,7 +99,7 @@ The following example updates the `total-requests` job:
|
|||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
POST _xpack/ml/anomaly_detectors/total-requests/_update
|
||||
POST _ml/anomaly_detectors/total-requests/_update
|
||||
{
|
||||
"description":"An updated job",
|
||||
"groups": ["group1","group2"],
|
||||
|
@ -178,4 +178,4 @@ information, including the updated property values. For example:
|
|||
}
|
||||
----
|
||||
// TESTRESPONSE[s/"job_version": "7.0.0-alpha1"/"job_version": $body.job_version/]
|
||||
// TESTRESPONSE[s/"create_time": 1518808660505/"create_time": $body.create_time/]
|
||||
// TESTRESPONSE[s/"create_time": 1518808660505/"create_time": $body.create_time/]
|
||||
|
|
|
@ -10,7 +10,7 @@ Updates certain properties of a snapshot.
|
|||
|
||||
==== Request
|
||||
|
||||
`POST _xpack/ml/anomaly_detectors/<job_id>/model_snapshots/<snapshot_id>/_update`
|
||||
`POST _ml/anomaly_detectors/<job_id>/model_snapshots/<snapshot_id>/_update`
|
||||
|
||||
|
||||
//==== Description
|
||||
|
@ -53,7 +53,7 @@ The following example updates the snapshot identified as `1491852978`:
|
|||
[source,js]
|
||||
--------------------------------------------------
|
||||
POST
|
||||
_xpack/ml/anomaly_detectors/it_ops_new_logs/model_snapshots/1491852978/_update
|
||||
_ml/anomaly_detectors/it_ops_new_logs/model_snapshots/1491852978/_update
|
||||
{
|
||||
"description": "Snapshot 1",
|
||||
"retain": true
|
||||
|
|
|
@ -10,7 +10,7 @@ Validates detector configuration information.
|
|||
|
||||
==== Request
|
||||
|
||||
`POST _xpack/ml/anomaly_detectors/_validate/detector`
|
||||
`POST _ml/anomaly_detectors/_validate/detector`
|
||||
|
||||
==== Description
|
||||
|
||||
|
@ -37,7 +37,7 @@ The following example validates detector configuration information:
|
|||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
POST _xpack/ml/anomaly_detectors/_validate/detector
|
||||
POST _ml/anomaly_detectors/_validate/detector
|
||||
{
|
||||
"function": "metric",
|
||||
"field_name": "responsetime",
|
||||
|
@ -54,4 +54,4 @@ When the validation completes, you receive the following results:
|
|||
"acknowledged": true
|
||||
}
|
||||
----
|
||||
// TESTRESPONSE
|
||||
// TESTRESPONSE
|
||||
|
|
|
@ -10,7 +10,7 @@ Validates job configuration information.
|
|||
|
||||
==== Request
|
||||
|
||||
`POST _xpack/ml/anomaly_detectors/_validate`
|
||||
`POST _ml/anomaly_detectors/_validate`
|
||||
|
||||
==== Description
|
||||
|
||||
|
@ -37,7 +37,7 @@ The following example validates job configuration information:
|
|||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
POST _xpack/ml/anomaly_detectors/_validate
|
||||
POST _ml/anomaly_detectors/_validate
|
||||
{
|
||||
"description" : "Unusual response times by airlines",
|
||||
"analysis_config" : {
|
||||
|
@ -65,4 +65,4 @@ When the validation is complete, you receive the following results:
|
|||
"acknowledged": true
|
||||
}
|
||||
----
|
||||
// TESTRESPONSE
|
||||
// TESTRESPONSE
|
||||
|
|
|
@ -21,7 +21,7 @@ messages by using count or rare functions. For example:
|
|||
//Obtained from it_ops_new_app_logs.sh
|
||||
[source,js]
|
||||
----------------------------------
|
||||
PUT _xpack/ml/anomaly_detectors/it_ops_new_logs
|
||||
PUT _ml/anomaly_detectors/it_ops_new_logs
|
||||
{
|
||||
"description" : "IT Ops Application Logs",
|
||||
"analysis_config" : {
|
||||
|
@ -91,7 +91,7 @@ categorization field values are interpreted. For example:
|
|||
|
||||
[source,js]
|
||||
----------------------------------
|
||||
PUT _xpack/ml/anomaly_detectors/it_ops_new_logs2
|
||||
PUT _ml/anomaly_detectors/it_ops_new_logs2
|
||||
{
|
||||
"description" : "IT Ops Application Logs",
|
||||
"analysis_config" : {
|
||||
|
@ -153,7 +153,7 @@ equivalent to the following analyzer, which is defined using only built-in {es}
|
|||
|
||||
[source,js]
|
||||
----------------------------------
|
||||
PUT _xpack/ml/anomaly_detectors/it_ops_new_logs3
|
||||
PUT _ml/anomaly_detectors/it_ops_new_logs3
|
||||
{
|
||||
"description" : "IT Ops Application Logs",
|
||||
"analysis_config" : {
|
||||
|
|
|
@ -92,7 +92,7 @@ For example, the following API updates a job to add a custom URL that uses
|
|||
|
||||
[source,js]
|
||||
----------------------------------
|
||||
POST _xpack/ml/anomaly_detectors/sample_job/_update
|
||||
POST _ml/anomaly_detectors/sample_job/_update
|
||||
{
|
||||
"custom_settings": {
|
||||
"custom_urls": [
|
||||
|
|
|
@ -32,7 +32,7 @@ We create our filter using the {ref}/ml-put-filter.html[put filter API]:
|
|||
|
||||
[source,js]
|
||||
----------------------------------
|
||||
PUT _xpack/ml/filters/safe_domains
|
||||
PUT _ml/filters/safe_domains
|
||||
{
|
||||
"description": "Our list of safe domains",
|
||||
"items": ["safe.com", "trusted.com"]
|
||||
|
@ -46,7 +46,7 @@ filter for the `highest_registered_domain` field:
|
|||
|
||||
[source,js]
|
||||
----------------------------------
|
||||
PUT _xpack/ml/anomaly_detectors/dns_exfiltration_with_rule
|
||||
PUT _ml/anomaly_detectors/dns_exfiltration_with_rule
|
||||
{
|
||||
"analysis_config" : {
|
||||
"bucket_span":"5m",
|
||||
|
@ -79,7 +79,7 @@ domains that we want to add in the filter. We can do that by using the
|
|||
|
||||
[source,js]
|
||||
----------------------------------
|
||||
POST _xpack/ml/filters/safe_domains/_update
|
||||
POST _ml/filters/safe_domains/_update
|
||||
{
|
||||
"add_items": ["another-safe.com"]
|
||||
}
|
||||
|
@ -94,7 +94,7 @@ In the following example we scope multiple fields:
|
|||
|
||||
[source,js]
|
||||
----------------------------------
|
||||
PUT _xpack/ml/anomaly_detectors/scoping_multiple_fields
|
||||
PUT _ml/anomaly_detectors/scoping_multiple_fields
|
||||
{
|
||||
"analysis_config" : {
|
||||
"bucket_span":"5m",
|
||||
|
@ -144,7 +144,7 @@ utilization is less than 0.20.
|
|||
|
||||
[source,js]
|
||||
----------------------------------
|
||||
PUT _xpack/ml/anomaly_detectors/cpu_with_rule
|
||||
PUT _ml/anomaly_detectors/cpu_with_rule
|
||||
{
|
||||
"analysis_config" : {
|
||||
"bucket_span":"5m",
|
||||
|
@ -180,7 +180,7 @@ is greater than 30 and less than 50:
|
|||
|
||||
[source,js]
|
||||
----------------------------------
|
||||
PUT _xpack/ml/anomaly_detectors/rule_with_range
|
||||
PUT _ml/anomaly_detectors/rule_with_range
|
||||
{
|
||||
"analysis_config" : {
|
||||
"bucket_span":"5m",
|
||||
|
|
|
@ -45,7 +45,7 @@ see {ref}/ml-job-resource.html#ml-detectorconfig[Detector Configuration Objects]
|
|||
.Example 1: Analyzing events with the count function
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
PUT _xpack/ml/anomaly_detectors/example1
|
||||
PUT _ml/anomaly_detectors/example1
|
||||
{
|
||||
"analysis_config": {
|
||||
"detectors": [{
|
||||
|
@ -71,7 +71,7 @@ and detects when the event rate is unusual compared to its past behavior.
|
|||
.Example 2: Analyzing errors with the high_count function
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
PUT _xpack/ml/anomaly_detectors/example2
|
||||
PUT _ml/anomaly_detectors/example2
|
||||
{
|
||||
"analysis_config": {
|
||||
"detectors": [{
|
||||
|
@ -97,7 +97,7 @@ unusually high count of error codes compared to other users.
|
|||
.Example 3: Analyzing status codes with the low_count function
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
PUT _xpack/ml/anomaly_detectors/example3
|
||||
PUT _ml/anomaly_detectors/example3
|
||||
{
|
||||
"analysis_config": {
|
||||
"detectors": [{
|
||||
|
@ -124,7 +124,7 @@ compared to its past behavior.
|
|||
.Example 4: Analyzing aggregated data with the count function
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
PUT _xpack/ml/anomaly_detectors/example4
|
||||
PUT _ml/anomaly_detectors/example4
|
||||
{
|
||||
"analysis_config": {
|
||||
"summary_count_field_name" : "events_per_min",
|
||||
|
@ -189,7 +189,7 @@ The `non_zero_count` function models only the following data:
|
|||
.Example 5: Analyzing signatures with the high_non_zero_count function
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
PUT _xpack/ml/anomaly_detectors/example5
|
||||
PUT _ml/anomaly_detectors/example5
|
||||
{
|
||||
"analysis_config": {
|
||||
"detectors": [{
|
||||
|
@ -243,7 +243,7 @@ see {ref}/ml-job-resource.html#ml-detectorconfig[Detector Configuration Objects]
|
|||
.Example 6: Analyzing users with the distinct_count function
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
PUT _xpack/ml/anomaly_detectors/example6
|
||||
PUT _ml/anomaly_detectors/example6
|
||||
{
|
||||
"analysis_config": {
|
||||
"detectors": [{
|
||||
|
@ -268,7 +268,7 @@ users is unusual compared to the past.
|
|||
.Example 7: Analyzing ports with the high_distinct_count function
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
PUT _xpack/ml/anomaly_detectors/example7
|
||||
PUT _ml/anomaly_detectors/example7
|
||||
{
|
||||
"analysis_config": {
|
||||
"detectors": [{
|
||||
|
|
|
@ -31,7 +31,7 @@ see {ref}/ml-job-resource.html#ml-detectorconfig[Detector Configuration Objects]
|
|||
.Example 1: Analyzing transactions with the lat_long function
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
PUT _xpack/ml/anomaly_detectors/example1
|
||||
PUT _ml/anomaly_detectors/example1
|
||||
{
|
||||
"analysis_config": {
|
||||
"detectors": [{
|
||||
|
@ -79,7 +79,7 @@ format. For example, the following Painless script transforms
|
|||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
PUT _xpack/ml/datafeeds/datafeed-test2
|
||||
PUT _ml/datafeeds/datafeed-test2
|
||||
{
|
||||
"job_id": "farequote",
|
||||
"indices": ["farequote"],
|
||||
|
|
|
@ -28,7 +28,7 @@ To specify the population, use the `over_field_name` property. For example:
|
|||
|
||||
[source,js]
|
||||
----------------------------------
|
||||
PUT _xpack/ml/anomaly_detectors/population
|
||||
PUT _ml/anomaly_detectors/population
|
||||
{
|
||||
"description" : "Population analysis",
|
||||
"analysis_config" : {
|
||||
|
|
|
@ -25,7 +25,7 @@ request stops the `feed1` {dfeed}:
|
|||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
POST _xpack/ml/datafeeds/datafeed-total-requests/_stop
|
||||
POST _ml/datafeeds/datafeed-total-requests/_stop
|
||||
--------------------------------------------------
|
||||
// CONSOLE
|
||||
// TEST[skip:setup:server_metrics_startdf]
|
||||
|
@ -46,7 +46,7 @@ If you are upgrading your cluster, you can use the following request to stop all
|
|||
|
||||
[source,js]
|
||||
----------------------------------
|
||||
POST _xpack/ml/datafeeds/_all/_stop
|
||||
POST _ml/datafeeds/_all/_stop
|
||||
----------------------------------
|
||||
// CONSOLE
|
||||
// TEST[skip:needs-licence]
|
||||
|
@ -65,7 +65,7 @@ example, the following request closes the `job1` job:
|
|||
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
POST _xpack/ml/anomaly_detectors/total-requests/_close
|
||||
POST _ml/anomaly_detectors/total-requests/_close
|
||||
--------------------------------------------------
|
||||
// CONSOLE
|
||||
// TEST[skip:setup:server_metrics_openjob]
|
||||
|
@ -84,7 +84,7 @@ all open jobs on the cluster:
|
|||
|
||||
[source,js]
|
||||
----------------------------------
|
||||
POST _xpack/ml/anomaly_detectors/_all/_close
|
||||
POST _ml/anomaly_detectors/_all/_close
|
||||
----------------------------------
|
||||
// CONSOLE
|
||||
// TEST[skip:needs-licence]
|
||||
|
|
|
@ -105,7 +105,7 @@ version of the same field, use multi-fields. For more information, see
|
|||
.Example 1: Adding two numerical fields
|
||||
[source,js]
|
||||
----------------------------------
|
||||
PUT _xpack/ml/anomaly_detectors/test1
|
||||
PUT _ml/anomaly_detectors/test1
|
||||
{
|
||||
"analysis_config":{
|
||||
"bucket_span": "10m",
|
||||
|
@ -123,7 +123,7 @@ PUT _xpack/ml/anomaly_detectors/test1
|
|||
}
|
||||
}
|
||||
|
||||
PUT _xpack/ml/datafeeds/datafeed-test1
|
||||
PUT _ml/datafeeds/datafeed-test1
|
||||
{
|
||||
"job_id": "test1",
|
||||
"indices": ["my_index"],
|
||||
|
@ -160,7 +160,7 @@ You can preview the contents of the {dfeed} by using the following API:
|
|||
|
||||
[source,js]
|
||||
----------------------------------
|
||||
GET _xpack/ml/datafeeds/datafeed-test1/_preview
|
||||
GET _ml/datafeeds/datafeed-test1/_preview
|
||||
----------------------------------
|
||||
// CONSOLE
|
||||
// TEST[skip:continued]
|
||||
|
@ -211,7 +211,7 @@ that convert your strings to upper or lowercase letters.
|
|||
.Example 2: Concatenating strings
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
PUT _xpack/ml/anomaly_detectors/test2
|
||||
PUT _ml/anomaly_detectors/test2
|
||||
{
|
||||
"analysis_config":{
|
||||
"bucket_span": "10m",
|
||||
|
@ -229,7 +229,7 @@ PUT _xpack/ml/anomaly_detectors/test2
|
|||
}
|
||||
}
|
||||
|
||||
PUT _xpack/ml/datafeeds/datafeed-test2
|
||||
PUT _ml/datafeeds/datafeed-test2
|
||||
{
|
||||
"job_id": "test2",
|
||||
"indices": ["my_index"],
|
||||
|
@ -249,7 +249,7 @@ PUT _xpack/ml/datafeeds/datafeed-test2
|
|||
}
|
||||
}
|
||||
|
||||
GET _xpack/ml/datafeeds/datafeed-test2/_preview
|
||||
GET _ml/datafeeds/datafeed-test2/_preview
|
||||
--------------------------------------------------
|
||||
// CONSOLE
|
||||
// TEST[skip:needs-licence]
|
||||
|
@ -274,7 +274,7 @@ and "SMITH " have been concatenated and an underscore was added:
|
|||
.Example 3: Trimming strings
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
POST _xpack/ml/datafeeds/datafeed-test2/_update
|
||||
POST _ml/datafeeds/datafeed-test2/_update
|
||||
{
|
||||
"script_fields": {
|
||||
"my_script_field": {
|
||||
|
@ -286,7 +286,7 @@ POST _xpack/ml/datafeeds/datafeed-test2/_update
|
|||
}
|
||||
}
|
||||
|
||||
GET _xpack/ml/datafeeds/datafeed-test2/_preview
|
||||
GET _ml/datafeeds/datafeed-test2/_preview
|
||||
--------------------------------------------------
|
||||
// CONSOLE
|
||||
// TEST[skip:continued]
|
||||
|
@ -310,7 +310,7 @@ has been trimmed to "SMITH":
|
|||
.Example 4: Converting strings to lowercase
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
POST _xpack/ml/datafeeds/datafeed-test2/_update
|
||||
POST _ml/datafeeds/datafeed-test2/_update
|
||||
{
|
||||
"script_fields": {
|
||||
"my_script_field": {
|
||||
|
@ -322,7 +322,7 @@ POST _xpack/ml/datafeeds/datafeed-test2/_update
|
|||
}
|
||||
}
|
||||
|
||||
GET _xpack/ml/datafeeds/datafeed-test2/_preview
|
||||
GET _ml/datafeeds/datafeed-test2/_preview
|
||||
--------------------------------------------------
|
||||
// CONSOLE
|
||||
// TEST[skip:continued]
|
||||
|
@ -347,7 +347,7 @@ has been converted to "joe":
|
|||
.Example 5: Converting strings to mixed case formats
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
POST _xpack/ml/datafeeds/datafeed-test2/_update
|
||||
POST _ml/datafeeds/datafeed-test2/_update
|
||||
{
|
||||
"script_fields": {
|
||||
"my_script_field": {
|
||||
|
@ -359,7 +359,7 @@ POST _xpack/ml/datafeeds/datafeed-test2/_update
|
|||
}
|
||||
}
|
||||
|
||||
GET _xpack/ml/datafeeds/datafeed-test2/_preview
|
||||
GET _ml/datafeeds/datafeed-test2/_preview
|
||||
--------------------------------------------------
|
||||
// CONSOLE
|
||||
// TEST[skip:continued]
|
||||
|
@ -384,7 +384,7 @@ has been converted to "Joe":
|
|||
.Example 6: Replacing tokens
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
POST _xpack/ml/datafeeds/datafeed-test2/_update
|
||||
POST _ml/datafeeds/datafeed-test2/_update
|
||||
{
|
||||
"script_fields": {
|
||||
"my_script_field": {
|
||||
|
@ -396,7 +396,7 @@ POST _xpack/ml/datafeeds/datafeed-test2/_update
|
|||
}
|
||||
}
|
||||
|
||||
GET _xpack/ml/datafeeds/datafeed-test2/_preview
|
||||
GET _ml/datafeeds/datafeed-test2/_preview
|
||||
--------------------------------------------------
|
||||
// CONSOLE
|
||||
// TEST[skip:continued]
|
||||
|
@ -420,7 +420,7 @@ The preview {dfeed} API returns the following results, which show that
|
|||
.Example 7: Regular expression matching and concatenation
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
POST _xpack/ml/datafeeds/datafeed-test2/_update
|
||||
POST _ml/datafeeds/datafeed-test2/_update
|
||||
{
|
||||
"script_fields": {
|
||||
"my_script_field": {
|
||||
|
@ -432,7 +432,7 @@ POST _xpack/ml/datafeeds/datafeed-test2/_update
|
|||
}
|
||||
}
|
||||
|
||||
GET _xpack/ml/datafeeds/datafeed-test2/_preview
|
||||
GET _ml/datafeeds/datafeed-test2/_preview
|
||||
--------------------------------------------------
|
||||
// CONSOLE
|
||||
// TEST[skip:continued]
|
||||
|
@ -457,7 +457,7 @@ The preview {dfeed} API returns the following results, which show that
|
|||
.Example 8: Splitting strings by domain name
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
PUT _xpack/ml/anomaly_detectors/test3
|
||||
PUT _ml/anomaly_detectors/test3
|
||||
{
|
||||
"description":"DNS tunneling",
|
||||
"analysis_config":{
|
||||
|
@ -478,7 +478,7 @@ PUT _xpack/ml/anomaly_detectors/test3
|
|||
}
|
||||
}
|
||||
|
||||
PUT _xpack/ml/datafeeds/datafeed-test3
|
||||
PUT _ml/datafeeds/datafeed-test3
|
||||
{
|
||||
"job_id": "test3",
|
||||
"indices": ["my_index"],
|
||||
|
@ -498,7 +498,7 @@ PUT _xpack/ml/datafeeds/datafeed-test3
|
|||
}
|
||||
}
|
||||
|
||||
GET _xpack/ml/datafeeds/datafeed-test3/_preview
|
||||
GET _ml/datafeeds/datafeed-test3/_preview
|
||||
--------------------------------------------------
|
||||
// CONSOLE
|
||||
// TEST[skip:needs-licence]
|
||||
|
@ -530,7 +530,7 @@ The preview {dfeed} API returns the following results, which show that
|
|||
.Example 9: Transforming geo_point data
|
||||
[source,js]
|
||||
--------------------------------------------------
|
||||
PUT _xpack/ml/anomaly_detectors/test4
|
||||
PUT _ml/anomaly_detectors/test4
|
||||
{
|
||||
"analysis_config":{
|
||||
"bucket_span": "10m",
|
||||
|
@ -547,7 +547,7 @@ PUT _xpack/ml/anomaly_detectors/test4
|
|||
}
|
||||
}
|
||||
|
||||
PUT _xpack/ml/datafeeds/datafeed-test4
|
||||
PUT _ml/datafeeds/datafeed-test4
|
||||
{
|
||||
"job_id": "test4",
|
||||
"indices": ["my_index"],
|
||||
|
@ -567,7 +567,7 @@ PUT _xpack/ml/datafeeds/datafeed-test4
|
|||
}
|
||||
}
|
||||
|
||||
GET _xpack/ml/datafeeds/datafeed-test4/_preview
|
||||
GET _ml/datafeeds/datafeed-test4/_preview
|
||||
--------------------------------------------------
|
||||
// CONSOLE
|
||||
// TEST[skip:needs-licence]
|
||||
|
|
|
@@ -67,7 +67,7 @@ routing table without any network timeout issues unlike most other collectors.
Each shard is represented by a separate monitoring document.
| Jobs | `job_stats`
| Gathers details about all machine learning job statistics (for example,
`GET /_xpack/ml/anomaly_detectors/_stats`).
`GET /_ml/anomaly_detectors/_stats`).
+
This information only needs to be collected once, so it is collected on the
_elected_ master node. However, for the master node to be able to perform the
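For reference, a minimal sketch of issuing the same statistics call that the collector describes, using the low-level Java REST client against the new path. The `localhost:9200` host and the standalone class are assumptions for illustration only, not part of this change.

```java
import org.apache.http.HttpHost;
import org.apache.http.util.EntityUtils;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

public class MlJobStatsExample {
    public static void main(String[] args) throws Exception {
        // Host and port are placeholders; point this at your own cluster.
        try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200, "http")).build()) {
            // New endpoint introduced by this change; the old _xpack/ml path still answers but is deprecated.
            Response response = client.performRequest(new Request("GET", "/_ml/anomaly_detectors/_stats"));
            System.out.println(EntityUtils.toString(response.getEntity()));
        }
    }
}
```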
@@ -83,10 +83,10 @@ public class XPackIT extends AbstractRollingTestCase {
client().performRequest(startTrial);

String noJobs = EntityUtils.toString(
client().performRequest(new Request("GET", "/_xpack/ml/anomaly_detectors")).getEntity());
client().performRequest(new Request("GET", "/_ml/anomaly_detectors")).getEntity());
assertEquals("{\"count\":0,\"jobs\":[]}", noJobs);

Request createJob = new Request("PUT", "/_xpack/ml/anomaly_detectors/test_job");
Request createJob = new Request("PUT", "/_ml/anomaly_detectors/test_job");
createJob.setJsonEntity(
"{\n"
+ " \"analysis_config\" : {\n"

@@ -34,7 +34,7 @@ public class MlRestTestStateCleaner {

@SuppressWarnings("unchecked")
private void deleteAllDatafeeds() throws IOException {
final Request datafeedsRequest = new Request("GET", "/_xpack/ml/datafeeds");
final Request datafeedsRequest = new Request("GET", "/_ml/datafeeds");
datafeedsRequest.addParameter("filter_path", "datafeeds");
final Response datafeedsResponse = adminClient.performRequest(datafeedsRequest);
final List<Map<String, Object>> datafeeds =

@@ -44,11 +44,11 @@ public class MlRestTestStateCleaner {
}

try {
adminClient.performRequest(new Request("POST", "/_xpack/ml/datafeeds/_all/_stop"));
adminClient.performRequest(new Request("POST", "/_ml/datafeeds/_all/_stop"));
} catch (Exception e1) {
logger.warn("failed to stop all datafeeds. Forcing stop", e1);
try {
adminClient.performRequest(new Request("POST", "/_xpack/ml/datafeeds/_all/_stop?force=true"));
adminClient.performRequest(new Request("POST", "/_ml/datafeeds/_all/_stop?force=true"));
} catch (Exception e2) {
logger.warn("Force-closing all data feeds failed", e2);
}

@@ -58,12 +58,12 @@ public class MlRestTestStateCleaner {

for (Map<String, Object> datafeed : datafeeds) {
String datafeedId = (String) datafeed.get("datafeed_id");
adminClient.performRequest(new Request("DELETE", "/_xpack/ml/datafeeds/" + datafeedId));
adminClient.performRequest(new Request("DELETE", "/_ml/datafeeds/" + datafeedId));
}
}

private void deleteAllJobs() throws IOException {
final Request jobsRequest = new Request("GET", "/_xpack/ml/anomaly_detectors");
final Request jobsRequest = new Request("GET", "/_ml/anomaly_detectors");
jobsRequest.addParameter("filter_path", "jobs");
final Response response = adminClient.performRequest(jobsRequest);
@SuppressWarnings("unchecked")

@@ -74,11 +74,11 @@ public class MlRestTestStateCleaner {
}

try {
adminClient.performRequest(new Request("POST", "/_xpack/ml/anomaly_detectors/_all/_close"));
adminClient.performRequest(new Request("POST", "/_ml/anomaly_detectors/_all/_close"));
} catch (Exception e1) {
logger.warn("failed to close all jobs. Forcing closed", e1);
try {
adminClient.performRequest(new Request("POST", "/_xpack/ml/anomaly_detectors/_all/_close?force=true"));
adminClient.performRequest(new Request("POST", "/_ml/anomaly_detectors/_all/_close?force=true"));
} catch (Exception e2) {
logger.warn("Force-closing all jobs failed", e2);
}

@@ -88,7 +88,7 @@ public class MlRestTestStateCleaner {

for (Map<String, Object> jobConfig : jobConfigs) {
String jobId = (String) jobConfig.get("job_id");
adminClient.performRequest(new Request("DELETE", "/_xpack/ml/anomaly_detectors/" + jobId));
adminClient.performRequest(new Request("DELETE", "/_ml/anomaly_detectors/" + jobId));
}
}
}

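As a complement to the test cleanup above, a hypothetical check (not part of this change set) that the pre-7.0 path still responds while carrying a deprecation notice. The host, port, and the assumption that the deprecation message arrives in the standard `Warning` response header are illustrative only.

```java
import org.apache.http.HttpHost;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

public class DeprecatedMlPathCheck {
    public static void main(String[] args) throws Exception {
        try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200, "http")).build()) {
            // Old path: still routed, but expected to emit a deprecation warning.
            Response oldPath = client.performRequest(new Request("GET", "/_xpack/ml/anomaly_detectors"));
            // New path: the replacement introduced by this change.
            Response newPath = client.performRequest(new Request("GET", "/_ml/anomaly_detectors"));
            System.out.println("deprecation warning (if any): " + oldPath.getHeader("Warning"));
            System.out.println("status via _ml: " + newPath.getStatusLine().getStatusCode());
        }
    }
}
```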
@@ -58,6 +58,8 @@ public class MlPluginDisabledIT extends ESRestTestCase {
Request request = new Request("PUT", MachineLearning.BASE_PATH + "anomaly_detectors/foo");
request.setJsonEntity(Strings.toString(xContentBuilder));
ResponseException exception = expectThrows(ResponseException.class, () -> client().performRequest(request));
assertThat(exception.getMessage(), containsString("no handler found for uri [/_xpack/ml/anomaly_detectors/foo] and method [PUT]"));
assertThat(exception.getMessage(), containsString("method [PUT]"));
assertThat(exception.getMessage(), containsString("URI [/_ml/anomaly_detectors/foo]"));
assertThat(exception.getMessage(), containsString("400 Bad Request"));
}
}

@@ -955,7 +955,7 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
assertThat(response.getStatusLine().getStatusCode(), equalTo(200));
assertThat(EntityUtils.toString(response.getEntity()), equalTo("{\"stopped\":true}"));

client().performRequest(new Request("POST", "/_xpack/ml/anomaly_detectors/" + jobId + "/_close"));
client().performRequest(new Request("POST", "/_ml/anomaly_detectors/" + jobId + "/_close"));

response = client().performRequest(new Request("DELETE", MachineLearning.BASE_PATH + "datafeeds/" + datafeedId));
assertThat(response.getStatusLine().getStatusCode(), equalTo(200));

@@ -992,7 +992,7 @@ public class DatafeedJobsRestIT extends ESRestTestCase {
assertThat(EntityUtils.toString(response.getEntity()), equalTo("{\"acknowledged\":true}"));

expectThrows(ResponseException.class,
() -> client().performRequest(new Request("GET", "/_xpack/ml/datafeeds/" + datafeedId)));
() -> client().performRequest(new Request("GET", "/_ml/datafeeds/" + datafeedId)));
}

private class LookbackOnlyTestHelper {

@@ -244,7 +244,8 @@ import static java.util.Collections.emptyList;

public class MachineLearning extends Plugin implements ActionPlugin, AnalysisPlugin, PersistentTaskPlugin {
public static final String NAME = "ml";
public static final String BASE_PATH = "/_xpack/ml/";
public static final String BASE_PATH = "/_ml/";
public static final String PRE_V7_BASE_PATH = "/_xpack/ml/";
public static final String DATAFEED_THREAD_POOL_NAME = NAME + "_datafeed";
public static final String AUTODETECT_THREAD_POOL_NAME = NAME + "_autodetect";
public static final String UTILITY_THREAD_POOL_NAME = NAME + "_utility";

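The handler changes that follow all build on these two constants in the same way: register the primary route under `BASE_PATH` and keep the old `_xpack/ml` route alive as a deprecated alias. A condensed, hypothetical sketch of that pattern is shown below; the helper class, the `suffix` parameter, and the fixed `GET` method are illustrative, while the `registerWithDeprecatedHandler` call mirrors the concrete handlers in this change.

```java
import org.apache.logging.log4j.LogManager;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.rest.RestController;
import org.elasticsearch.rest.RestHandler;
import org.elasticsearch.xpack.ml.MachineLearning;

import static org.elasticsearch.rest.RestRequest.Method.GET;

final class MlEndpointRegistration {
    private static final DeprecationLogger deprecationLogger =
        new DeprecationLogger(LogManager.getLogger(MlEndpointRegistration.class));

    static void register(RestController controller, RestHandler handler, String suffix) {
        // Primary route on /_ml/..., deprecated alias on /_xpack/ml/... (slated for removal in 8.0.0).
        controller.registerWithDeprecatedHandler(
            GET, MachineLearning.BASE_PATH + suffix, handler,
            GET, MachineLearning.PRE_V7_BASE_PATH + suffix, deprecationLogger);
    }
}
```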
@ -5,7 +5,9 @@
|
|||
*/
|
||||
package org.elasticsearch.xpack.ml.rest;
|
||||
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.elasticsearch.client.node.NodeClient;
|
||||
import org.elasticsearch.common.logging.DeprecationLogger;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.rest.BaseRestHandler;
|
||||
import org.elasticsearch.rest.RestController;
|
||||
|
@ -16,16 +18,24 @@ import org.elasticsearch.xpack.core.ml.action.DeleteExpiredDataAction;
|
|||
|
||||
import java.io.IOException;
|
||||
|
||||
import static org.elasticsearch.rest.RestRequest.Method.DELETE;
|
||||
|
||||
public class RestDeleteExpiredDataAction extends BaseRestHandler {
|
||||
|
||||
private static final DeprecationLogger deprecationLogger =
|
||||
new DeprecationLogger(LogManager.getLogger(RestDeleteExpiredDataAction.class));
|
||||
|
||||
public RestDeleteExpiredDataAction(Settings settings, RestController controller) {
|
||||
super(settings);
|
||||
controller.registerHandler(RestRequest.Method.DELETE, MachineLearning.BASE_PATH + "_delete_expired_data", this);
|
||||
// TODO: remove deprecated endpoint in 8.0.0
|
||||
controller.registerWithDeprecatedHandler(
|
||||
DELETE, MachineLearning.BASE_PATH + "_delete_expired_data", this,
|
||||
DELETE, MachineLearning.PRE_V7_BASE_PATH + "_delete_expired_data", deprecationLogger);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getName() {
|
||||
return "xpack_ml_delete_expired_data_action";
|
||||
return "ml_delete_expired_data_action";
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@ -5,8 +5,10 @@
|
|||
*/
|
||||
package org.elasticsearch.xpack.ml.rest;
|
||||
|
||||
import org.apache.logging.log4j.LogManager;
|
||||
import org.elasticsearch.ElasticsearchParseException;
|
||||
import org.elasticsearch.client.node.NodeClient;
|
||||
import org.elasticsearch.common.logging.DeprecationLogger;
|
||||
import org.elasticsearch.common.settings.Settings;
|
||||
import org.elasticsearch.common.unit.TimeValue;
|
||||
import org.elasticsearch.rest.BaseRestHandler;
|
||||
|
@ -23,18 +25,26 @@ import java.util.Collections;
|
|||
import java.util.Set;
|
||||
import java.util.concurrent.TimeUnit;
|
||||
|
||||
import static org.elasticsearch.rest.RestRequest.Method.POST;
|
||||
|
||||
public class RestFindFileStructureAction extends BaseRestHandler {
|
||||
|
||||
private static final TimeValue DEFAULT_TIMEOUT = new TimeValue(25, TimeUnit.SECONDS);
|
||||
|
||||
private static final DeprecationLogger deprecationLogger =
|
||||
new DeprecationLogger(LogManager.getLogger(RestFindFileStructureAction.class));
|
||||
|
||||
public RestFindFileStructureAction(Settings settings, RestController controller) {
|
||||
super(settings);
|
||||
controller.registerHandler(RestRequest.Method.POST, MachineLearning.BASE_PATH + "find_file_structure", this);
|
||||
// TODO: remove deprecated endpoint in 8.0.0
|
||||
controller.registerWithDeprecatedHandler(
|
||||
POST, MachineLearning.BASE_PATH + "find_file_structure", this,
|
||||
POST, MachineLearning.PRE_V7_BASE_PATH + "find_file_structure", deprecationLogger);
|
||||
}
|
||||
|
||||
@Override
|
||||
public String getName() {
|
||||
return "xpack_ml_find_file_structure_action";
|
||||
return "ml_find_file_structure_action";
|
||||
}
|
||||
|
||||
@Override
|
||||
|
|
|
@@ -5,7 +5,9 @@
*/
package org.elasticsearch.xpack.ml.rest;
import org.apache.logging.log4j.LogManager;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.RestController;
@@ -16,16 +18,24 @@ import org.elasticsearch.xpack.ml.MachineLearning;
import java.io.IOException;
import static org.elasticsearch.rest.RestRequest.Method.GET;
public class RestMlInfoAction extends BaseRestHandler {
private static final DeprecationLogger deprecationLogger =
new DeprecationLogger(LogManager.getLogger(RestMlInfoAction.class));
public RestMlInfoAction(Settings settings, RestController controller) {
super(settings);
controller.registerHandler(RestRequest.Method.GET, MachineLearning.BASE_PATH + "info", this);
// TODO: remove deprecated endpoint in 8.0.0
controller.registerWithDeprecatedHandler(
GET, MachineLearning.BASE_PATH + "info", this,
GET, MachineLearning.PRE_V7_BASE_PATH + "info", deprecationLogger);
}
@Override
public String getName() {
return "xpack_ml_info_action";
return "ml_info_action";
}
@Override

@@ -5,7 +5,9 @@
*/
package org.elasticsearch.xpack.ml.rest.calendar;
import org.apache.logging.log4j.LogManager;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.RestController;
@@ -17,17 +19,24 @@ import org.elasticsearch.xpack.ml.MachineLearning;
import java.io.IOException;
import static org.elasticsearch.rest.RestRequest.Method.DELETE;
public class RestDeleteCalendarAction extends BaseRestHandler {
private static final DeprecationLogger deprecationLogger =
new DeprecationLogger(LogManager.getLogger(RestDeleteCalendarAction.class));
public RestDeleteCalendarAction(Settings settings, RestController controller) {
super(settings);
controller.registerHandler(RestRequest.Method.DELETE,
MachineLearning.BASE_PATH + "calendars/{" + Calendar.ID.getPreferredName() + "}", this);
// TODO: remove deprecated endpoint in 8.0.0
controller.registerWithDeprecatedHandler(
DELETE, MachineLearning.BASE_PATH + "calendars/{" + Calendar.ID.getPreferredName() + "}", this,
DELETE, MachineLearning.PRE_V7_BASE_PATH + "calendars/{" + Calendar.ID.getPreferredName() + "}", deprecationLogger);
}
@Override
public String getName() {
return "xpack_ml_delete_calendar_action";
return "ml_delete_calendar_action";
}
@Override

@@ -5,7 +5,9 @@
*/
package org.elasticsearch.xpack.ml.rest.calendar;
import org.apache.logging.log4j.LogManager;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.RestController;
@@ -18,18 +20,26 @@ import org.elasticsearch.xpack.ml.MachineLearning;
import java.io.IOException;
import static org.elasticsearch.rest.RestRequest.Method.DELETE;
public class RestDeleteCalendarEventAction extends BaseRestHandler {
private static final DeprecationLogger deprecationLogger =
new DeprecationLogger(LogManager.getLogger(RestDeleteCalendarEventAction.class));
public RestDeleteCalendarEventAction(Settings settings, RestController controller) {
super(settings);
controller.registerHandler(RestRequest.Method.DELETE,
MachineLearning.BASE_PATH + "calendars/{" + Calendar.ID.getPreferredName() + "}/events/{" +
ScheduledEvent.EVENT_ID.getPreferredName() + "}", this);
// TODO: remove deprecated endpoint in 8.0.0
controller.registerWithDeprecatedHandler(
DELETE, MachineLearning.BASE_PATH + "calendars/{" + Calendar.ID.getPreferredName() + "}/events/{" +
ScheduledEvent.EVENT_ID.getPreferredName() + "}", this,
DELETE, MachineLearning.PRE_V7_BASE_PATH + "calendars/{" + Calendar.ID.getPreferredName() + "}/events/{" +
ScheduledEvent.EVENT_ID.getPreferredName() + "}", deprecationLogger);
}
@Override
public String getName() {
return "xpack_ml_delete_calendar_event_action";
return "ml_delete_calendar_event_action";
}
@Override

@@ -5,7 +5,9 @@
*/
package org.elasticsearch.xpack.ml.rest.calendar;
import org.apache.logging.log4j.LogManager;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.RestController;
@@ -18,18 +20,26 @@ import org.elasticsearch.xpack.core.ml.job.config.Job;
import java.io.IOException;
import static org.elasticsearch.rest.RestRequest.Method.DELETE;
public class RestDeleteCalendarJobAction extends BaseRestHandler {
private static final DeprecationLogger deprecationLogger =
new DeprecationLogger(LogManager.getLogger(RestDeleteCalendarJobAction.class));
public RestDeleteCalendarJobAction(Settings settings, RestController controller) {
super(settings);
controller.registerHandler(RestRequest.Method.DELETE,
MachineLearning.BASE_PATH + "calendars/{" + Calendar.ID.getPreferredName() + "}/jobs/{" +
Job.ID.getPreferredName() + "}", this);
// TODO: remove deprecated endpoint in 8.0.0
controller.registerWithDeprecatedHandler(
DELETE, MachineLearning.BASE_PATH + "calendars/{" + Calendar.ID.getPreferredName() + "}/jobs/{" +
Job.ID.getPreferredName() + "}", this,
DELETE, MachineLearning.PRE_V7_BASE_PATH + "calendars/{" + Calendar.ID.getPreferredName() + "}/jobs/{" +
Job.ID.getPreferredName() + "}", deprecationLogger);
}
@Override
public String getName() {
return "xpack_ml_delete_calendar_job_action";
return "ml_delete_calendar_job_action";
}
@Override

@@ -5,7 +5,9 @@
*/
package org.elasticsearch.xpack.ml.rest.calendar;
import org.apache.logging.log4j.LogManager;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.rest.BaseRestHandler;
@@ -20,16 +22,24 @@ import org.elasticsearch.xpack.core.ml.job.config.Job;
import java.io.IOException;
import static org.elasticsearch.rest.RestRequest.Method.GET;
public class RestGetCalendarEventsAction extends BaseRestHandler {
private static final DeprecationLogger deprecationLogger =
new DeprecationLogger(LogManager.getLogger(RestGetCalendarEventsAction.class));
public RestGetCalendarEventsAction(Settings settings, RestController controller) {
super(settings);
controller.registerHandler(RestRequest.Method.GET,
MachineLearning.BASE_PATH + "calendars/{" + Calendar.ID.getPreferredName() + "}/events", this);
// TODO: remove deprecated endpoint in 8.0.0
controller.registerWithDeprecatedHandler(
GET, MachineLearning.BASE_PATH + "calendars/{" + Calendar.ID.getPreferredName() + "}/events", this,
GET, MachineLearning.PRE_V7_BASE_PATH + "calendars/{" + Calendar.ID.getPreferredName() + "}/events", deprecationLogger);
}
@Override
public String getName() {
return "xpack_ml_get_calendar_events_action";
return "ml_get_calendar_events_action";
}
@Override

@@ -5,8 +5,10 @@
*/
package org.elasticsearch.xpack.ml.rest.calendar;
import org.apache.logging.log4j.LogManager;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.rest.BaseRestHandler;
@@ -20,23 +22,36 @@ import org.elasticsearch.xpack.core.ml.calendars.Calendar;
import java.io.IOException;
import static org.elasticsearch.rest.RestRequest.Method.GET;
import static org.elasticsearch.rest.RestRequest.Method.POST;
public class RestGetCalendarsAction extends BaseRestHandler {
private static final DeprecationLogger deprecationLogger =
new DeprecationLogger(LogManager.getLogger(RestGetCalendarsAction.class));
public RestGetCalendarsAction(Settings settings, RestController controller) {
super(settings);
controller.registerHandler(RestRequest.Method.GET, MachineLearning.BASE_PATH + "calendars/{" +
Calendar.ID.getPreferredName() + "}", this);
controller.registerHandler(RestRequest.Method.GET, MachineLearning.BASE_PATH + "calendars/", this);
// TODO: remove deprecated endpoint in 8.0.0
controller.registerWithDeprecatedHandler(
GET, MachineLearning.BASE_PATH + "calendars/{" + Calendar.ID.getPreferredName() + "}", this,
GET, MachineLearning.PRE_V7_BASE_PATH + "calendars/{" + Calendar.ID.getPreferredName() + "}", deprecationLogger);
controller.registerWithDeprecatedHandler(
GET, MachineLearning.BASE_PATH + "calendars/", this,
GET, MachineLearning.PRE_V7_BASE_PATH + "calendars/", deprecationLogger);
// endpoints that support body parameters must also accept POST
controller.registerHandler(RestRequest.Method.POST, MachineLearning.BASE_PATH + "calendars/{" +
Calendar.ID.getPreferredName() + "}", this);
controller.registerHandler(RestRequest.Method.POST, MachineLearning.BASE_PATH + "calendars/", this);
controller.registerWithDeprecatedHandler(
POST, MachineLearning.BASE_PATH + "calendars/{" + Calendar.ID.getPreferredName() + "}", this,
POST, MachineLearning.PRE_V7_BASE_PATH + "calendars/{" + Calendar.ID.getPreferredName() + "}", deprecationLogger);
controller.registerWithDeprecatedHandler(
POST, MachineLearning.BASE_PATH + "calendars/", this,
POST, MachineLearning.PRE_V7_BASE_PATH + "calendars/", deprecationLogger);
}
@Override
public String getName() {
return "xpack_ml_get_calendars_action";
return "ml_get_calendars_action";
}
@Override

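The comment above ("endpoints that support body parameters must also accept POST") explains the extra POST registrations in this handler: paging and filtering options for get-calendars can arrive in a request body, and a GET with a body is awkward for many HTTP clients. The following is a hypothetical usage sketch with the low-level Java REST client, assuming a node on localhost:9200 and that the endpoint accepts a page object in the body; it is not part of the commit.

import org.apache.http.HttpHost;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

public class GetCalendarsByPost {
    public static void main(String[] args) throws Exception {
        try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200, "http")).build()) {
            // Same resource as GET /_ml/calendars, but sent as POST so the paging
            // options can travel in the body instead of the query string.
            Request request = new Request("POST", "/_ml/calendars");
            request.setJsonEntity("{\"page\":{\"from\":0,\"size\":10}}");
            Response response = client.performRequest(request);
            System.out.println(response.getStatusLine());
        }
    }
}
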
@@ -5,7 +5,9 @@
*/
package org.elasticsearch.xpack.ml.rest.calendar;
import org.apache.logging.log4j.LogManager;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.rest.BaseRestHandler;
@@ -18,17 +20,24 @@ import org.elasticsearch.xpack.core.ml.calendars.Calendar;
import java.io.IOException;
import static org.elasticsearch.rest.RestRequest.Method.POST;
public class RestPostCalendarEventAction extends BaseRestHandler {
private static final DeprecationLogger deprecationLogger =
new DeprecationLogger(LogManager.getLogger(RestPostCalendarEventAction.class));
public RestPostCalendarEventAction(Settings settings, RestController controller) {
super(settings);
controller.registerHandler(RestRequest.Method.POST,
MachineLearning.BASE_PATH + "calendars/{" + Calendar.ID.getPreferredName() + "}/events", this);
// TODO: remove deprecated endpoint in 8.0.0
controller.registerWithDeprecatedHandler(
POST, MachineLearning.BASE_PATH + "calendars/{" + Calendar.ID.getPreferredName() + "}/events", this,
POST, MachineLearning.PRE_V7_BASE_PATH + "calendars/{" + Calendar.ID.getPreferredName() + "}/events", deprecationLogger);
}
@Override
public String getName() {
return "xpack_ml_post_calendar_event_action";
return "ml_post_calendar_event_action";
}
@Override

@@ -5,7 +5,9 @@
*/
package org.elasticsearch.xpack.ml.rest.calendar;
import org.apache.logging.log4j.LogManager;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.rest.BaseRestHandler;
@@ -19,17 +21,24 @@ import org.elasticsearch.xpack.core.ml.calendars.Calendar;
import java.io.IOException;
import java.util.Collections;
import static org.elasticsearch.rest.RestRequest.Method.PUT;
public class RestPutCalendarAction extends BaseRestHandler {
private static final DeprecationLogger deprecationLogger =
new DeprecationLogger(LogManager.getLogger(RestPutCalendarAction.class));
public RestPutCalendarAction(Settings settings, RestController controller) {
super(settings);
controller.registerHandler(RestRequest.Method.PUT,
MachineLearning.BASE_PATH + "calendars/{" + Calendar.ID.getPreferredName() + "}", this);
// TODO: remove deprecated endpoint in 8.0.0
controller.registerWithDeprecatedHandler(
PUT, MachineLearning.BASE_PATH + "calendars/{" + Calendar.ID.getPreferredName() + "}", this,
PUT, MachineLearning.PRE_V7_BASE_PATH + "calendars/{" + Calendar.ID.getPreferredName() + "}", deprecationLogger);
}
@Override
public String getName() {
return "xpack_ml_put_calendar_action";
return "ml_put_calendar_action";
}
@Override

@@ -5,7 +5,9 @@
*/
package org.elasticsearch.xpack.ml.rest.calendar;
import org.apache.logging.log4j.LogManager;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.RestController;
@@ -18,18 +20,26 @@ import org.elasticsearch.xpack.core.ml.job.config.Job;
import java.io.IOException;
import static org.elasticsearch.rest.RestRequest.Method.PUT;
public class RestPutCalendarJobAction extends BaseRestHandler {
private static final DeprecationLogger deprecationLogger =
new DeprecationLogger(LogManager.getLogger(RestPutCalendarJobAction.class));
public RestPutCalendarJobAction(Settings settings, RestController controller) {
super(settings);
controller.registerHandler(RestRequest.Method.PUT,
MachineLearning.BASE_PATH + "calendars/{" + Calendar.ID.getPreferredName() + "}/jobs/{" +
Job.ID.getPreferredName() + "}", this);
// TODO: remove deprecated endpoint in 8.0.0
controller.registerWithDeprecatedHandler(
PUT, MachineLearning.BASE_PATH + "calendars/{" + Calendar.ID.getPreferredName() + "}/jobs/{" +
Job.ID.getPreferredName() + "}", this,
PUT, MachineLearning.PRE_V7_BASE_PATH + "calendars/{" + Calendar.ID.getPreferredName() + "}/jobs/{" +
Job.ID.getPreferredName() + "}", deprecationLogger);
}
@Override
public String getName() {
return "xpack_ml_put_calendar_job_action";
return "ml_put_calendar_job_action";
}
@Override

@@ -5,7 +5,9 @@
*/
package org.elasticsearch.xpack.ml.rest.datafeeds;
import org.apache.logging.log4j.LogManager;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.RestController;
@@ -18,17 +20,24 @@ import org.elasticsearch.xpack.ml.MachineLearning;
import java.io.IOException;
import static org.elasticsearch.rest.RestRequest.Method.DELETE;
public class RestDeleteDatafeedAction extends BaseRestHandler {
private static final DeprecationLogger deprecationLogger =
new DeprecationLogger(LogManager.getLogger(RestDeleteDatafeedAction.class));
public RestDeleteDatafeedAction(Settings settings, RestController controller) {
super(settings);
controller.registerHandler(RestRequest.Method.DELETE, MachineLearning.BASE_PATH + "datafeeds/{"
+ DatafeedConfig.ID.getPreferredName() + "}", this);
// TODO: remove deprecated endpoint in 8.0.0
controller.registerWithDeprecatedHandler(
DELETE, MachineLearning.BASE_PATH + "datafeeds/{" + DatafeedConfig.ID.getPreferredName() + "}", this,
DELETE, MachineLearning.PRE_V7_BASE_PATH + "datafeeds/{" + DatafeedConfig.ID.getPreferredName() + "}", deprecationLogger);
}
@Override
public String getName() {
return "xpack_ml_delete_datafeed_action";
return "ml_delete_datafeed_action";
}
@Override

@@ -5,8 +5,10 @@
*/
package org.elasticsearch.xpack.ml.rest.datafeeds;
import org.apache.logging.log4j.LogManager;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.RestController;
@@ -18,19 +20,27 @@ import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig;
import java.io.IOException;
import static org.elasticsearch.rest.RestRequest.Method.GET;
public class RestGetDatafeedStatsAction extends BaseRestHandler {
private static final DeprecationLogger deprecationLogger =
new DeprecationLogger(LogManager.getLogger(RestGetDatafeedStatsAction.class));
public RestGetDatafeedStatsAction(Settings settings, RestController controller) {
super(settings);
controller.registerHandler(RestRequest.Method.GET, MachineLearning.BASE_PATH
+ "datafeeds/{" + DatafeedConfig.ID.getPreferredName() + "}/_stats", this);
controller.registerHandler(RestRequest.Method.GET, MachineLearning.BASE_PATH
+ "datafeeds/_stats", this);
// TODO: remove deprecated endpoint in 8.0.0
controller.registerWithDeprecatedHandler(
GET, MachineLearning.BASE_PATH + "datafeeds/{" + DatafeedConfig.ID.getPreferredName() + "}/_stats", this,
GET, MachineLearning.PRE_V7_BASE_PATH + "datafeeds/{" + DatafeedConfig.ID.getPreferredName() + "}/_stats", deprecationLogger);
controller.registerWithDeprecatedHandler(
GET, MachineLearning.BASE_PATH + "datafeeds/_stats", this,
GET, MachineLearning.PRE_V7_BASE_PATH + "datafeeds/_stats", deprecationLogger);
}
@Override
public String getName() {
return "xpack_ml_get_datafeed_stats_action";
return "ml_get_datafeed_stats_action";
}
@Override

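Because the old paths are registered through registerWithDeprecatedHandler rather than dropped, existing clients keep working while the deprecation is advertised. The following hypothetical check uses the low-level Java REST client; the datafeed id my-datafeed is made up, and it assumes the deprecated route reports itself via a Warning response header, which is how Elasticsearch deprecation logging is normally surfaced to clients.

import org.apache.http.HttpHost;
import org.elasticsearch.client.Request;
import org.elasticsearch.client.Response;
import org.elasticsearch.client.RestClient;

public class DeprecatedDatafeedStatsCall {
    public static void main(String[] args) throws Exception {
        try (RestClient client = RestClient.builder(new HttpHost("localhost", 9200, "http")).build()) {
            // Old-style path: still routed to the same handler, but flagged as deprecated.
            Response old = client.performRequest(new Request("GET", "/_xpack/ml/datafeeds/my-datafeed/_stats"));
            System.out.println("deprecated path warning: " + old.getHeader("Warning"));

            // New-style path: same response body, no deprecation warning expected.
            Response current = client.performRequest(new Request("GET", "/_ml/datafeeds/my-datafeed/_stats"));
            System.out.println("new path status: " + current.getStatusLine());
        }
    }
}
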
@@ -5,7 +5,9 @@
*/
package org.elasticsearch.xpack.ml.rest.datafeeds;
import org.apache.logging.log4j.LogManager;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.RestController;
@@ -17,19 +19,27 @@ import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig;
import java.io.IOException;
import static org.elasticsearch.rest.RestRequest.Method.GET;
public class RestGetDatafeedsAction extends BaseRestHandler {
private static final DeprecationLogger deprecationLogger =
new DeprecationLogger(LogManager.getLogger(RestGetDatafeedsAction.class));
public RestGetDatafeedsAction(Settings settings, RestController controller) {
super(settings);
controller.registerHandler(RestRequest.Method.GET, MachineLearning.BASE_PATH
+ "datafeeds/{" + DatafeedConfig.ID.getPreferredName() + "}", this);
controller.registerHandler(RestRequest.Method.GET, MachineLearning.BASE_PATH
+ "datafeeds", this);
// TODO: remove deprecated endpoint in 8.0.0
controller.registerWithDeprecatedHandler(
GET, MachineLearning.BASE_PATH + "datafeeds/{" + DatafeedConfig.ID.getPreferredName() + "}", this,
GET, MachineLearning.PRE_V7_BASE_PATH + "datafeeds/{" + DatafeedConfig.ID.getPreferredName() + "}", deprecationLogger);
controller.registerWithDeprecatedHandler(
GET, MachineLearning.BASE_PATH + "datafeeds", this,
GET, MachineLearning.PRE_V7_BASE_PATH + "datafeeds", deprecationLogger);
}
@Override
public String getName() {
return "xpack_ml_get_datafeeds_action";
return "ml_get_datafeeds_action";
}
@Override

@@ -5,7 +5,9 @@
*/
package org.elasticsearch.xpack.ml.rest.datafeeds;
import org.apache.logging.log4j.LogManager;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.RestController;
@@ -17,17 +19,24 @@ import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig;
import java.io.IOException;
import static org.elasticsearch.rest.RestRequest.Method.GET;
public class RestPreviewDatafeedAction extends BaseRestHandler {
private static final DeprecationLogger deprecationLogger =
new DeprecationLogger(LogManager.getLogger(RestPreviewDatafeedAction.class));
public RestPreviewDatafeedAction(Settings settings, RestController controller) {
super(settings);
controller.registerHandler(RestRequest.Method.GET,
MachineLearning.BASE_PATH + "datafeeds/{" + DatafeedConfig.ID.getPreferredName() + "}/_preview", this);
// TODO: remove deprecated endpoint in 8.0.0
controller.registerWithDeprecatedHandler(
GET, MachineLearning.BASE_PATH + "datafeeds/{" + DatafeedConfig.ID.getPreferredName() + "}/_preview", this,
GET, MachineLearning.PRE_V7_BASE_PATH + "datafeeds/{" + DatafeedConfig.ID.getPreferredName() + "}/_preview", deprecationLogger);
}
@Override
public String getName() {
return "xpack_ml_preview_datafeed_action";
return "ml_preview_datafeed_action";
}
@Override

@@ -5,7 +5,9 @@
*/
package org.elasticsearch.xpack.ml.rest.datafeeds;
import org.apache.logging.log4j.LogManager;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.rest.BaseRestHandler;
@@ -18,17 +20,24 @@ import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig;
import java.io.IOException;
import static org.elasticsearch.rest.RestRequest.Method.PUT;
public class RestPutDatafeedAction extends BaseRestHandler {
private static final DeprecationLogger deprecationLogger =
new DeprecationLogger(LogManager.getLogger(RestPutDatafeedAction.class));
public RestPutDatafeedAction(Settings settings, RestController controller) {
super(settings);
controller.registerHandler(RestRequest.Method.PUT, MachineLearning.BASE_PATH + "datafeeds/{"
+ DatafeedConfig.ID.getPreferredName() + "}", this);
// TODO: remove deprecated endpoint in 8.0.0
controller.registerWithDeprecatedHandler(
PUT, MachineLearning.BASE_PATH + "datafeeds/{" + DatafeedConfig.ID.getPreferredName() + "}", this,
PUT, MachineLearning.PRE_V7_BASE_PATH + "datafeeds/{" + DatafeedConfig.ID.getPreferredName() + "}", deprecationLogger);
}
@Override
public String getName() {
return "xpack_ml_put_datafeed_action";
return "ml_put_datafeed_action";
}
@Override

@@ -5,8 +5,10 @@
*/
package org.elasticsearch.xpack.ml.rest.datafeeds;
import org.apache.logging.log4j.LogManager;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -24,19 +26,26 @@ import org.elasticsearch.xpack.ml.MachineLearning;
import java.io.IOException;
import static org.elasticsearch.rest.RestRequest.Method.POST;
public class RestStartDatafeedAction extends BaseRestHandler {
private static final String DEFAULT_START = "0";
private static final DeprecationLogger deprecationLogger =
new DeprecationLogger(LogManager.getLogger(RestStartDatafeedAction.class));
public RestStartDatafeedAction(Settings settings, RestController controller) {
super(settings);
controller.registerHandler(RestRequest.Method.POST,
MachineLearning.BASE_PATH + "datafeeds/{" + DatafeedConfig.ID.getPreferredName() + "}/_start", this);
// TODO: remove deprecated endpoint in 8.0.0
controller.registerWithDeprecatedHandler(
POST, MachineLearning.BASE_PATH + "datafeeds/{" + DatafeedConfig.ID.getPreferredName() + "}/_start", this,
POST, MachineLearning.PRE_V7_BASE_PATH + "datafeeds/{" + DatafeedConfig.ID.getPreferredName() + "}/_start", deprecationLogger);
}
@Override
public String getName() {
return "xpack_ml_start_datafeed_action";
return "ml_start_datafeed_action";
}
@Override

@@ -5,7 +5,9 @@
*/
package org.elasticsearch.xpack.ml.rest.datafeeds;
import org.apache.logging.log4j.LogManager;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -24,17 +26,24 @@ import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig;
import java.io.IOException;
import static org.elasticsearch.rest.RestRequest.Method.POST;
public class RestStopDatafeedAction extends BaseRestHandler {
private static final DeprecationLogger deprecationLogger =
new DeprecationLogger(LogManager.getLogger(RestStopDatafeedAction.class));
public RestStopDatafeedAction(Settings settings, RestController controller) {
super(settings);
controller.registerHandler(RestRequest.Method.POST, MachineLearning.BASE_PATH + "datafeeds/{"
+ DatafeedConfig.ID.getPreferredName() + "}/_stop", this);
// TODO: remove deprecated endpoint in 8.0.0
controller.registerWithDeprecatedHandler(
POST, MachineLearning.BASE_PATH + "datafeeds/{" + DatafeedConfig.ID.getPreferredName() + "}/_stop", this,
POST, MachineLearning.PRE_V7_BASE_PATH + "datafeeds/{" + DatafeedConfig.ID.getPreferredName() + "}/_stop", deprecationLogger);
}
@Override
public String getName() {
return "xpack_ml_stop_datafeed_action";
return "ml_stop_datafeed_action";
}
@Override

@@ -5,7 +5,9 @@
*/
package org.elasticsearch.xpack.ml.rest.datafeeds;
import org.apache.logging.log4j.LogManager;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.rest.BaseRestHandler;
@@ -18,17 +20,24 @@ import org.elasticsearch.xpack.core.ml.datafeed.DatafeedConfig;
import java.io.IOException;
import static org.elasticsearch.rest.RestRequest.Method.POST;
public class RestUpdateDatafeedAction extends BaseRestHandler {
private static final DeprecationLogger deprecationLogger =
new DeprecationLogger(LogManager.getLogger(RestUpdateDatafeedAction.class));
public RestUpdateDatafeedAction(Settings settings, RestController controller) {
super(settings);
controller.registerHandler(RestRequest.Method.POST, MachineLearning.BASE_PATH + "datafeeds/{"
+ DatafeedConfig.ID.getPreferredName() + "}/_update", this);
// TODO: remove deprecated endpoint in 8.0.0
controller.registerWithDeprecatedHandler(
POST, MachineLearning.BASE_PATH + "datafeeds/{" + DatafeedConfig.ID.getPreferredName() + "}/_update", this,
POST, MachineLearning.PRE_V7_BASE_PATH + "datafeeds/{" + DatafeedConfig.ID.getPreferredName() + "}/_update", deprecationLogger);
}
@Override
public String getName() {
return "xpack_ml_update_datafeed_action";
return "ml_update_datafeed_action";
}
@Override

@@ -5,7 +5,9 @@
*/
package org.elasticsearch.xpack.ml.rest.filter;
import org.apache.logging.log4j.LogManager;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.RestController;
@@ -17,17 +19,24 @@ import org.elasticsearch.xpack.ml.MachineLearning;
import java.io.IOException;
import static org.elasticsearch.rest.RestRequest.Method.DELETE;
public class RestDeleteFilterAction extends BaseRestHandler {
private static final DeprecationLogger deprecationLogger =
new DeprecationLogger(LogManager.getLogger(RestDeleteFilterAction.class));
public RestDeleteFilterAction(Settings settings, RestController controller) {
super(settings);
controller.registerHandler(RestRequest.Method.DELETE,
MachineLearning.BASE_PATH + "filters/{" + Request.FILTER_ID.getPreferredName() + "}", this);
// TODO: remove deprecated endpoint in 8.0.0
controller.registerWithDeprecatedHandler(
DELETE, MachineLearning.BASE_PATH + "filters/{" + Request.FILTER_ID.getPreferredName() + "}", this,
DELETE, MachineLearning.PRE_V7_BASE_PATH + "filters/{" + Request.FILTER_ID.getPreferredName() + "}", deprecationLogger);
}
@Override
public String getName() {
return "xpack_ml_delete_filter_action";
return "ml_delete_filter_action";
}
@Override

@@ -5,8 +5,10 @@
*/
package org.elasticsearch.xpack.ml.rest.filter;
import org.apache.logging.log4j.LogManager;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.RestController;
@@ -19,18 +21,27 @@ import org.elasticsearch.xpack.core.ml.job.config.MlFilter;
import java.io.IOException;
import static org.elasticsearch.rest.RestRequest.Method.GET;
public class RestGetFiltersAction extends BaseRestHandler {
private static final DeprecationLogger deprecationLogger =
new DeprecationLogger(LogManager.getLogger(RestGetFiltersAction.class));
public RestGetFiltersAction(Settings settings, RestController controller) {
super(settings);
controller.registerHandler(RestRequest.Method.GET, MachineLearning.BASE_PATH + "filters/{" + MlFilter.ID.getPreferredName() + "}",
this);
controller.registerHandler(RestRequest.Method.GET, MachineLearning.BASE_PATH + "filters/", this);
// TODO: remove deprecated endpoint in 8.0.0
controller.registerWithDeprecatedHandler(
GET, MachineLearning.BASE_PATH + "filters/{" + MlFilter.ID.getPreferredName() + "}", this,
GET, MachineLearning.PRE_V7_BASE_PATH + "filters/{" + MlFilter.ID.getPreferredName() + "}", deprecationLogger);
controller.registerWithDeprecatedHandler(
GET, MachineLearning.BASE_PATH + "filters/", this,
GET, MachineLearning.PRE_V7_BASE_PATH + "filters/", deprecationLogger);
}
@Override
public String getName() {
return "xpack_ml_get_filters_action";
return "ml_get_filters_action";
}
@Override

@@ -5,7 +5,9 @@
*/
package org.elasticsearch.xpack.ml.rest.filter;
import org.apache.logging.log4j.LogManager;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.rest.BaseRestHandler;
@@ -18,17 +20,24 @@ import org.elasticsearch.xpack.ml.MachineLearning;
import java.io.IOException;
import static org.elasticsearch.rest.RestRequest.Method.PUT;
public class RestPutFilterAction extends BaseRestHandler {
private static final DeprecationLogger deprecationLogger =
new DeprecationLogger(LogManager.getLogger(RestPutFilterAction.class));
public RestPutFilterAction(Settings settings, RestController controller) {
super(settings);
controller.registerHandler(RestRequest.Method.PUT,
MachineLearning.BASE_PATH + "filters/{" + MlFilter.ID.getPreferredName() + "}", this);
// TODO: remove deprecated endpoint in 8.0.0
controller.registerWithDeprecatedHandler(
PUT, MachineLearning.BASE_PATH + "filters/{" + MlFilter.ID.getPreferredName() + "}", this,
PUT, MachineLearning.PRE_V7_BASE_PATH + "filters/{" + MlFilter.ID.getPreferredName() + "}", deprecationLogger);
}
@Override
public String getName() {
return "xpack_ml_put_filter_action";
return "ml_put_filter_action";
}
@Override

@@ -5,7 +5,9 @@
*/
package org.elasticsearch.xpack.ml.rest.filter;
import org.apache.logging.log4j.LogManager;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.rest.BaseRestHandler;
@@ -18,17 +20,24 @@ import org.elasticsearch.xpack.ml.MachineLearning;
import java.io.IOException;
import static org.elasticsearch.rest.RestRequest.Method.POST;
public class RestUpdateFilterAction extends BaseRestHandler {
private static final DeprecationLogger deprecationLogger =
new DeprecationLogger(LogManager.getLogger(RestUpdateFilterAction.class));
public RestUpdateFilterAction(Settings settings, RestController controller) {
super(settings);
controller.registerHandler(RestRequest.Method.POST,
MachineLearning.BASE_PATH + "filters/{" + MlFilter.ID.getPreferredName() + "}/_update", this);
// TODO: remove deprecated endpoint in 8.0.0
controller.registerWithDeprecatedHandler(
POST, MachineLearning.BASE_PATH + "filters/{" + MlFilter.ID.getPreferredName() + "}/_update", this,
POST, MachineLearning.PRE_V7_BASE_PATH + "filters/{" + MlFilter.ID.getPreferredName() + "}/_update", deprecationLogger);
}
@Override
public String getName() {
return "xpack_ml_update_filter_action";
return "ml_update_filter_action";
}
@Override

@@ -5,7 +5,9 @@
*/
package org.elasticsearch.xpack.ml.rest.job;
import org.apache.logging.log4j.LogManager;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.rest.BaseRestHandler;
@@ -19,17 +21,24 @@ import org.elasticsearch.xpack.ml.MachineLearning;
import java.io.IOException;
import static org.elasticsearch.rest.RestRequest.Method.POST;
public class RestCloseJobAction extends BaseRestHandler {
private static final DeprecationLogger deprecationLogger =
new DeprecationLogger(LogManager.getLogger(RestCloseJobAction.class));
public RestCloseJobAction(Settings settings, RestController controller) {
super(settings);
controller.registerHandler(RestRequest.Method.POST, MachineLearning.BASE_PATH
+ "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/_close", this);
// TODO: remove deprecated endpoint in 8.0.0
controller.registerWithDeprecatedHandler(
POST, MachineLearning.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/_close", this,
POST, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/_close", deprecationLogger);
}
@Override
public String getName() {
return "xpack_ml_close_job_action";
return "ml_close_job_action";
}
@Override

@@ -5,8 +5,10 @@
*/
package org.elasticsearch.xpack.ml.rest.job;
import org.apache.logging.log4j.LogManager;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.RestController;
@@ -19,20 +21,26 @@ import org.elasticsearch.xpack.ml.MachineLearning;
import java.io.IOException;
import static org.elasticsearch.rest.RestRequest.Method.DELETE;
public class RestDeleteForecastAction extends BaseRestHandler {
private static final DeprecationLogger deprecationLogger =
new DeprecationLogger(LogManager.getLogger(RestDeleteForecastAction.class));
public RestDeleteForecastAction(Settings settings, RestController controller) {
super(settings);
controller.registerHandler(RestRequest.Method.DELETE,
MachineLearning.BASE_PATH +
"anomaly_detectors/{" + Job.ID.getPreferredName() +
"}/_forecast/{" + Forecast.FORECAST_ID.getPreferredName() + "}",
this);
// TODO: remove deprecated endpoint in 8.0.0
controller.registerWithDeprecatedHandler(
DELETE, MachineLearning.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() +
"}/_forecast/{" + Forecast.FORECAST_ID.getPreferredName() + "}", this,
DELETE, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() +
"}/_forecast/{" + Forecast.FORECAST_ID.getPreferredName() + "}", deprecationLogger);
}
@Override
public String getName() {
return "xpack_ml_delete_forecast_action";
return "ml_delete_forecast_action";
}
@Override

@@ -5,7 +5,9 @@
*/
package org.elasticsearch.xpack.ml.rest.job;
import org.apache.logging.log4j.LogManager;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentBuilder;
import org.elasticsearch.rest.BaseRestHandler;
@@ -23,17 +25,24 @@ import org.elasticsearch.xpack.ml.MachineLearning;
import java.io.IOException;
import static org.elasticsearch.rest.RestRequest.Method.DELETE;
public class RestDeleteJobAction extends BaseRestHandler {
private static final DeprecationLogger deprecationLogger =
new DeprecationLogger(LogManager.getLogger(RestDeleteJobAction.class));
public RestDeleteJobAction(Settings settings, RestController controller) {
super(settings);
controller.registerHandler(RestRequest.Method.DELETE, MachineLearning.BASE_PATH
+ "anomaly_detectors/{" + Job.ID.getPreferredName() + "}", this);
// TODO: remove deprecated endpoint in 8.0.0
controller.registerWithDeprecatedHandler(
DELETE, MachineLearning.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}", this,
DELETE, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}", deprecationLogger);
}
@Override
public String getName() {
return "xpack_ml_delete_job_action";
return "ml_delete_job_action";
}
@Override

@@ -5,7 +5,9 @@
*/
package org.elasticsearch.xpack.ml.rest.job;
import org.apache.logging.log4j.LogManager;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.rest.BaseRestHandler;
@@ -18,23 +20,30 @@ import org.elasticsearch.xpack.core.ml.job.config.Job;
import java.io.IOException;
import static org.elasticsearch.rest.RestRequest.Method.POST;
public class RestFlushJobAction extends BaseRestHandler {
private final boolean DEFAULT_CALC_INTERIM = false;
private final String DEFAULT_START = "";
private final String DEFAULT_END = "";
private final String DEFAULT_ADVANCE_TIME = "";
private final String DEFAULT_SKIP_TIME = "";
private static final boolean DEFAULT_CALC_INTERIM = false;
private static final String DEFAULT_START = "";
private static final String DEFAULT_END = "";
private static final String DEFAULT_ADVANCE_TIME = "";
private static final String DEFAULT_SKIP_TIME = "";
private static final DeprecationLogger deprecationLogger =
new DeprecationLogger(LogManager.getLogger(RestFlushJobAction.class));
public RestFlushJobAction(Settings settings, RestController controller) {
super(settings);
controller.registerHandler(RestRequest.Method.POST, MachineLearning.BASE_PATH
+ "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/_flush", this);
// TODO: remove deprecated endpoint in 8.0.0
controller.registerWithDeprecatedHandler(
POST, MachineLearning.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/_flush", this,
POST, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/_flush", deprecationLogger);
}
@Override
public String getName() {
return "xpack_ml_flush_job_action";
return "ml_flush_job_action";
}
@Override

@@ -5,7 +5,9 @@
*/
package org.elasticsearch.xpack.ml.rest.job;
import org.apache.logging.log4j.LogManager;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.rest.BaseRestHandler;
@@ -18,17 +20,24 @@ import org.elasticsearch.xpack.core.ml.job.config.Job;
import java.io.IOException;
import static org.elasticsearch.rest.RestRequest.Method.POST;
public class RestForecastJobAction extends BaseRestHandler {
private static final DeprecationLogger deprecationLogger =
new DeprecationLogger(LogManager.getLogger(RestForecastJobAction.class));
public RestForecastJobAction(Settings settings, RestController controller) {
super(settings);
controller.registerHandler(RestRequest.Method.POST,
MachineLearning.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/_forecast", this);
// TODO: remove deprecated endpoint in 8.0.0
controller.registerWithDeprecatedHandler(
POST, MachineLearning.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/_forecast", this,
POST, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/_forecast", deprecationLogger);
}
@Override
public String getName() {
return "xpack_ml_forecast_job_action";
return "ml_forecast_job_action";
}
@Override

@@ -5,9 +5,11 @@
*/
package org.elasticsearch.xpack.ml.rest.job;
import org.apache.logging.log4j.LogManager;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.RestController;
@@ -19,19 +21,27 @@ import org.elasticsearch.xpack.core.ml.job.config.Job;
import java.io.IOException;
import static org.elasticsearch.rest.RestRequest.Method.GET;
public class RestGetJobStatsAction extends BaseRestHandler {
private static final DeprecationLogger deprecationLogger =
new DeprecationLogger(LogManager.getLogger(RestGetJobStatsAction.class));
public RestGetJobStatsAction(Settings settings, RestController controller) {
super(settings);
controller.registerHandler(RestRequest.Method.GET, MachineLearning.BASE_PATH
+ "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/_stats", this);
controller.registerHandler(RestRequest.Method.GET, MachineLearning.BASE_PATH
+ "anomaly_detectors/_stats", this);
// TODO: remove deprecated endpoint in 8.0.0
controller.registerWithDeprecatedHandler(
GET, MachineLearning.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/_stats", this,
GET, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/_stats", deprecationLogger);
controller.registerWithDeprecatedHandler(
GET, MachineLearning.BASE_PATH + "anomaly_detectors/_stats", this,
GET, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/_stats", deprecationLogger);
}
@Override
public String getName() {
return "xpack_ml_get_job_stats_action";
return "ml_get_job_stats_action";
}
@Override

@@ -5,9 +5,11 @@
*/
package org.elasticsearch.xpack.ml.rest.job;
import org.apache.logging.log4j.LogManager;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.cluster.metadata.MetaData;
import org.elasticsearch.common.Strings;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.RestController;
@@ -19,20 +21,28 @@ import org.elasticsearch.xpack.core.ml.job.config.Job;
import java.io.IOException;
import static org.elasticsearch.rest.RestRequest.Method.GET;
public class RestGetJobsAction extends BaseRestHandler {
private static final DeprecationLogger deprecationLogger =
new DeprecationLogger(LogManager.getLogger(RestGetJobsAction.class));
public RestGetJobsAction(Settings settings, RestController controller) {
super(settings);
controller.registerHandler(RestRequest.Method.GET, MachineLearning.BASE_PATH
+ "anomaly_detectors/{" + Job.ID.getPreferredName() + "}", this);
controller.registerHandler(RestRequest.Method.GET, MachineLearning.BASE_PATH
+ "anomaly_detectors", this);
// TODO: remove deprecated endpoint in 8.0.0
controller.registerWithDeprecatedHandler(
GET, MachineLearning.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}", this,
GET, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}", deprecationLogger);
controller.registerWithDeprecatedHandler(
GET, MachineLearning.BASE_PATH + "anomaly_detectors", this,
GET, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors", deprecationLogger);
}
@Override
public String getName() {
return "xpack_ml_get_jobs_action";
return "ml_get_jobs_action";
}
@Override

@@ -5,8 +5,10 @@
*/
package org.elasticsearch.xpack.ml.rest.job;
import org.apache.logging.log4j.LogManager;
import org.elasticsearch.action.support.master.AcknowledgedResponse;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.unit.TimeValue;
import org.elasticsearch.common.xcontent.XContentBuilder;
@@ -23,17 +25,24 @@ import org.elasticsearch.xpack.ml.MachineLearning;
import java.io.IOException;
import static org.elasticsearch.rest.RestRequest.Method.POST;
public class RestOpenJobAction extends BaseRestHandler {
private static final DeprecationLogger deprecationLogger =
new DeprecationLogger(LogManager.getLogger(RestOpenJobAction.class));
public RestOpenJobAction(Settings settings, RestController controller) {
super(settings);
controller.registerHandler(RestRequest.Method.POST, MachineLearning.BASE_PATH
+ "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/_open", this);
// TODO: remove deprecated endpoint in 8.0.0
controller.registerWithDeprecatedHandler(
POST, MachineLearning.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/_open", this,
POST, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/_open", deprecationLogger);
}
@Override
public String getName() {
return "xpack_ml_open_job_action";
return "ml_open_job_action";
}
@Override

@@ -5,7 +5,9 @@
*/
package org.elasticsearch.xpack.ml.rest.job;
import org.apache.logging.log4j.LogManager;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.RestController;
@@ -17,20 +19,27 @@ import org.elasticsearch.xpack.core.ml.job.config.Job;
import java.io.IOException;
import static org.elasticsearch.rest.RestRequest.Method.POST;
public class RestPostDataAction extends BaseRestHandler {
private static final String DEFAULT_RESET_START = "";
private static final String DEFAULT_RESET_END = "";
private static final DeprecationLogger deprecationLogger =
new DeprecationLogger(LogManager.getLogger(RestPostDataAction.class));
public RestPostDataAction(Settings settings, RestController controller) {
super(settings);
controller.registerHandler(RestRequest.Method.POST, MachineLearning.BASE_PATH
+ "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/_data", this);
// TODO: remove deprecated endpoint in 8.0.0
controller.registerWithDeprecatedHandler(
POST, MachineLearning.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/_data", this,
POST, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/_data", deprecationLogger);
}
@Override
public String getName() {
return "xpack_ml_post_data_action";
return "ml_post_data_action";
}
@Override
@@ -47,4 +56,4 @@ public class RestPostDataAction extends BaseRestHandler {
public boolean supportsContentStream() {
return true;
}
}

@@ -5,7 +5,9 @@
*/
package org.elasticsearch.xpack.ml.rest.job;
import org.apache.logging.log4j.LogManager;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.rest.BaseRestHandler;
@@ -18,16 +20,24 @@ import org.elasticsearch.xpack.core.ml.job.config.Job;
import java.io.IOException;
import static org.elasticsearch.rest.RestRequest.Method.POST;
public class RestPostJobUpdateAction extends BaseRestHandler {
private static final DeprecationLogger deprecationLogger =
new DeprecationLogger(LogManager.getLogger(RestPostJobUpdateAction.class));
public RestPostJobUpdateAction(Settings settings, RestController controller) {
super(settings);
controller.registerHandler(RestRequest.Method.POST,
MachineLearning.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/_update", this);
// TODO: remove deprecated endpoint in 8.0.0
controller.registerWithDeprecatedHandler(
POST, MachineLearning.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/_update", this,
POST, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}/_update", deprecationLogger);
}
@Override
public String getName() {
return "xpack_ml_post_job_update_action";
return "ml_post_job_update_action";
}
@Override

@@ -5,7 +5,9 @@
*/
package org.elasticsearch.xpack.ml.rest.job;
import org.apache.logging.log4j.LogManager;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.common.xcontent.XContentParser;
import org.elasticsearch.rest.BaseRestHandler;
@@ -18,17 +20,24 @@ import org.elasticsearch.xpack.core.ml.job.config.Job;
import java.io.IOException;
import static org.elasticsearch.rest.RestRequest.Method.PUT;
public class RestPutJobAction extends BaseRestHandler {
private static final DeprecationLogger deprecationLogger =
new DeprecationLogger(LogManager.getLogger(RestPutJobAction.class));
public RestPutJobAction(Settings settings, RestController controller) {
super(settings);
controller.registerHandler(RestRequest.Method.PUT,
MachineLearning.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}", this);
// TODO: remove deprecated endpoint in 8.0.0
controller.registerWithDeprecatedHandler(
PUT, MachineLearning.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}", this,
PUT, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() + "}", deprecationLogger);
}
@Override
public String getName() {
return "xpack_ml_put_job_action";
return "ml_put_job_action";
}
@Override

@@ -5,7 +5,9 @@
*/
package org.elasticsearch.xpack.ml.rest.modelsnapshots;
import org.apache.logging.log4j.LogManager;
import org.elasticsearch.client.node.NodeClient;
import org.elasticsearch.common.logging.DeprecationLogger;
import org.elasticsearch.common.settings.Settings;
import org.elasticsearch.rest.BaseRestHandler;
import org.elasticsearch.rest.RestController;
@@ -18,17 +20,26 @@ import org.elasticsearch.xpack.core.ml.job.process.autodetect.state.ModelSnapsho
import java.io.IOException;
import static org.elasticsearch.rest.RestRequest.Method.DELETE;
public class RestDeleteModelSnapshotAction extends BaseRestHandler {
private static final DeprecationLogger deprecationLogger =
new DeprecationLogger(LogManager.getLogger(RestDeleteModelSnapshotAction.class));
public RestDeleteModelSnapshotAction(Settings settings, RestController controller) {
super(settings);
controller.registerHandler(RestRequest.Method.DELETE, MachineLearning.BASE_PATH + "anomaly_detectors/{"
+ Job.ID.getPreferredName() + "}/model_snapshots/{" + ModelSnapshotField.SNAPSHOT_ID.getPreferredName() + "}", this);
// TODO: remove deprecated endpoint in 8.0.0
controller.registerWithDeprecatedHandler(
DELETE, MachineLearning.BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() +
"}/model_snapshots/{" + ModelSnapshotField.SNAPSHOT_ID.getPreferredName() + "}", this,
DELETE, MachineLearning.PRE_V7_BASE_PATH + "anomaly_detectors/{" + Job.ID.getPreferredName() +
"}/model_snapshots/{" + ModelSnapshotField.SNAPSHOT_ID.getPreferredName() + "}", deprecationLogger);
}
@Override
public String getName() {
return "xpack_ml_delete_model_snapshot_action";
return "ml_delete_model_snapshot_action";
}
@Override

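Taken together, and assuming MachineLearning.BASE_PATH and PRE_V7_BASE_PATH resolve to /_ml/ and /_xpack/ml/ respectively, the handlers in this part of the commit keep the following deprecated prefixes working while steering clients to their replacements:

_xpack/ml/anomaly_detectors/...  ->  _ml/anomaly_detectors/...
_xpack/ml/datafeeds/...          ->  _ml/datafeeds/...
_xpack/ml/calendars/...          ->  _ml/calendars/...
_xpack/ml/filters/...            ->  _ml/filters/...
_xpack/ml/info, _xpack/ml/find_file_structure and _xpack/ml/_delete_expired_data  ->  the same suffixes under _ml/

Each deprecated route is scheduled for removal in 8.0.0, per the TODO comments added alongside every registration.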
Some files were not shown because too many files have changed in this diff.