[TEST] Don't duplicate the MlRestTestStateCleaner class (elastic/x-pack-elasticsearch#1127)

We didn't realise it was possible for a qa module to depend on the
test classes of the plugin module, so we duplicated a test class.
But it turns out it IS possible to declare this dependency and avoid
the duplication.

Original commit: elastic/x-pack-elasticsearch@b6a21cda28
David Roberts 2017-04-20 09:13:04 +01:00 committed by GitHub
parent 7c3a3cce80
commit b03147bea9
2 changed files with 1 addition and 128 deletions

@@ -3,6 +3,7 @@ apply plugin: 'elasticsearch.rest-test'
dependencies {
    testCompile project(path: ':x-pack-elasticsearch:plugin', configuration: 'runtime')
    testCompile project(path: ':x-pack-elasticsearch:plugin', configuration: 'testArtifacts')
}
// bring in machine learning rest test suite
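For context, the 'testArtifacts' configuration consumed by the new dependency above is not defined in this commit. On the plugin side it is typically wired up by packaging the compiled test classes into a test jar and publishing it on a dedicated configuration, roughly along these lines (a hypothetical sketch; the task and configuration names are assumptions, not taken from this commit):

// Hypothetical plugin-side build.gradle fragment exposing test classes
configurations {
    // consumers resolve this configuration to obtain the plugin's test classes
    testArtifacts.extendsFrom testRuntime
}

task testJar(type: Jar) {
    // bundle the compiled test sources (e.g. MlRestTestStateCleaner) into a jar
    classifier 'test'
    from sourceSets.test.output
}

artifacts {
    // publish the test jar on the testArtifacts configuration so that
    // project dependencies with configuration: 'testArtifacts' can consume it
    testArtifacts testJar
}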

@@ -1,128 +0,0 @@
/*
 * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
 * or more contributor license agreements. Licensed under the Elastic License;
 * you may not use this file except in compliance with the Elastic License.
 */
package org.elasticsearch.xpack.ml.integration;

import org.apache.logging.log4j.Logger;
import org.elasticsearch.client.RestClient;
import org.elasticsearch.common.xcontent.support.XContentMapValues;
import org.elasticsearch.test.rest.ESRestTestCase;

import java.io.IOException;
import java.util.Collections;
import java.util.List;
import java.util.Map;

/*
 * NOTE: a copy of this file resides in :x-pack-elasticsearch:plugin
 *
 * Therefore any changes here have to be transferred there, too.
 * (Or eventually fix it by introducing a common test infrastructure package)
 */
public class MlRestTestStateCleaner {

    private final Logger logger;
    private final RestClient adminClient;
    private final ESRestTestCase testCase;

    public MlRestTestStateCleaner(Logger logger, RestClient adminClient, ESRestTestCase testCase) {
        this.logger = logger;
        this.adminClient = adminClient;
        this.testCase = testCase;
    }

    public void clearMlMetadata() throws IOException {
        deleteAllDatafeeds();
        deleteAllJobs();
        deleteDotML();
    }
@SuppressWarnings("unchecked")
private void deleteAllDatafeeds() throws IOException {
Map<String, Object> clusterStateAsMap = testCase.entityAsMap(adminClient.performRequest("GET", "/_cluster/state",
Collections.singletonMap("filter_path", "metadata.ml.datafeeds")));
List<Map<String, Object>> datafeeds =
(List<Map<String, Object>>) XContentMapValues.extractValue("metadata.ml.datafeeds", clusterStateAsMap);
if (datafeeds == null) {
return;
}
try {
int statusCode = adminClient.performRequest("POST", "/_xpack/ml/datafeeds/_all/_stop")
.getStatusLine().getStatusCode();
if (statusCode != 200) {
logger.error("Got status code " + statusCode + " when stopping datafeeds");
}
} catch (Exception e1) {
logger.warn("failed to stop all datafeeds. Forcing stop", e1);
try {
int statusCode = adminClient
.performRequest("POST", "/_xpack/ml/datafeeds/_all/_stop?force=true")
.getStatusLine().getStatusCode();
if (statusCode != 200) {
logger.error("Got status code " + statusCode + " when stopping datafeeds");
}
} catch (Exception e2) {
logger.warn("Force-closing all data feeds failed", e2);
}
throw new RuntimeException(
"Had to resort to force-stopping datafeeds, something went wrong?", e1);
}
for (Map<String, Object> datafeed : datafeeds) {
String datafeedId = (String) datafeed.get("datafeed_id");
int statusCode = adminClient.performRequest("DELETE", "/_xpack/ml/datafeeds/" + datafeedId).getStatusLine().getStatusCode();
if (statusCode != 200) {
logger.error("Got status code " + statusCode + " when deleting datafeed " + datafeedId);
}
}
}
    private void deleteAllJobs() throws IOException {
        Map<String, Object> clusterStateAsMap = testCase.entityAsMap(adminClient.performRequest("GET", "/_cluster/state",
                Collections.singletonMap("filter_path", "metadata.ml.jobs")));
        @SuppressWarnings("unchecked")
        List<Map<String, Object>> jobConfigs =
                (List<Map<String, Object>>) XContentMapValues.extractValue("metadata.ml.jobs", clusterStateAsMap);
        if (jobConfigs == null) {
            return;
        }

        try {
            int statusCode = adminClient
                    .performRequest("POST", "/_xpack/ml/anomaly_detectors/_all/_close")
                    .getStatusLine().getStatusCode();
            if (statusCode != 200) {
                logger.error("Got status code " + statusCode + " when closing all jobs");
            }
        } catch (Exception e1) {
            logger.warn("failed to close all jobs. Forcing closed", e1);
            try {
                adminClient.performRequest("POST",
                        "/_xpack/ml/anomaly_detectors/_all/_close?force=true");
            } catch (Exception e2) {
                logger.warn("Force-closing all jobs failed", e2);
            }
            throw new RuntimeException("Had to resort to force-closing jobs, something went wrong?",
                    e1);
        }

        for (Map<String, Object> jobConfig : jobConfigs) {
            String jobId = (String) jobConfig.get("job_id");
            int statusCode = adminClient.performRequest("DELETE", "/_xpack/ml/anomaly_detectors/" + jobId).getStatusLine().getStatusCode();
            if (statusCode != 200) {
                logger.error("Got status code " + statusCode + " when deleting job " + jobId);
            }
        }
    }
    private void deleteDotML() throws IOException {
        int statusCode = adminClient.performRequest("DELETE", ".ml-*?ignore_unavailable=true").getStatusLine().getStatusCode();
        if (statusCode != 200) {
            logger.error("Got status code " + statusCode + " when deleting .ml-* indexes");
        }
    }
}
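With the class now shared through the plugin's test artifacts, a REST test in the qa module can keep using it unchanged. A minimal, hypothetical usage sketch follows (the test class name is illustrative and not part of this commit; adminClient() and logger are inherited from ESRestTestCase/ESTestCase):

package org.elasticsearch.xpack.ml.integration;

import org.elasticsearch.test.rest.ESRestTestCase;
import org.junit.After;

import java.io.IOException;

// Hypothetical qa-module REST test reusing the shared MlRestTestStateCleaner
public class SomeMlRestIT extends ESRestTestCase {

    @After
    public void clearMlState() throws IOException {
        // clean up datafeeds, jobs and .ml-* indices after each test
        new MlRestTestStateCleaner(logger, adminClient(), this).clearMlMetadata();
    }
}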